├── CNAME ├── _config.yml ├── frontend ├── loadtest │ ├── targets.txt │ ├── load.sh │ └── 10.json ├── deploy │ ├── ingress.yaml │ ├── frontend-service.yaml │ ├── frontend-monitor-service.yaml │ ├── prom-frontend.yaml │ ├── frontend-autoscale.yaml │ ├── frontend-configmap.yaml │ └── frontend-deployment.yaml ├── scripts │ ├── dockerize.sh │ └── build_cluster.sh ├── Dockerfile ├── go.mod ├── types │ ├── types.go │ └── types_ffjson.go ├── README.md ├── config │ └── config.go ├── main.go ├── publisher │ └── publisher.go ├── collector │ └── collector.go └── go.sum ├── Banias_Architecture.png ├── banias-logo-lowres.png ├── banias-logo-lowres-trimmed.png ├── .travis.yml ├── backend ├── src │ ├── test │ │ ├── transaction_16.json │ │ ├── click_64.json │ │ ├── transaction_32.json │ │ └── test_events.txt │ └── main │ │ └── java │ │ └── com │ │ └── doitintl │ │ └── banias │ │ ├── MapEvents.java │ │ ├── BaniasPipelineOptions.java │ │ ├── BaseMap.java │ │ ├── SchemaHelpers.java │ │ └── BaniasPipeline.java ├── .gitignore ├── pom.xml ├── Makefile └── README.md ├── LICENSE ├── README.md └── .gitignore /CNAME: -------------------------------------------------------------------------------- 1 | banias.io -------------------------------------------------------------------------------- /_config.yml: -------------------------------------------------------------------------------- 1 | theme: jekyll-theme-minimal -------------------------------------------------------------------------------- /frontend/loadtest/targets.txt: -------------------------------------------------------------------------------- 1 | POST http://xx.xxx.xxx.xxx:80/track 2 | @10.json -------------------------------------------------------------------------------- /Banias_Architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/doitintl/banias/HEAD/Banias_Architecture.png -------------------------------------------------------------------------------- /banias-logo-lowres.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/doitintl/banias/HEAD/banias-logo-lowres.png -------------------------------------------------------------------------------- /banias-logo-lowres-trimmed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/doitintl/banias/HEAD/banias-logo-lowres-trimmed.png -------------------------------------------------------------------------------- /frontend/deploy/ingress.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: extensions/v1beta1 2 | kind: Ingress 3 | metadata: 4 | name: banias-ingress 5 | spec: 6 | backend: 7 | serviceName: banias-frontend 8 | servicePort: 8081 -------------------------------------------------------------------------------- /frontend/scripts/dockerize.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | docker build -t banias-frontend . 3 | docker tag banias-frontend gcr.io/$PROJECT_ID/banias-frontend 4 | gcloud docker -- push gcr.io/$PROJECT_ID/banias-frontend -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: go 2 | 3 | env: 4 | - env GO111MODULE=on 5 | 6 | go: 7 | - 1.11.4 8 | 9 | install: 10 | - cd frontend 11 | 12 | script: go build . 
13 | 
14 | notifications:
15 |   email: false
--------------------------------------------------------------------------------
/frontend/loadtest/load.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Vegeta from here: https://github.com/tsenart/vegeta
3 | vegeta -cpus 4 attack -targets=targets.txt -duration=1m -rate 10000 | tee /tmp/results.bin | vegeta report
4 | 
--------------------------------------------------------------------------------
/frontend/deploy/frontend-service.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: Service
3 | metadata:
4 |   name: banias-frontend
5 |   labels:
6 |     app: banias-frontend
7 | spec:
8 |   type: NodePort
9 |   selector:
10 |     app: banias-frontend
11 |   ports:
12 |   - name: http
13 |     port: 8081
14 | 
--------------------------------------------------------------------------------
/frontend/deploy/frontend-monitor-service.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: Service
3 | metadata:
4 |   name: banias-monitor-service
5 |   labels:
6 |     k8s-app: banias-frontend
7 | spec:
8 |   ports:
9 |   - name: prom-metrics
10 |     port: 8080
11 |     protocol: TCP
12 |   selector:
13 |     k8s-app: banias-frontend
14 | 
15 | 
--------------------------------------------------------------------------------
/frontend/deploy/prom-frontend.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: monitoring.coreos.com/v1
2 | kind: ServiceMonitor
3 | metadata:
4 |   name: banias-frontend
5 |   namespace: monitoring
6 |   labels:
7 |     k8s-app: banias-frontend
8 | spec:
9 |   selector:
10 |     matchLabels:
11 |       k8s-app: banias-frontend
12 |   endpoints:
13 |   - port: prom-metrics
14 |     interval: 10s
15 |   namespaceSelector:
16 |     matchNames:
17 |     - default
18 |     - monitoring
--------------------------------------------------------------------------------
/frontend/deploy/frontend-autoscale.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: autoscaling/v2beta1
2 | kind: HorizontalPodAutoscaler
3 | metadata:
4 |   name: banias-frontend
5 |   namespace: default
6 | spec:
7 |   scaleTargetRef:
8 |     apiVersion: apps/v1beta1
9 |     kind: Deployment
10 |     name: banias-frontend
11 |   minReplicas: 3
12 |   maxReplicas: 20
13 |   metrics:
14 |   - type: Resource
15 |     resource:
16 |       name: cpu
17 |       targetAverageUtilization: 80
--------------------------------------------------------------------------------
/backend/src/test/transaction_16.json:
--------------------------------------------------------------------------------
1 | {
2 |   "fields": [
3 |     {
4 |       "name": "SenderID",
5 |       "mode": "NULLABLE",
6 |       "type": "STRING"
7 |     },
8 |     {
9 |       "name": "action",
10 |       "mode": "NULLABLE",
11 |       "type": "STRING"
12 |     },
13 |     {
14 |       "name": "price",
15 |       "mode": "NULLABLE",
16 |       "type": "INTEGER"
17 |     },
18 |     {
19 |       "name": "screen",
20 |       "mode": "NULLABLE",
21 |       "type": "STRING"
22 |     }
23 |   ]
24 | }
--------------------------------------------------------------------------------
/frontend/deploy/frontend-configmap.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | data:
3 |   BANIAS_PROJECTID: "my-project"
4 |   BANIAS_DEBUG: "false"
5 |   BANIAS_TOPIC: "banias"
6 |   BANIAS_PORT: "8081"
7 |   BANIAS_METRICSPORT: "8080"
8 |   BANIAS_PUBSUBMAXBATCH: "1000"
9 |   BANIAS_PUBSUBAGGRIGATORS: "30"
10 | 
BANIAS_PUBSUBMAXPUBLISHDELAY: "5" 11 | BANIAS_MAXPUBSUBGOROUTINESAMOUNT: "50000" 12 | BANIAS_MAXPUBSUBGOROUTINEIDLEDURATION: "10" 13 | 14 | 15 | kind: ConfigMap 16 | metadata: 17 | labels: 18 | app: banias-frontend 19 | name: banias-frontend-config 20 | namespace: default -------------------------------------------------------------------------------- /backend/src/test/click_64.json: -------------------------------------------------------------------------------- 1 | { 2 | "fields": [ 3 | { 4 | "name": "SenderID", 5 | "mode": "NULLABLE", 6 | "type": "STRING" 7 | }, 8 | { 9 | "name": "action", 10 | "mode": "NULLABLE", 11 | "type": "STRING" 12 | }, 13 | { 14 | "name": "price", 15 | "mode": "NULLABLE", 16 | "type": "INTEGER" 17 | }, 18 | { 19 | "name": "date", 20 | "mode": "NULLABLE", 21 | "type": "DATE" 22 | }, 23 | { 24 | "name": "screen", 25 | "mode": "NULLABLE", 26 | "type": "STRING" 27 | } 28 | ] 29 | } -------------------------------------------------------------------------------- /backend/src/test/transaction_32.json: -------------------------------------------------------------------------------- 1 | { 2 | "fields": [ 3 | { 4 | "name": "SenderID", 5 | "mode": "NULLABLE", 6 | "type": "STRING" 7 | }, 8 | { 9 | "name": "action", 10 | "mode": "NULLABLE", 11 | "type": "STRING" 12 | }, 13 | { 14 | "name": "price", 15 | "mode": "NULLABLE", 16 | "type": "INTEGER" 17 | }, 18 | { 19 | "name": "date", 20 | "mode": "NULLABLE", 21 | "type": "DATE" 22 | }, 23 | { 24 | "name": "screen", 25 | "mode": "NULLABLE", 26 | "type": "STRING" 27 | } 28 | ] 29 | } 30 | -------------------------------------------------------------------------------- /frontend/scripts/build_cluster.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | gcloud container clusters create $CLUSTERNAME --cluster-version \ 3 | "1.11.5-gke.5" --no-enable-legacy-authorization --image-type "UBUNTU" \ 4 | --machine-type "n1-standard-4" \ 5 | --scopes "https://www.googleapis.com/auth/compute","https://www.googleapis.com/auth/devstorage.read_only","https://www.googleapis.com/auth/logging.write","https://www.googleapis.com/auth/monitoring","https://www.googleapis.com/auth/pubsub","https://www.googleapis.com/auth/servicecontrol","https://www.googleapis.com/auth/service.management.readonly","https://www.googleapis.com/auth/trace.append" \ 6 | --network "default" --enable-autoscaling --enable-cloud-logging --enable-cloud-monitoring --subnetwork "default" \ 7 | --num-nodes "3" --min-nodes 3 --max-nodes 10 -------------------------------------------------------------------------------- /frontend/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM golang:1.12-alpine AS build_base 2 | LABEL maintainer="Aviv Laufer " 3 | RUN apk update && apk upgrade && \ 4 | apk add --no-cache git build-base ca-certificates 5 | 6 | WORKDIR /go/src/github.com/doitintl/banias/frontend 7 | ENV GO111MODULE=on 8 | COPY go.mod . 9 | COPY go.sum . 10 | RUN go mod download 11 | 12 | 13 | FROM build_base AS builder 14 | COPY . . 15 | 16 | RUN cd /go/src/github.com/doitintl/banias/frontend && GO111MODULE=on CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -ldflags "-w -s" -o /banias-frontend -tags netgo -installsuffix netgo . 
17 | 18 | FROM alpine 19 | RUN apk add --no-cache ca-certificates 20 | 21 | COPY --from=builder //banias-frontend /bin//banias-frontend 22 | 23 | ENTRYPOINT ["/bin//banias-frontend"] 24 | 25 | 26 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /backend/.gitignore: -------------------------------------------------------------------------------- 1 | ### Example user template template 2 | ### Example user template 3 | 4 | # IntelliJ project files 5 | .idea 6 | *.iml 7 | out 8 | gen 9 | 10 | # Log file 11 | *.log 12 | 13 | # BlueJ files 14 | *.ctxt 15 | 16 | # Mobile Tools for Java (J2ME) 17 | .mtj.tmp/ 18 | 19 | # Package Files # 20 | *.war 21 | *.ear 22 | *.zip 23 | *.tar.gz 24 | *.rar 25 | 26 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 27 | hs_err_pid* 28 | ### Maven template 29 | target/ 30 | pom.xml.tag 31 | pom.xml.releaseBackup 32 | pom.xml.versionsBackup 33 | pom.xml.next 34 | release.properties 35 | dependency-reduced-pom.xml 36 | buildNumber.properties 37 | .mvn/timing.properties 38 | 39 | # Avoid ignoring Maven wrapper jar file (.jar files are usually ignored) 40 | !/.mvn/wrapper/maven-wrapper.jar 41 | 42 | -------------------------------------------------------------------------------- /backend/src/main/java/com/doitintl/banias/MapEvents.java: -------------------------------------------------------------------------------- 1 | package com.doitintl.banias; 2 | 3 | import com.google.api.services.bigquery.model.TableRow; 4 | import org.apache.beam.sdk.values.TupleTag; 5 | import org.json.JSONObject; 6 | 7 | import java.util.Iterator; 8 | 9 | class MapEvents extends BaseMap { 10 | private static final long serialVersionUID = -8423214840205956145L; 11 | 12 | MapEvents(TupleTag errorsTag) { 13 | super(errorsTag); 14 | } 15 | 16 | @Override 17 | String getType() { 18 | return "events"; 19 | } 20 | 21 | @Override 22 | void map(JSONObject json, TableRow tableRow) { 23 | Iterator keys = json.keys(); 24 | 25 | keys.forEachRemaining(key -> { 26 | if(json.get(key) instanceof JSONObject){ 27 | TableRow tmpTableRow = new TableRow(); 28 | map(json.getJSONObject(key), tmpTableRow); 29 | tableRow.set(key,tmpTableRow); 30 | } 31 | else{ 32 | tableRow.set(key, json.get(key)); 33 | } 34 | }); 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /backend/src/main/java/com/doitintl/banias/BaniasPipelineOptions.java: -------------------------------------------------------------------------------- 1 | package com.doitintl.banias; 2 | 3 | import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions; 4 | import org.apache.beam.sdk.options.Description; 5 | import org.apache.beam.sdk.options.Validation; 6 | 7 | interface BaniasPipelineOptions extends DataflowPipelineOptions { 8 | @Description("Full path for 'events' PubSub subscription to accept messages from. Required. Format: projects//subscriptions/") 9 | @Validation.Required 10 | String getEventsSubscriptionPath(); 11 | void setEventsSubscriptionPath(String value); 12 | 13 | @Description("Full path for schemas GCS bucket. 
Required.") 14 | @Validation.Required 15 | String getGCSSchemasBucketName(); 16 | void setGCSSchemasBucketName(String value); 17 | 18 | @Description("BigQuery errors table name.") 19 | @Validation.Required 20 | String getErrorsTableName(); 21 | void setErrorsTableName(String value); 22 | 23 | @Description("List of BiqQuery dataset name.") 24 | @Validation.Required 25 | String getDataset(); 26 | void setDataset(String value); 27 | } 28 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 DoiT International 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | 
--------------------------------------------------------------------------------
/frontend/go.mod:
--------------------------------------------------------------------------------
1 | module github.com/doitintl/banias/frontend
2 | 
3 | require (
4 | 	cloud.google.com/go v0.34.0
5 | 	contrib.go.opencensus.io/exporter/ocagent v0.4.2 // indirect
6 | 	contrib.go.opencensus.io/exporter/stackdriver v0.9.0
7 | 	github.com/BurntSushi/toml v0.3.1 // indirect
8 | 	github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23
9 | 	github.com/census-instrumentation/opencensus-proto v0.1.0 // indirect
10 | 	github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e // indirect
11 | 	github.com/henrylee2cn/goutil v0.0.0-20190108065108-a4da700f03b3
12 | 	github.com/jtolds/gls v4.2.1+incompatible // indirect
13 | 	github.com/oklog/oklog v0.3.2
14 | 	github.com/oklog/run v1.0.0 // indirect
15 | 	github.com/pkg/errors v0.8.1 // indirect
16 | 	github.com/pquerna/ffjson v0.0.0-20181028064349-e517b90714f7
17 | 	github.com/smartystreets/assertions v0.0.0-20190116191733-b6c0e53d7304 // indirect
18 | 	github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c // indirect
19 | 	github.com/spf13/viper v1.3.1
20 | 	github.com/valyala/fasthttp v1.1.0
21 | 	go.opencensus.io v0.18.1-0.20181204023538-aab39bd6a98b
22 | 	go.uber.org/atomic v1.3.2 // indirect
23 | 	go.uber.org/multierr v1.1.0 // indirect
24 | 	go.uber.org/zap v1.9.1
25 | )
26 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Banias
2 | 
3 | ![](banias-logo-lowres-trimmed.png)
4 | 
5 | [![License](https://img.shields.io/github/license/doitintl/banias.svg)](LICENSE) [![GitHub stars](https://img.shields.io/github/stars/doitintl/banias.svg?style=social&label=Stars&style=for-the-badge)](https://github.com/doitintl/banias) [![Build Status](https://secure.travis-ci.org/doitintl/banias.png?branch=master)](http://travis-ci.org/doitintl/banias)
6 | 
7 | [Blog Post](https://blog.doit-intl.com/say-goodbye-to-mixpanel-meet-banias-12e09fecc44a)
8 | 
9 | Banias (Arabic: بانياس الحولة‎; Hebrew: בניאס‬) is the Arabic and modern Hebrew name of an ancient site developed around a river once associated with the Greek god [Pan](https://www.wikiwand.com/en/Pan_(mythology)).
10 | 
11 | So, like the flow of the Banias, events flow into our system. We decided to build a reference architecture and an actual implementation of an event analytics pipeline. You can take the code as it is and use it, or use it as a design reference.
12 | 
13 | Banias Architecture:
14 | * API receiving the events data from the producers (e.g. web apps, mobile apps or backend servers)
15 | * The events are sent to a Google Pub/Sub topic
16 | * Apache Beam/Google Cloud Dataflow streams the events into BigQuery, with or without mutations or aggregations
17 | 
18 | ![](Banias_Architecture.png)
19 | 
20 | ## Installation
21 | 
22 | ### [API Deployment](frontend/README.md)
23 | 
24 | ### [Apache Beam/Google Cloud Dataflow Deployment](backend/README.md)
25 | 
--------------------------------------------------------------------------------
/backend/src/test/test_events.txt:
--------------------------------------------------------------------------------
1 | {"SenderID":"my id","Event":{"type":{"event_version":"16","event_name":"transaction"},"payload":{"action":"sell","price":170,"screen":"welcome"}}}
2 | {"SenderID":"my id","Event":{"type":{"event_version":"32","event_name":"transaction"},"payload":{"action":"sell","date":"1967-03-31","price":170,"screen":"welcome"}}}
3 | {"SenderID":"my id","Event":{"type":{"event_version":"64","event_name":"click"}, "payload":{"action":"sell","date":"1967-03-31","price":170,"screen":"welcome"}}}
4 | 
5 | {
6 |   "SenderID":"my id",
7 |   "Event":{
8 |     "type":{
9 |       "event_version":"16",
10 |       "event_name":"transaction"
11 |     },
12 |     "payload":{
13 |       "action":"sell",
14 |       "price":170,
15 |       "screen":"welcome"
16 |     }
17 |   }
18 | }
19 | 
20 | {
21 |   "SenderID":"my id",
22 |   "Event":{
23 |     "type":{
24 |       "event_version":"32",
25 |       "event_name":"transaction"
26 |     },
27 |     "payload":{
28 |       "action":"sell",
29 |       "date":"1967-03-31",
30 |       "price":170,
31 |       "screen":"welcome"
32 |     }
33 |   }
34 | }
35 | 
36 | {
37 |   "SenderID":"my id",
38 |   "Event":{
39 |     "type":{
40 |       "event_version":"64",
41 |       "event_name":"click"
42 |     },
43 |     "payload":{
44 |       "action":"sell",
45 |       "date":"1967-03-31",
46 |       "price":170,
47 |       "screen":"welcome"
48 |     }
49 |   }
50 | }
--------------------------------------------------------------------------------
/frontend/types/types.go:
--------------------------------------------------------------------------------
1 | package types
2 | 
3 | import (
4 | 	"strconv"
5 | 	"strings"
6 | )
7 | 
8 | // {"sender_id": "id", "events": [ e1, e2, ... ] }
9 | //
10 | // where e1 is an object with all the information you need.
11 | // {
12 | //   "type": { "event_version": ..., "event_name": ...},
13 | //   "payload": { the event payload itself goes here in whatever structure you need },
14 | // }
15 | 
16 | type TrackRequest struct {
17 | 	SenderID string  `json:"sender_id" valid:"notempty,required"`
18 | 	Events   []Event `json:"events"`
19 | }
20 | 
21 | type Event struct {
22 | 	TypeField    Type    `json:"type"`
23 | 	PayloadField Payload `json:"payload" valid:"-"`
24 | }
25 | 
26 | // Type is an Event's metadata.
27 | type Type struct {
28 | 	EventVersionField string `json:"event_version" valid:"notempty,required"`
29 | 	EventNameField    string `json:"event_name" valid:"notempty,required"`
30 | }
31 | 
32 | type EventMsg struct {
33 | 	SenderID string
34 | 	Event    Event
35 | }
36 | 
37 | // Payload is the event's actual data inserted into data stores.
38 | type Payload map[string]interface{}
39 | 
40 | // Error describes either an API error, or a rejected event.
41 | // If rejected event, then Index field will be included.
42 | type Error struct {
43 | 	Detail string `json:"detail"`
44 | 	// This is a pointer in order to marshal '0' value
45 | 	// but not if uninitialized.
46 | 	Index *uint64 `json:"index,omitempty"`
47 | }
48 | 
49 | // NewError returns an initialized Error.
50 | func NewError(index uint64, detail string) *Error {
51 | 	return &Error{Index: &index, Detail: detail}
52 | }
53 | 
54 | func (err *Error) Error() string {
55 | 	return strings.Join(
56 | 		[]string{"item ", strconv.FormatUint(*err.Index, 10), ": ", err.Detail},
57 | 		"")
58 | }
59 | 
60 | 
61 | 
--------------------------------------------------------------------------------
/backend/src/main/java/com/doitintl/banias/BaseMap.java:
--------------------------------------------------------------------------------
1 | package com.doitintl.banias;
2 | 
3 | import com.google.api.services.bigquery.model.TableRow;
4 | import org.apache.beam.sdk.transforms.DoFn;
5 | import org.apache.beam.sdk.values.TupleTag;
6 | import org.json.JSONObject;
7 | import org.slf4j.Logger;
8 | import org.slf4j.LoggerFactory;
9 | 
10 | abstract class BaseMap extends DoFn {
11 | 	private static final Logger LOG = LoggerFactory.getLogger(BaseMap.class);
12 | 	private static final long serialVersionUID = -7252861063735099405L;
13 | 	private TupleTag errorsTag;
14 | 
15 | 	BaseMap(TupleTag errorsTag) {
16 | 		this.errorsTag = errorsTag;
17 | 	}
18 | 
19 | 	abstract String getType();
20 | 
21 | 	abstract void map(JSONObject json, TableRow tableRow);
22 | 
23 | 	@ProcessElement
24 | 	public void processElement(ProcessContext processContext) {
25 | 		TableRow tableRow = new TableRow();
26 | 
27 | 		try {
28 | 			JSONObject json = new JSONObject(processContext.element());
29 | 
30 | 			JSONObject eventJson = json.getJSONObject("Event");
31 | 			JSONObject typeJson = eventJson.getJSONObject("type");
32 | 			JSONObject payloadJson = eventJson.getJSONObject("payload");
33 | 
34 | 			tableRow.set("SenderID", json.getString("SenderID"));
35 | 			tableRow.set("event_version", typeJson.getString("event_version"));
36 | 			tableRow.set("event_name", typeJson.getString("event_name"));
37 | 
38 | 			map(payloadJson, tableRow);
39 | 			processContext.output(tableRow);
40 | 
41 | 		} catch (Exception e) {
42 | 			LOG.error(e.getMessage(), e);
43 | 			tableRow.clear();
44 | 			tableRow.set(SchemaHelpers.ERROR_EVENT_TYPE_STR, getType());
45 | 			tableRow.set(SchemaHelpers.ERROR_EVENT_RAW_INPUT_STR, processContext.element());
46 | 			tableRow.set(SchemaHelpers.ERROR_EVENT_ERROR_STR, e.toString());
47 | 			processContext.output(errorsTag, tableRow);
48 | 		}
49 | 	}
50 | }
51 | 
--------------------------------------------------------------------------------
/frontend/README.md:
--------------------------------------------------------------------------------
1 | 
2 | ## Installation
3 | 
4 | * Setup a GKE cluster - `scripts/build_cluster.sh`
5 | * If you want to use Prometheus for monitoring, we recommend `https://github.com/camilb/prometheus-kubernetes`
6 | * Build the images:
7 |   * `mkdir -p $GOPATH/src/github.com/doitintl/`
8 |   * `git clone git@github.com:doitintl/banias.git`
9 |   * `cd banias/frontend`
10 |   * `export PROJECT_ID=your_project_id`
11 | * Build and deploy the image `scripts/dockerize.sh`
12 | * Create a service account with permissions for Google Pub/Sub and Stackdriver.
13 | * Download the key file in JSON format
14 | * Create secret - `kubectl create secret generic pubsub-key --from-file=key.json=filename`
15 | * Replace `BANIAS_PROJECTID: "my-project"` with your project name in `deploy/frontend-configmap.yaml`
16 | * Replace `image: "gcr.io/my-project/banias-frontend:test"` with your project name and version in `deploy/frontend-deployment.yaml`
17 | * Change the configuration in `deploy/frontend-configmap.yaml`
18 | * Deploy - `kubectl apply -f deploy/.`
19 | 
20 | 
21 | ### Configuration
22 | 
23 | * **Project ID** - BANIAS_PROJECTID: "your-project-id"
24 | 
25 | * **Debug flag for log prints** - default level is Info. - BANIAS_DEBUG: "false"
26 | 
27 | * **Pub/Sub topic name** - BANIAS_TOPIC: "banias"
28 | 
29 | * **HTTP port** - BANIAS_PORT: "8081"
30 | 
31 | * **Prometheus port** - BANIAS_METRICSPORT: "8080"
32 | 
33 | * **Maximum size of a single batch** - BANIAS_PUBSUBMAXBATCH: "1000"
34 | 
35 | * **Number of pools that listen for events from users** - BANIAS_PUBSUBAGGRIGATORS: "30"
36 | 
37 | * **Max time to pass in seconds before publishing** - BANIAS_PUBSUBMAXPUBLISHDELAY: "5"
38 | 
39 | * **The default maximum amount of goroutines for publishing** - BANIAS_MAXPUBSUBGOROUTINESAMOUNT: "262144"
40 | 
41 | * **The default maximum idle duration of a goroutine** - BANIAS_MAXPUBSUBGOROUTINEIDLEDURATION: "10"
42 | 
--------------------------------------------------------------------------------
/frontend/config/config.go:
--------------------------------------------------------------------------------
1 | package config
2 | 
3 | import (
4 | 	"math"
5 | 	"time"
6 | 
7 | 	"cloud.google.com/go/pubsub"
8 | 	"github.com/spf13/viper"
9 | )
10 | 
11 | type Config struct {
12 | 	// Project ID
13 | 	ProjectID string
14 | 	// Pub/Sub topic name
15 | 	Topic string
16 | 	// Debug flag for log prints - default level is Info.
17 | 	Debug bool
18 | 	// HTTP port.
19 | 	Port int
20 | 	// Prometheus port.
21 | 	MetricsPort int
22 | 	// Maximum size of a single batch.
23 | 	PubsubMaxBatch int
24 | 	// Number of pools that listen for events from users
25 | 	PubSubAggrigators int
26 | 	// Max time to pass in seconds before publishing.
27 | 	PubsubMaxPublishDelay time.Duration
28 | 	// The default maximum amount of goroutines for publishing.
29 | 	MaxPubSubGoroutinesAmount int
30 | 	// The default maximum idle duration of a goroutine.
31 | MaxPubSubGoroutineIdleDuration time.Duration 32 | } 33 | 34 | func setConfigDefaults() { 35 | viper.SetDefault("Topic", "banias") 36 | viper.SetDefault("Debug", false) 37 | viper.SetDefault("Port", 8081) 38 | viper.SetDefault("MetricsPort", 8080) 39 | viper.SetDefault("PubsubMaxBatch", 1000) 40 | viper.SetDefault("PubSubAggrigators", 30) 41 | viper.SetDefault("PubsubMaxPublishDelay", 5) 42 | viper.SetDefault("MaxPubSubGoroutinesAmount", 256*1024) 43 | viper.SetDefault("MaxPubSubGoroutineIdleDuration", 10) 44 | 45 | } 46 | 47 | func NewConfig() (*Config, error) { 48 | viper.SetEnvPrefix("banias") 49 | viper.AutomaticEnv() 50 | setConfigDefaults() 51 | c := Config{ 52 | ProjectID: viper.GetString("projectid"), 53 | Topic: viper.GetString("topic"), 54 | Debug: viper.GetBool("debug"), 55 | Port: viper.GetInt("port"), 56 | MetricsPort: viper.GetInt("metricsport"), 57 | PubsubMaxBatch: int(math.Min(float64(viper.GetInt("pubsubmaxbatch")), float64(pubsub.MaxPublishRequestCount))), 58 | PubSubAggrigators: viper.GetInt("pubsubaggrigators"), 59 | PubsubMaxPublishDelay: time.Duration(viper.GetInt("pubsubmaxpublishdelay")) * time.Second, 60 | MaxPubSubGoroutinesAmount: viper.GetInt("maxpubsubgoroutinesamount"), 61 | MaxPubSubGoroutineIdleDuration: time.Duration(viper.GetInt("maxpubsubgoroutineidleduration")), 62 | } 63 | return &c, nil 64 | } 65 | -------------------------------------------------------------------------------- /frontend/loadtest/10.json: -------------------------------------------------------------------------------- 1 | 2 | { 3 | "sender_id": "my id", 4 | "events": [ 5 | { 6 | "type": { 7 | "event_version": "16", 8 | "event_name": "transaction" 9 | }, 10 | "payload": { 11 | "action": "buy", 12 | "price": 170, 13 | "date": "1967-03-31" 14 | } 15 | }, 16 | { 17 | "type": { 18 | "event_version": "32", 19 | "event_name": "transaction" 20 | }, 21 | "payload": { 22 | "action": "sell", 23 | "price": 170, 24 | "date": "1967-03-31" 25 | } 26 | }, 27 | { 28 | "type": { 29 | "event_version": "32", 30 | "event_name": "transaction" 31 | }, 32 | "payload": { 33 | "action": "sell", 34 | "price": 1120, 35 | "date": "1967-03-31" 36 | } 37 | }, 38 | { 39 | "type": { 40 | "event_version": "32", 41 | "event_name": "transaction" 42 | }, 43 | "payload": { 44 | "action": "sell", 45 | "price": 171, 46 | "date": "1967-03-31" 47 | } 48 | }, 49 | { 50 | "type": { 51 | "event_version": "32", 52 | "event_name": "transaction" 53 | }, 54 | "payload": { 55 | "action": "sell", 56 | "price": 172, 57 | "date": "1967-03-31" 58 | } 59 | }, 60 | { 61 | "type": { 62 | "event_version": "32", 63 | "event_name": "transaction" 64 | }, 65 | "payload": { 66 | "action": "sell", 67 | "price": 173, 68 | "date": "03/30/1967" 69 | } 70 | }, 71 | { 72 | "type": { 73 | "event_version": "32", 74 | "event_name": "transaction" 75 | }, 76 | "payload": { 77 | "action": "sell", 78 | "price": 170, 79 | "date": "03/30/1967" 80 | } 81 | }, 82 | { 83 | "type": { 84 | "event_version": "32", 85 | "event_name": "transaction" 86 | }, 87 | "payload": { 88 | "action": "sell", 89 | "price": 170, 90 | "date": "03/31/1968" 91 | } 92 | }, 93 | { 94 | "type": { 95 | "event_version": "32", 96 | "event_name": "transaction" 97 | }, 98 | "payload": { 99 | "action": "sell", 100 | "price": 170, 101 | "date": "01/31/1967" 102 | } 103 | }, 104 | 105 | { 106 | "type": { 107 | "event_version": "64", 108 | "event_name": "click" 109 | }, 110 | "payload": { 111 | "screen": "welcome" 112 | } 113 | } 114 | ] 115 | } 116 | 
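The 10.json document above is the batch of events that the load test posts to the collector. The same file can be used for a quick manual smoke test of the `/track` endpoint; assuming the frontend is reachable at some address on the configured BANIAS_PORT (8081 by default), a hypothetical invocation would look like this, where `<frontend-address>` is a placeholder for your service, ingress or node IP:

```
# Illustrative only: post the load-test batch to the collector's /track endpoint.
curl -X POST \
  -H "Content-Type: application/json" \
  --data @10.json \
  "http://<frontend-address>:8081/track"
```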
-------------------------------------------------------------------------------- /backend/src/main/java/com/doitintl/banias/SchemaHelpers.java: -------------------------------------------------------------------------------- 1 | package com.doitintl.banias; 2 | 3 | import com.google.api.services.bigquery.model.TableFieldSchema; 4 | import com.google.api.services.bigquery.model.TableSchema; 5 | import com.google.cloud.storage.Bucket; 6 | import com.google.cloud.storage.Storage; 7 | import com.google.cloud.storage.StorageOptions; 8 | import org.apache.beam.sdk.util.Transport; 9 | import org.slf4j.Logger; 10 | import org.slf4j.LoggerFactory; 11 | 12 | import java.io.IOException; 13 | import java.io.Serializable; 14 | import java.util.ArrayList; 15 | import java.util.List; 16 | import java.util.concurrent.ConcurrentHashMap; 17 | 18 | import static java.nio.charset.StandardCharsets.UTF_8; 19 | 20 | class SchemaHelpers implements Serializable { 21 | private static final Logger LOG = LoggerFactory.getLogger(SchemaHelpers.class); 22 | private static final long serialVersionUID = 4048366826024870134L; 23 | 24 | static final String ERROR_EVENT_TYPE_STR = "type"; 25 | static final String ERROR_EVENT_RAW_INPUT_STR = "raw_input"; 26 | static final String ERROR_EVENT_ERROR_STR = "error"; 27 | static final String ERROR_EVENT_DONT_HAVE_SCHEMA = "Event don't have a predefined schema"; 28 | 29 | static ConcurrentHashMap loadSchemaFromGCS(String bucketName){ 30 | ConcurrentHashMap schemas = new ConcurrentHashMap<>(); 31 | try{ 32 | Storage storage = StorageOptions.getDefaultInstance().getService(); 33 | Bucket bucket = storage.get(bucketName); 34 | 35 | bucket.list().iterateAll().forEach(blob -> { 36 | String gcsFileName = blob.getName(); 37 | String key = gcsFileName.substring(0,gcsFileName.indexOf(".json")); 38 | 39 | String val = new String(storage.readAllBytes(blob.getBlobId()), UTF_8); 40 | 41 | schemas.put(key, val); 42 | }); 43 | }catch (Exception e){ 44 | LOG.error(e.toString()); 45 | } 46 | 47 | return schemas; 48 | } 49 | 50 | static TableSchema fromJsonString(String json) { 51 | if (json == null) { 52 | return null; 53 | } 54 | try { 55 | return Transport.getJsonFactory().fromString(json, TableSchema.class); 56 | } catch (IOException e) { 57 | throw new RuntimeException( 58 | String.format("Cannot deserialize %s from a JSON string: %s.", TableSchema.class, json), e); 59 | } 60 | } 61 | 62 | static TableSchema getErrorTableSchema(){ 63 | List fields = new ArrayList<>(); 64 | fields.add(new TableFieldSchema().setName(ERROR_EVENT_TYPE_STR).setType("STRING").setMode("NULLABLE")); 65 | fields.add(new TableFieldSchema().setName(ERROR_EVENT_RAW_INPUT_STR).setType("STRING").setMode("NULLABLE")); 66 | fields.add(new TableFieldSchema().setName(ERROR_EVENT_ERROR_STR).setType("STRING").setMode("NULLABLE")); 67 | 68 | TableSchema tableSchema = new TableSchema(); 69 | tableSchema.setFields(fields); 70 | return tableSchema; 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /backend/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 4.0.0 6 | 7 | com.doitintl 8 | banias-dataflow 9 | 1.0-SNAPSHOT 10 | 11 | 1.8 12 | 1.8 13 | 2.9.0 14 | 1.7.25 15 | 20180130 16 | 17 | 18 | 19 | org.apache.beam 20 | beam-sdks-java-core 21 | ${beam.version} 22 | 23 | 24 | org.apache.beam 25 | beam-runners-direct-java 26 | ${beam.version} 27 | runtime 28 | 29 | 30 | org.apache.beam 31 | beam-runners-google-cloud-dataflow-java 32 | 
${beam.version} 33 | 34 | 35 | org.apache.beam 36 | beam-sdks-java-io-google-cloud-platform 37 | ${beam.version} 38 | 39 | 40 | com.google.cloud 41 | google-cloud-core 42 | 1.28.0 43 | 44 | 45 | com.google.cloud 46 | google-cloud-storage 47 | 1.28.0 48 | 49 | 50 | com.google.apis 51 | google-api-services-storage 52 | v1-rev125-1.21.0 53 | 54 | 55 | org.slf4j 56 | slf4j-jdk14 57 | ${slf4j.version} 58 | 59 | 60 | org.json 61 | json 62 | ${json.version} 63 | 64 | 65 | com.googlecode.json-simple 66 | json-simple 67 | 1.1.1 68 | 69 | 70 | io.grpc 71 | grpc-context 72 | 1.11.0 73 | 74 | 75 | org.apache.commons 76 | commons-lang3 77 | 3.7 78 | 79 | 80 | 81 | -------------------------------------------------------------------------------- /backend/Makefile: -------------------------------------------------------------------------------- 1 | # To run this make file you will need to give the next arguments. PROJECT_ID, DATASET_NAME, TOPIC_NAME and SUBSCRIPTION_NAME. 2 | # If you are lazy (like me), you can just set them up in the following lines :-) 3 | 4 | # PROJECT_ID= 5 | # DATASET_NAME= 6 | # TOPIC_NAME= 7 | # SUBSCRIPTION_NAME= 8 | # SCHEMAS_BUCKET= 9 | # TEMP_BUCKET= 10 | # TEMPLATE_BUCKET= 11 | # TEMPLATE_FILE= 12 | # DATASET_LOCATION= 13 | 14 | TABLE_NAME=banias 15 | TEMP_FOLDER=tmp 16 | STAGING_FOLDER=pipeline-staging 17 | EVENTS_SUBSCRIPTION_PATH="projects/$(PROJECT_ID)/subscriptions/$(SUBSCRIPTION_NAME)" 18 | ERRORS_TABLE_NAME=error 19 | DATASET=banias 20 | 21 | 22 | env_setup: _create_buckets _create_topic _create_subscription _create_dataset 23 | 24 | install: 25 | mvn install 26 | 27 | build: install 28 | mvn package 29 | 30 | run_local: build 31 | mvn exec:java -Dexec.mainClass=com.doitintl.banias.BaniasPipeline \ 32 | -Dexec.cleanupDaemonThreads=false \ 33 | -Dexec.args=" \ 34 | --project=$(PROJECT_ID) \ 35 | --tempLocation=gs://$(TEMP_BUCKET)/ \ 36 | --gcpTempLocation=gs://$(TEMP_BUCKET)/$(TEMP_FOLDER) \ 37 | --runner=DirectRunner \ 38 | --defaultWorkerLogLevel=DEBUG \ 39 | --eventsSubscriptionPath=$(EVENTS_SUBSCRIPTION_PATH) \ 40 | --errorsTableName=$(ERRORS_TABLE_NAME) \ 41 | --GCSSchemasBucketName=$(SCHEMAS_BUCKET) \ 42 | --dataset=$(DATASET) \ 43 | --numWorkers=3 \ 44 | " 45 | 46 | run: build 47 | mvn exec:java -Dexec.mainClass=com.doitintl.banias.BaniasPipeline \ 48 | -Dexec.cleanupDaemonThreads=false \ 49 | -Dexec.args=" \ 50 | --project=$(PROJECT_ID) \ 51 | --tempLocation=gs://$(TEMP_BUCKET)/ \ 52 | --gcpTempLocation=gs://$(TEMP_BUCKET)/$(TEMP_FOLDER) \ 53 | --stagingLocation=gs://$(TEMP_BUCKET)/$(STAGING_FOLDER) \ 54 | --runner=DataflowRunner \ 55 | --defaultWorkerLogLevel=DEBUG \ 56 | --eventsSubscriptionPath=$(EVENTS_SUBSCRIPTION_PATH) \ 57 | --errorsTableName=$(ERRORS_TABLE_NAME) \ 58 | --GCSSchemasBucketName=$(SCHEMAS_BUCKET) \ 59 | --dataset=$(DATASET) \ 60 | --numWorkers=3 \ 61 | " 62 | 63 | create_template: build 64 | mvn exec:java -Dexec.mainClass=com.doitintl.banias.BaniasPipeline \ 65 | -Dexec.cleanupDaemonThreads=false \ 66 | -Dexec.args=" \ 67 | --jobName="BaniasPipeline" \ 68 | --project=$(PROJECT_ID) \ 69 | --tempLocation=gs://$(TEMP_BUCKET)/ \ 70 | --gcpTempLocation=gs://$(TEMP_BUCKET)/$(TEMP_FOLDER) \ 71 | --stagingLocation=gs://$(TEMP_BUCKET)/$(STAGING_FOLDER) \ 72 | --templateLocation=gs://$(TEMPLATE_BUCKET)/$(TEMPLATE_FILE) \ 73 | --runner=DataflowRunner \ 74 | --defaultWorkerLogLevel=DEBUG \ 75 | --eventsSubscriptionPath=$(EVENTS_SUBSCRIPTION_PATH) \ 76 | --errorsTableName=$(ERRORS_TABLE_NAME) \ 77 | --GCSSchemasBucketName=$(SCHEMAS_BUCKET) \ 78 | 
--dataset=$(DATASET) \ 79 | --numWorkers=3 \ 80 | " 81 | 82 | ### Helpers: 83 | _create_buckets: 84 | gsutil ls -b gs://$(TEMP_BUCKET) || gsutil mb gs://$(TEMP_BUCKET) 85 | gsutil ls -b gs://$(SCHEMAS_BUCKET) || gsutil mb gs://$(SCHEMAS_BUCKET) 86 | ifdef TEMPLATE_BUCKET 87 | gsutil ls -b gs://$(TEMPLATE_BUCKET) || gsutil mb gs://$(TEMPLATE_BUCKET) 88 | endif 89 | 90 | _create_dataset: 91 | bq ls | grep -w $(DATASET) || bq --location=$(DATASET_LOCATION) mk --dataset $(PROJECT_ID):$(DATASET) 92 | 93 | _create_topic: 94 | gcloud pubsub --project $(PROJECT_ID) topics list | grep -w $(TOPIC_NAME) || gcloud pubsub --project $(PROJECT_ID) topics create $(TOPIC_NAME) 95 | 96 | _create_subscription: 97 | gcloud pubsub --project $(PROJECT_ID) subscriptions list | grep -w $(SUBSCRIPTION_NAME) || gcloud pubsub --project $(PROJECT_ID) subscriptions create $(SUBSCRIPTION_NAME) --topic=$(TOPIC_NAME) 98 | -------------------------------------------------------------------------------- /frontend/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "net" 6 | "net/http" 7 | "os" 8 | "os/signal" 9 | "strconv" 10 | "syscall" 11 | "time" 12 | 13 | cltr "github.com/doitintl/banias/frontend/collector" 14 | cfg "github.com/doitintl/banias/frontend/config" 15 | "github.com/oklog/oklog/pkg/group" 16 | 17 | "github.com/valyala/fasthttp" 18 | "go.opencensus.io/exporter/prometheus" 19 | "contrib.go.opencensus.io/exporter/stackdriver" 20 | "go.opencensus.io/stats/view" 21 | "go.uber.org/zap" 22 | "go.uber.org/zap/zapcore" 23 | ) 24 | 25 | var config *cfg.Config 26 | var metricsAddr string 27 | var httpAddr string 28 | var collector *cltr.Collector 29 | // We have to endpoints /track for reporting and /metrics for prometheus. 30 | 31 | func main() { 32 | config, _ = cfg.NewConfig() 33 | httpAddr = ":" + strconv.Itoa(config.Port) 34 | metricsAddr = ":" + strconv.Itoa(config.MetricsPort) 35 | atom := zap.NewAtomicLevel() 36 | if config.Debug { 37 | atom.SetLevel(zap.DebugLevel) 38 | } 39 | encoderCfg := zap.NewProductionEncoderConfig() 40 | logger := zap.New(zapcore.NewCore( 41 | zapcore.NewJSONEncoder(encoderCfg), 42 | zapcore.Lock(os.Stdout), 43 | atom, 44 | )) 45 | defer logger.Sync() 46 | logger.Info("Starting Banias....") 47 | var err error 48 | 49 | 50 | pExporter, err := prometheus.NewExporter(prometheus.Options{}) 51 | if err != nil { 52 | logger.Error("Error creating prometheus exporter ", zap.Error(err)) 53 | } else { 54 | // Export to Prometheus Monitoring. 55 | view.RegisterExporter(pExporter) 56 | } 57 | sExporter, err := stackdriver.NewExporter(stackdriver.Options{ProjectID: config.ProjectID}) 58 | if err != nil { 59 | logger.Error("Error creating stackdriver exporter ", zap.Error(err)) 60 | } else { 61 | // Export to Stackdriver Monitoring. 62 | view.RegisterExporter(sExporter) 63 | view.SetReportingPeriod(60 * time.Second) 64 | } 65 | collector, err = cltr.NewCollector(logger, config) 66 | if err != nil { 67 | logger.Fatal("Can't init Collector", zap.Error(err)) 68 | os.Exit(-1) 69 | } 70 | // create a run group. 71 | g := &group.Group{} 72 | 73 | initHttpHandler(g, logger) 74 | initMetricsEndpoint(g, logger, pExporter) 75 | initCancelInterrupt(g) 76 | // run the group and wait for exit. 
77 | logger.Info("exit", zap.Error(g.Run())) 78 | 79 | } 80 | 81 | func initHttpHandler(g *group.Group, logger *zap.Logger) { 82 | requestHandler := func(ctx *fasthttp.RequestCtx) { 83 | switch string(ctx.Path()) { 84 | case "/track": 85 | collector.Collect(ctx) 86 | case "/": 87 | ctx.Response.SetStatusCode(200) 88 | default: 89 | ctx.Error("Unsupported path", fasthttp.StatusNotFound) 90 | } 91 | } 92 | g.Add(func() error { 93 | logger.Info("HTTP Server", zap.String("transport", "HTTP"), zap.String("addr", httpAddr)) 94 | return fasthttp.ListenAndServe(httpAddr, requestHandler) 95 | }, func(error) { 96 | logger.Error("Error start serving") 97 | }) 98 | 99 | } 100 | func initMetricsEndpoint(g *group.Group, logger *zap.Logger, exporter *prometheus.Exporter) { 101 | http.Handle("/metrics", exporter) 102 | debugListener, err := net.Listen("tcp", metricsAddr) 103 | if err != nil { 104 | logger.Info("Error ", zap.String("transport", "debug/HTTP"), zap.String("during", "Listen"), zap.Error(err)) 105 | } 106 | g.Add(func() error { 107 | logger.Info("Promhttp", zap.String("transport", "debug/HTTP"), zap.String("addr", metricsAddr)) 108 | return http.Serve(debugListener, http.DefaultServeMux) 109 | }, func(error) { 110 | debugListener.Close() 111 | }) 112 | } 113 | 114 | func initCancelInterrupt(g *group.Group) { 115 | cancelInterrupt := make(chan struct{}) 116 | g.Add(func() error { 117 | c := make(chan os.Signal, 1) 118 | signal.Notify(c, syscall.SIGINT, syscall.SIGTERM) 119 | select { 120 | case sig := <-c: 121 | collector.Stop() 122 | return fmt.Errorf("received signal %s", sig) 123 | case <-cancelInterrupt: 124 | return nil 125 | } 126 | }, func(error) { 127 | close(cancelInterrupt) 128 | }) 129 | } 130 | -------------------------------------------------------------------------------- /backend/README.md: -------------------------------------------------------------------------------- 1 | ## General 2 | BigQuery is Google's serverless, highly scalable, low cost enterprise data warehouse designed to make all your data analysts productive. Because there is no infrastructure to manage, you can focus on analyzing data to find meaningful insights using familiar SQL and you don't need a database administrator. 3 | 4 | Banias aim to provide an easy way to ingest events into Google's BigQuery with the ability to have new schemas as events evolve with minimum code changes. 5 | To achieve this we are using Apache Beam on top of Google's DataFlow as our backend engine. 6 | The code here is a baseline for any transformation graph you would like to create in the future. You can always extend the BaseMap or the MapEvents to get some funky stuff into the graph :-) 7 | 8 | Our deployment is driven by a Makefile so you don't need to type too much to get things running... 9 | 10 | ## Prerequisits 11 | * Java(TM) SE Runtime Environment 1.8 12 | * Apache Maven 3.5 13 | 14 | ## Configuration 15 | * Project ID: PROJECT_ID= 16 | * BigQuery Dataset name: DATASET_NAME= 17 | * PubSub topic name: TOPIC_NAME= 18 | * PubSub subscription name: SUBSCRIPTION_NAME= 19 | * Schema's bucket: SCHEMAS_BUCKET= 20 | * Temp bucket: TEMP_BUCKET= 21 | * Template bucket: TEMPLATE_BUCKET= 22 | * Template file name: TEMPLATE_FILE= 23 | * Dataset location: DATASET_LOCATION= Can be asia-northeast1, EU or US. More info [here](https://cloud.google.com/bigquery/docs/dataset-locations) 24 | 25 | All bucket names should contain only the name. No 'gs://' prefix. 
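The Makefile ships with these variables commented out, so they can either be passed on the `make` command line (as in the examples under Running) or exported once in the shell and picked up from the environment. A sketch with purely hypothetical values:

```
# Hypothetical values -- substitute your own project, buckets and names.
export PROJECT_ID=my-project
export DATASET_NAME=banias
export TOPIC_NAME=banias
export SUBSCRIPTION_NAME=banias-sub
export SCHEMAS_BUCKET=my-banias-schemas      # bucket name only, no gs:// prefix
export TEMP_BUCKET=my-banias-temp
export TEMPLATE_BUCKET=my-banias-templates
export TEMPLATE_FILE=banias-template
export DATASET_LOCATION=US

make env_setup
```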
26 | 
27 | 
28 | ## Schemas
29 | BigQuery allows you to specify a table's schema when you load data into a table, and when you create an empty table.
30 | When you specify a table schema, you must supply each column's name and data type. You may optionally supply a column's description and mode.
31 | You can find more information about schemas and schema creation [here](https://cloud.google.com/bigquery/docs/schemas).
32 | Banias utilizes the standard schema format used by Google's BigQuery. You can find sample schemas under the test folder.
33 | 
34 | ### Schemas Guidelines
35 | * Once the pipeline is started, it will look for the schemas to work on in the SCHEMAS_BUCKET.
36 | * The schema files are in a json format.
37 | * The file name (without the '.json') defines the schema's key. It will be used to match the event with the schema.
38 | * The events will be mapped according to its event_name + event_version --> schema key ('event_name'_'event_version' = schema key)
39 | * If you want to add a new schema, just:
40 |   * Put the new json file in the bucket
41 |   * Restart your pipeline (it is recommended here to use a template)
42 | * Schemas cannot be modified.
43 | 
44 | ### Error table
45 | In the error table you will find all the elements that had issues (not having a schema is not an issue...).
46 | The error table contains the event type, the raw content and the error that got this event into the error table.
47 | 
48 | ## Running
49 | ### Setup environment
50 | ```
51 | make env_setup PROJECT_ID=my-project TOPIC_NAME=topic-name SUBSCRIPTION_NAME=subscription-name SCHEMAS_BUCKET=bucket-with-my-schemas TEMP_BUCKET=mytmpbucket TEMPLATE_BUCKET=my-templates-bucket TEMPLATE_FILE=template-name DATASET_LOCATION=US
52 | ```
53 | 
54 | ### Running locally
55 | ```
56 | make run_local PROJECT_ID=my-project DATASET_NAME=important-dataset TOPIC_NAME=topic-name SUBSCRIPTION_NAME=subscription-name SCHEMAS_BUCKET=bucket-with-my-schemas TEMP_BUCKET=mytmpbucket
57 | ```
58 | > Note: Sending an event with no schema to a pipeline running with DirectRunner will cause the runner to exit.
59 | 
60 | ### Running on Google's Dataflow
61 | ```
62 | make run PROJECT_ID=my-project DATASET_NAME=important-dataset TOPIC_NAME=topic-name SUBSCRIPTION_NAME=subscription-name SCHEMAS_BUCKET=bucket-with-my-schemas TEMP_BUCKET=mytmpbucket
63 | ```
64 | 
65 | ## Dataflow Pipeline Templates
66 | From [Google Cloud Dataflow Templates](https://cloud.google.com/dataflow/docs/templates/overview)
67 | > Cloud Dataflow templates allow you to stage your pipelines on Google Cloud Storage and execute them from a variety of environments. You can use one of the Google-provided templates or create your own.
68 | >
69 | > * Templates provide you with additional benefits compared to traditional Cloud Dataflow deployment, such as:
70 | > * Pipeline execution does not require you to recompile your code every time.
71 | > * You can execute your pipelines without the development environment and associated dependencies that are common with traditional deployment. This is useful for scheduling recurring batch jobs.
72 | > * Runtime parameters allow you to customize the execution.
73 | > * Non-technical users can execute templates with the Google Cloud Platform Console, gcloud command-line tool, or the REST API.
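Once a template has been staged (see the next section), a job can be launched from it without recompiling the pipeline. The repository's Makefile does not wrap this step; as a rough sketch, the gcloud CLI can start it along these lines, where the bucket, file and region values are placeholders matching the Makefile variables above:

```
# Illustrative only: launch a Dataflow job from the staged Banias template.
gcloud dataflow jobs run banias-pipeline \
  --gcs-location "gs://$TEMPLATE_BUCKET/$TEMPLATE_FILE" \
  --region us-central1
```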
74 | 75 | ### Creating a template for Google DataFlow 76 | ``` 77 | make run PROJECT_ID=my-project DATASET_NAME=important-dataset TOPIC_NAME=topic-name SUBSCRIPTION_NAME=subscription-name SCHEMAS_BUCKET=bucket-with-my-schemas TEMP_BUCKET=mytmpbucket TEMPLATE_BUCKET=my-templates-bucket TEMPLATE_FILE=template-name 78 | ``` 79 | -------------------------------------------------------------------------------- /backend/src/main/java/com/doitintl/banias/BaniasPipeline.java: -------------------------------------------------------------------------------- 1 | package com.doitintl.banias; 2 | 3 | import com.google.api.services.bigquery.model.TableReference; 4 | import com.google.api.services.bigquery.model.TableRow; 5 | import com.google.api.services.bigquery.model.TableSchema; 6 | import org.apache.beam.sdk.Pipeline; 7 | import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO; 8 | import org.apache.beam.sdk.io.gcp.bigquery.DynamicDestinations; 9 | import org.apache.beam.sdk.io.gcp.bigquery.TableDestination; 10 | import org.apache.beam.sdk.io.gcp.bigquery.TableRowJsonCoder; 11 | import org.apache.beam.sdk.io.gcp.pubsub.PubsubIO; 12 | import org.apache.beam.sdk.options.PipelineOptionsFactory; 13 | import org.apache.beam.sdk.transforms.ParDo; 14 | import org.apache.beam.sdk.transforms.SerializableFunction; 15 | import org.apache.beam.sdk.values.*; 16 | import org.slf4j.Logger; 17 | import org.slf4j.LoggerFactory; 18 | 19 | import java.util.concurrent.ConcurrentHashMap; 20 | 21 | class BaniasPipeline { 22 | private static final Logger LOG = LoggerFactory.getLogger(BaniasPipeline.class); 23 | private static final TupleTag outputTag= new TupleTag(){ 24 | private static final long serialVersionUID = 4585472024291962044L; 25 | }; 26 | private static final TupleTag errorsTag = new TupleTag(){ 27 | private static final long serialVersionUID = -8629153140156879025L; 28 | }; 29 | 30 | public static void main(String[] args){ 31 | ConcurrentHashMap schemas; 32 | 33 | PipelineOptionsFactory.register(BaniasPipelineOptions.class); 34 | 35 | BaniasPipelineOptions pipelineOptions = PipelineOptionsFactory 36 | .fromArgs(args) 37 | .withValidation() 38 | .as(BaniasPipelineOptions.class); 39 | 40 | pipelineOptions.setStreaming(true); 41 | 42 | String tableDestinationPrefix = pipelineOptions.getProject() + ":" + pipelineOptions.getDataset() + "."; 43 | schemas = SchemaHelpers.loadSchemaFromGCS(pipelineOptions.getGCSSchemasBucketName()); 44 | 45 | // Define pipeline 46 | Pipeline pipeline = Pipeline.create(pipelineOptions); 47 | 48 | //Events handling 49 | PCollectionTuple mappedEvents = pipeline 50 | .apply("Read Events from PubSub Messages", PubsubIO 51 | .readStrings() 52 | .fromSubscription(pipelineOptions.getEventsSubscriptionPath())) 53 | .apply("Map Events", ParDo.of(new MapEvents(errorsTag)) 54 | .withOutputTags(outputTag, TupleTagList.of(errorsTag))); 55 | 56 | PCollection events = mappedEvents.get(outputTag); 57 | 58 | events.apply( 59 | "Write To Dynamic Table on BQ", 60 | BigQueryIO.write() 61 | .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED) 62 | .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND) 63 | .to(new EventDestinations(schemas, tableDestinationPrefix)) 64 | .withFormatFunction( 65 | (SerializableFunction) 66 | input -> { 67 | TableRow output = input.clone(); 68 | output.remove("event_version"); 69 | output.remove("event_name"); 70 | return output; 71 | })); 72 | 73 | //Error handling 74 | PCollection errors = mappedEvents.get(errorsTag); 75 | 
errors.setCoder(TableRowJsonCoder.of()); 76 | 77 | TableReference tableRef = new TableReference() 78 | .setProjectId(pipelineOptions.getProject()) 79 | .setDatasetId(pipelineOptions.getDataset()) 80 | .setTableId(pipelineOptions.getErrorsTableName()); 81 | 82 | errors.apply("Write Errors to BigQuery", 83 | BigQueryIO.writeTableRows().to(tableRef) 84 | .withSchema(SchemaHelpers.getErrorTableSchema()) 85 | .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED) 86 | .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND)); 87 | 88 | pipeline.run(); 89 | } 90 | 91 | private static class EventDestinations extends DynamicDestinations { 92 | private static final long serialVersionUID = -2839237244568662696L; 93 | private final ConcurrentHashMap schemas; 94 | private final String tableDestinationPrefix; 95 | 96 | private EventDestinations(ConcurrentHashMap schemas, String tableDestinationPrefix) { 97 | this.schemas = schemas; 98 | this.tableDestinationPrefix = tableDestinationPrefix; 99 | } 100 | 101 | @Override 102 | public String getDestination(ValueInSingleWindow event) { 103 | if (event.getValue() == null) 104 | return ""; 105 | return parseDestination(event.getValue()); 106 | } 107 | 108 | @Override 109 | public TableDestination getTable(String tableName) { 110 | return new TableDestination( 111 | tableDestinationPrefix + tableName, "Table " + tableName); 112 | } 113 | 114 | @Override 115 | public TableSchema getSchema(String tableName) { 116 | return SchemaHelpers.fromJsonString(schemas.get(tableName)); 117 | } 118 | 119 | private String parseDestination(TableRow row) { 120 | return row.get("event_name").toString() 121 | + "_" 122 | + row.get("event_version").toString(); 123 | } 124 | } 125 | } 126 | -------------------------------------------------------------------------------- /frontend/deploy/frontend-deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: banias-frontend 5 | spec: 6 | replicas: 3 7 | selector: 8 | matchLabels: 9 | app: banias-frontend 10 | template: 11 | metadata: 12 | labels: 13 | app: banias-frontend 14 | k8s-app: banias-frontend 15 | spec: 16 | initContainers: 17 | - name: init-sysctl 18 | image: busybox 19 | command: 20 | - /bin/sh 21 | - -c 22 | - | 23 | sysctl -w net.ipv4.ip_forward=0 24 | sysctl -w net.ipv4.conf.default.rp_filter=1 25 | sysctl -w net.ipv4.conf.default.accept_source_route=0 26 | sysctl -w net.ipv4.icmp_echo_ignore_broadcasts=1 27 | sysctl -w net.ipv4.icmp_ignore_bogus_error_responses=1 28 | sysctl -w kernel.msgmnb=65536 29 | sysctl -w kernel.msgmax=65536 30 | sysctl -w kernel.shmmax=68719476736 31 | sysctl -w kernel.shmall=4294967296 32 | sysctl -w net.core.somaxconn=50000 33 | sysctl -w fs.file-max=100000 34 | sysctl -w net.ipv4.tcp_syncookies=1 35 | sysctl -w net.ipv4.conf.all.log_martians=0 36 | sysctl -w net.core.somaxconn=50000 37 | sysctl -w net.ipv4.tcp_max_syn_backlog=30000 38 | sysctl -w net.ipv4.conf.all.send_redirects=0 39 | sysctl -w net.ipv4.conf.all.accept_redirects=0 40 | sysctl -w net.ipv4.conf.all.accept_source_route=0 41 | sysctl -w net.ipv6.conf.all.forwarding=0 42 | sysctl -w net.ipv4.tcp_slow_start_after_idle=0 43 | sysctl -w net.ipv4.tcp_window_scaling=1 44 | sysctl -w net.ipv4.tcp_timestamp=1 45 | sysctl -w net.ipv4.tcp_sack=1 46 | sysctl -w net.ipv4.tcp_congestion_control=htcp 47 | sysctl -w net.ipv4.tcp_keepalive_time=60 48 | sysctl -w 
net.netfilter.nf_conntrack_tcp_timeout_time_wait=10 49 | sysctl -w net.netfilter.nf_conntrack_tcp_timeout_established=300 50 | sysctl -w net.netfilter.nf_conntrack_generic_timeout=300 51 | sysctl -w net.ipv4.tcp_max_tw_buckets=2000000 52 | sysctl -w net.ipv4.tcp_fin_timeout=10 53 | sysctl -w net.ipv4.tcp_tw_reuse=1 54 | sysctl -w net.ipv4.tcp_keepalive_intvl=15 55 | sysctl -w net.ipv4.tcp_keepalive_probes=5 56 | 57 | imagePullPolicy: IfNotPresent 58 | securityContext: 59 | privileged: true 60 | containers: 61 | - name: "banias-frontend" 62 | image: "gcr.io/my-project/banias-frontend:test" 63 | resources: 64 | limits: 65 | cpu: "3" 66 | memory: "12G" 67 | requests: 68 | cpu: 500m 69 | memory: 200Mi 70 | volumeMounts: 71 | - name: google-cloud-key 72 | mountPath: /var/secrets/google 73 | ports: 74 | - name: http 75 | containerPort: 8081 76 | - name: prom-metrics 77 | containerPort: 8080 78 | env: 79 | - name: "BANIAS_PROJECTID" 80 | valueFrom: 81 | configMapKeyRef: 82 | key: "BANIAS_PROJECTID" 83 | name: "banias-frontend-config" 84 | - name: "BANIAS_DEBUG" 85 | valueFrom: 86 | configMapKeyRef: 87 | key: "BANIAS_DEBUG" 88 | name: "banias-frontend-config" 89 | - name: "BANIAS_TOPIC" 90 | valueFrom: 91 | configMapKeyRef: 92 | key: "BANIAS_TOPIC" 93 | name: "banias-frontend-config" 94 | - name: "BANIAS_PORT" 95 | valueFrom: 96 | configMapKeyRef: 97 | key: "BANIAS_PORT" 98 | name: "banias-frontend-config" 99 | - name: "BANIAS_METRICSPORT" 100 | valueFrom: 101 | configMapKeyRef: 102 | key: "BANIAS_METRICSPORT" 103 | name: "banias-frontend-config" 104 | - name: "BANIAS_PUBSUBMAXBATCH" 105 | valueFrom: 106 | configMapKeyRef: 107 | key: "BANIAS_PUBSUBMAXBATCH" 108 | name: "banias-frontend-config" 109 | - name: "BANIAS_PUBSUBAGGRIGATORS" 110 | valueFrom: 111 | configMapKeyRef: 112 | key: "BANIAS_PUBSUBAGGRIGATORS" 113 | name: "banias-frontend-config" 114 | - name: "BANIAS_PUBSUBMAXPUBLISHDELAY" 115 | valueFrom: 116 | configMapKeyRef: 117 | key: "BANIAS_PUBSUBMAXPUBLISHDELAY" 118 | name: "banias-frontend-config" 119 | - name: "BANIAS_MAXPUBSUBGOROUTINESAMOUNT" 120 | valueFrom: 121 | configMapKeyRef: 122 | key: "BANIAS_MAXPUBSUBGOROUTINESAMOUNT" 123 | name: "banias-frontend-config" 124 | - name: "BANIAS_MAXPUBSUBGOROUTINEIDLEDURATION" 125 | valueFrom: 126 | configMapKeyRef: 127 | key: "BANIAS_MAXPUBSUBGOROUTINEIDLEDURATION" 128 | name: "banias-frontend-config" 129 | - name: GOOGLE_APPLICATION_CREDENTIALS 130 | value: /var/secrets/google/key.json 131 | imagePullPolicy: Always 132 | volumes: 133 | - name: google-cloud-key 134 | secret: 135 | secretName: pubsub-key 136 | 137 | 138 | -------------------------------------------------------------------------------- /frontend/publisher/publisher.go: -------------------------------------------------------------------------------- 1 | package publisher 2 | 3 | import ( 4 | "context" 5 | "sync" 6 | "time" 7 | 8 | gpubsub "cloud.google.com/go/pubsub" 9 | cfg "github.com/doitintl/banias/frontend/config" 10 | "github.com/doitintl/banias/frontend/types" 11 | "github.com/henrylee2cn/goutil/pool" 12 | "go.opencensus.io/stats" 13 | 14 | "go.opencensus.io/stats/view" 15 | "go.opencensus.io/tag" 16 | "go.uber.org/zap" 17 | ) 18 | 19 | var ( 20 | publisherCounter *stats.Float64Measure 21 | successKey tag.Key 22 | ) 23 | 24 | var msgPool *sync.Pool 25 | 26 | func init() { 27 | msgPool = &sync.Pool{ 28 | New: func() interface{} { 29 | return new(gpubsub.Message) 30 | }, 31 | } 32 | successKey, _ = tag.NewKey("banias/keys/code") 33 | publisherCounter = 
stats.Float64("banias/measures/published_count", "Count of pub sub published messages", "1") 34 | view.Register( 35 | &view.View{ 36 | Name: "publish_count", 37 | Description: "Count of pub sub published messages", 38 | TagKeys: []tag.Key{successKey}, 39 | Measure: publisherCounter, 40 | Aggregation: view.Sum(), 41 | }) 42 | view.SetReportingPeriod(60 * time.Second) 43 | 44 | } 45 | 46 | type Publisher struct { 47 | bqEvents <-chan types.EventMsg 48 | doneChan <-chan bool 49 | logger *zap.Logger 50 | gp *pool.GoPool 51 | gpubsubClient gpubsub.Client 52 | config *cfg.Config 53 | topic *gpubsub.Topic 54 | client *gpubsub.Client 55 | wg *sync.WaitGroup 56 | id int 57 | collectorPool *sync.Pool 58 | } 59 | 60 | func GetClient(projectid string) (*gpubsub.Client, error) { 61 | ctx := context.Background() 62 | client, err := gpubsub.NewClient(ctx, projectid) 63 | return client, err 64 | } 65 | func createTopicIfNotExists(topic string, logger *zap.Logger, client *gpubsub.Client) (*gpubsub.Topic, error) { 66 | ctx := context.Background() 67 | // Create a topic to subscribe to. 68 | t := client.Topic(topic) 69 | ok, err := t.Exists(ctx) 70 | if err != nil { 71 | logger.Error("Pub/Sub topic exists error", zap.Error(err)) 72 | return t, err 73 | } 74 | if ok { 75 | return t, err 76 | } 77 | t, err = client.CreateTopic(ctx, topic) 78 | if err != nil { 79 | return t, err 80 | } 81 | return t, err 82 | } 83 | 84 | func NewPublisher(logger *zap.Logger, bqEvents <-chan types.EventMsg, config *cfg.Config, collectorPool *sync.Pool, client *gpubsub.Client, id int) (*Publisher, error) { 85 | logger.Debug("Creating a new publisher", zap.Int("id", id)) 86 | gp := pool.NewGoPool(int(config.MaxPubSubGoroutinesAmount), config.MaxPubSubGoroutineIdleDuration*time.Second) 87 | topic, err := createTopicIfNotExists(config.Topic, logger, client) 88 | logger.Debug("Done with topic") 89 | p := Publisher{ 90 | bqEvents: bqEvents, 91 | logger: logger, 92 | gp: gp, 93 | config: config, 94 | topic: topic, 95 | wg: new(sync.WaitGroup), 96 | id: id, 97 | collectorPool: collectorPool, 98 | } 99 | if err != nil { 100 | logger.Error("Error creating topic", zap.Error(err)) 101 | } 102 | logger.Debug("Done with NewPublisher") 103 | return &p, err 104 | } 105 | 106 | func (c *Publisher) Publish(messages []gpubsub.Message, t *time.Timer, maxDelay time.Duration) { 107 | c.wg.Add(1) 108 | c.gp.Go(func() { 109 | var total int64 = 0 110 | var errnum int64 = 0 111 | ctx := context.Background() 112 | var results []*gpubsub.PublishResult 113 | for i := range messages { 114 | r := c.topic.Publish(ctx, &messages[i]) 115 | total++ 116 | results = append(results, r) 117 | } 118 | for _, r := range results { 119 | id, err := r.Get(ctx) 120 | if err != nil { 121 | c.logger.Error("Error Publishing", zap.Error(err), zap.String("ID", id)) 122 | errnum++ 123 | } 124 | } 125 | messages = nil 126 | ocCtx, _ := tag.New(ctx, tag.Insert(successKey, "Success")) 127 | stats.Record(ocCtx, publisherCounter.M(float64(total-errnum))) 128 | ocCtx, _ = tag.New(ctx, tag.Insert(successKey, "Failures")) 129 | stats.Record(ocCtx, publisherCounter.M(float64(errnum))) 130 | c.logger.Debug("Published ", zap.Int64("Success", total-errnum), zap.Int64("Failures", errnum)) 131 | t.Reset(maxDelay) 132 | c.wg.Done() 133 | }) 134 | 135 | } 136 | 137 | func (c *Publisher) Run() { 138 | 139 | c.logger.Debug("Starting Run") 140 | messages := make([]gpubsub.Message, 0, c.config.PubsubMaxBatch) 141 | t := time.NewTimer(c.config.PubsubMaxPublishDelay) 142 | for { 143 | select { 144 | 
case <-t.C:
145 | if len(messages) == 0 {
146 | c.logger.Debug("skipping publish due to no messages")
147 | t.Reset(c.config.PubsubMaxPublishDelay)
148 | continue
149 | }
150 | c.logger.Debug("Calling publish due to time", zap.Int("Number of messages", len(messages)), zap.Int("Aggregator ID", c.id))
151 | c.Publish(messages, t, c.config.PubsubMaxPublishDelay)
152 | messages = nil
153 | case event := <-c.bqEvents:
154 | buf, err := event.MarshalJSON()
155 | if err != nil {
156 | c.logger.Error("Error marshaling event", zap.Error(err))
157 | continue
158 | }
159 | m := msgPool.Get().(*gpubsub.Message)
160 | m.Data = buf
161 | messages = append(messages, *m)
162 | msgPool.Put(m)
163 | c.collectorPool.Put(&event)
164 | if len(messages) == c.config.PubsubMaxBatch {
165 | 
166 | c.logger.Debug("Calling publish due to capacity", zap.Int("Number of messages", len(messages)), zap.Int("Aggregator ID", c.id))
167 | c.Publish(messages, t, c.config.PubsubMaxPublishDelay)
168 | messages = nil
169 | }
170 | case <-c.doneChan:
171 | c.logger.Info("Got a done signal")
172 | c.Stop()
173 | return
174 | }
175 | }
176 | 
177 | }
178 | 
179 | func (c *Publisher) Stop() {
180 | c.logger.Info("Stopping topic publish")
181 | c.topic.Stop()
182 | c.logger.Info("Stopping worker pool")
183 | c.gp.Stop()
184 | 
185 | }
186 | 
-------------------------------------------------------------------------------- /frontend/collector/collector.go: --------------------------------------------------------------------------------
1 | package collector
2 | 
3 | import (
4 | "context"
5 | "encoding/json"
6 | "strconv"
7 | "sync"
8 | "time"
9 | 
10 | "github.com/buger/jsonparser"
11 | cfg "github.com/doitintl/banias/frontend/config"
12 | "github.com/doitintl/banias/frontend/publisher"
13 | "github.com/doitintl/banias/frontend/types"
14 | "github.com/henrylee2cn/goutil/pool"
15 | "github.com/valyala/fasthttp"
16 | "go.opencensus.io/stats"
17 | "go.opencensus.io/tag"
18 | 
19 | "go.opencensus.io/stats/view"
20 | "go.uber.org/zap"
21 | )
22 | 
23 | var (
24 | strContentType = []byte("Content-Type")
25 | strApplicationJSON = []byte("application/json")
26 | msgPool *sync.Pool
27 | paths = [][]string{
28 | []string{"type", "event_version"},
29 | []string{"type", "event_name"},
30 | []string{"payload"},
31 | }
32 | DefaultLatencyDistribution = view.Distribution(0, 1, 2, 3, 4, 5, 6, 8, 10, 13, 16, 20, 25, 30, 40, 50, 65, 80, 100, 130, 160, 200, 250, 300, 400, 500, 650, 800, 1000, 2000, 5000, 10000, 20000, 50000, 100000)
33 | requestCounter *stats.Float64Measure
34 | requestlatency *stats.Float64Measure
35 | codeKey tag.Key
36 | )
37 | 
38 | func init() {
39 | msgPool = &sync.Pool{
40 | New: func() interface{} {
41 | return new(types.EventMsg)
42 | },
43 | }
44 | codeKey, _ = tag.NewKey("banias/keys/code")
45 | requestCounter = stats.Float64("banias/measures/request_count", "Count of HTTP requests processed", "1")
46 | requestlatency = stats.Float64("banias/measures/request_latency", "Latency distribution of HTTP requests", "ms")
47 | view.Register(
48 | &view.View{
49 | Name: "request_count",
50 | Description: "Count of HTTP requests processed",
51 | TagKeys: []tag.Key{codeKey},
52 | Measure: requestCounter,
53 | Aggregation: view.Count(),
54 | })
55 | view.Register(
56 | &view.View{
57 | Name: "request_latency",
58 | Description: "Latency distribution of HTTP requests",
59 | TagKeys: []tag.Key{codeKey},
60 | Measure: requestlatency,
61 | Aggregation: DefaultLatencyDistribution,
62 | })
63 | 
64 | view.SetReportingPeriod(60 * time.Second)
65 | 
66 | }
67 | 
68 | type Collector struct {
69 | bqEvents chan<- types.EventMsg
70 | doneChan chan<- bool
71 | logger *zap.Logger
72 | config *cfg.Config
73 | gp *pool.GoPool
74 | }
75 | 
76 | func NewCollector(logger *zap.Logger, config *cfg.Config) (*Collector, error) {
77 | bqEvents := make(chan types.EventMsg, int(config.PubsubMaxBatch*config.PubSubAggrigators))
78 | doneChan := make(chan bool)
79 | gp := pool.NewGoPool(int(config.PubSubAggrigators), config.MaxPubSubGoroutineIdleDuration*time.Second)
80 | c := Collector{
81 | bqEvents: bqEvents,
82 | doneChan: doneChan,
83 | logger: logger,
84 | config: config,
85 | gp: gp,
86 | }
87 | 
88 | for i := 0; i < config.PubSubAggrigators; i++ {
89 | client, err := publisher.GetClient(config.ProjectID)
90 | if err != nil {
91 | logger.Error("Error creating a Pub/Sub client", zap.Error(err))
92 | return &c, err
93 | }
94 | pub, err := publisher.NewPublisher(logger, bqEvents, config, msgPool, client, i)
95 | if err != nil {
96 | logger.Error("Error creating a publisher", zap.Error(err))
97 | return &c, err
98 | }
99 | c.gp.Go(pub.Run)
100 | }
101 | return &c, nil
102 | }
103 | func (c *Collector) Collect(ctx *fasthttp.RequestCtx) {
104 | defer func(begin time.Time) {
105 | responseTime := float64(time.Since(begin).Nanoseconds()) / 1e6 // milliseconds, matching the "ms" unit of requestlatency
106 | occtx, _ := tag.New(context.Background(), tag.Insert(codeKey, strconv.Itoa(ctx.Response.StatusCode())))
107 | stats.Record(occtx, requestCounter.M(1))
108 | stats.Record(occtx, requestlatency.M(responseTime))
109 | 
110 | c.logger.Debug(string(ctx.Path()), zap.String("method", string(ctx.Method())), zap.String("code", strconv.Itoa(ctx.Response.StatusCode())))
111 | }(time.Now())
112 | 
113 | if !ctx.IsPost() {
114 | ctx.Error("Unsupported method", fasthttp.StatusMethodNotAllowed)
115 | return
116 | }
117 | data := []byte(ctx.PostBody())
118 | var errors []types.Error
119 | senderID, err := jsonparser.GetString(data, "sender_id")
120 | if err != nil {
121 | ctx.Error(err.Error(), fasthttp.StatusNotAcceptable)
122 | return
123 | }
124 | i := 0
125 | jsonparser.ArrayEach(data, func(events []byte, dataType jsonparser.ValueType, offset int, err error) {
126 | msg := msgPool.Get().(*types.EventMsg)
127 | i++
128 | counter := 0
129 | jsonparser.EachKey(events, func(idx int, value []byte, vt jsonparser.ValueType, err error) {
130 | switch idx {
131 | case 0:
132 | t, _ := jsonparser.ParseString(value)
133 | msg.Event.TypeField.EventVersionField = t
134 | counter = counter + 1
135 | case 1:
136 | t, _ := jsonparser.ParseString(value)
137 | msg.Event.TypeField.EventNameField = t
138 | counter = counter + 2
139 | case 2:
140 | counter = counter + 4
141 | t, _, _, _ := jsonparser.Get(value)
142 | err = json.Unmarshal(t, &msg.Event.PayloadField)
143 | if err != nil {
144 | c.logger.Error("Error getting payload", zap.Error(err))
145 | }
146 | }
147 | }, paths...)
148 | // We expect to get three fields, so we use a poor man's bit field.
All should sum up to 7 (1+2+4) if 149 | // this is not the case then we got a missing field error in our hands 150 | if counter != 7 { 151 | var errString string 152 | switch counter { 153 | case 0: 154 | errString = "type/event_version type/event_name payload" 155 | case 1: 156 | errString = "type/event_name payload" 157 | case 2: 158 | errString = "type/event_version payload" 159 | case 3: 160 | errString = "payload" 161 | case 4: 162 | errString = "type/event_version type/event_name" 163 | case 5: 164 | errString = "type/event_name" 165 | case 6: 166 | errString = "type/event_version" 167 | } 168 | var eventErr *types.Error 169 | eventErr = types.NewError(uint64(i), "Missing fields or ill formatted "+errString) 170 | errors = append(errors, *eventErr) 171 | 172 | } else { 173 | msg.SenderID = senderID 174 | c.bqEvents <- *msg 175 | 176 | } 177 | 178 | }, "events") 179 | ctx.Response.Header.SetCanonical(strContentType, strApplicationJSON) 180 | ctx.Response.SetStatusCode(202) 181 | json.NewEncoder(ctx).Encode(errors) 182 | 183 | } 184 | 185 | func (c *Collector) Stop() { 186 | c.gp.Stop() 187 | go func() { 188 | c.doneChan <- true 189 | }() 190 | } 191 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # See http://help.github.com/ignore-files/ for more about ignoring files. 2 | 3 | # compiled output 4 | /dist 5 | /dist-server 6 | /tmp 7 | /out-tsc 8 | 9 | # dependencies 10 | /node_modules 11 | 12 | # IDEs and editors 13 | /.idea 14 | .project 15 | .classpath 16 | .c9/ 17 | *.launch 18 | .settings/ 19 | *.sublime-workspace 20 | 21 | # IDE - VSCode 22 | .vscode/* 23 | !.vscode/settings.json 24 | !.vscode/tasks.json 25 | !.vscode/launch.json 26 | !.vscode/extensions.json 27 | 28 | # misc 29 | /.sass-cache 30 | /connect.lock 31 | /coverage 32 | /libpeerconnection.log 33 | npm-debug.log 34 | testem.log 35 | /typings 36 | 37 | # e2e 38 | /e2e/*.js 39 | /e2e/*.map 40 | 41 | # System Files 42 | .DS_Store 43 | Thumbs.db 44 | 45 | # Created by https://www.gitignore.io/api/go,node,python,angular,pycharm+all 46 | 47 | ### Angular ### 48 | ## Angular ## 49 | # compiled output 50 | /dist 51 | /tmp 52 | /app/**/*.js 53 | /app/**/*.js.map 54 | 55 | # dependencies 56 | /node_modules 57 | /bower_components 58 | 59 | # IDEs and editors 60 | /.idea 61 | 62 | # misc 63 | /.sass-cache 64 | /connect.lock 65 | /coverage/* 66 | /libpeerconnection.log 67 | npm-debug.log 68 | testem.log 69 | /typings 70 | 71 | # e2e 72 | /e2e/*.js 73 | /e2e/*.map 74 | 75 | #System Files 76 | .DS_Store 77 | 78 | ### Go ### 79 | # Binaries for programs and plugins 80 | *.exe 81 | *.dll 82 | *.so 83 | *.dylib 84 | 85 | # Test binary, build with `go test -c` 86 | *.test 87 | 88 | # Output of the go coverage tool, specifically when used with LiteIDE 89 | *.out 90 | 91 | # Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736 92 | .glide/ 93 | 94 | ### Node ### 95 | # Logs 96 | logs 97 | *.log 98 | npm-debug.log* 99 | yarn-debug.log* 100 | yarn-error.log* 101 | 102 | # Runtime data 103 | pids 104 | *.pid 105 | *.seed 106 | *.pid.lock 107 | 108 | # Directory for instrumented libs generated by jscoverage/JSCover 109 | lib-cov 110 | 111 | # Coverage directory used by tools like istanbul 112 | coverage 113 | 114 | # nyc test coverage 115 | .nyc_output 116 | 117 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 118 | .grunt 119 | 120 | # Bower 
dependency directory (https://bower.io/) 121 | bower_components 122 | 123 | # node-waf configuration 124 | .lock-wscript 125 | 126 | # Compiled binary addons (http://nodejs.org/api/addons.html) 127 | build/Release 128 | 129 | # Dependency directories 130 | node_modules/ 131 | jspm_packages/ 132 | 133 | # Typescript v1 declaration files 134 | typings/ 135 | 136 | # Optional npm cache directory 137 | .npm 138 | 139 | # Optional eslint cache 140 | .eslintcache 141 | 142 | # Optional REPL history 143 | .node_repl_history 144 | 145 | # Output of 'npm pack' 146 | *.tgz 147 | 148 | # Yarn Integrity file 149 | .yarn-integrity 150 | 151 | # dotenv environment variables file 152 | .env 153 | 154 | 155 | ### PyCharm+all ### 156 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm 157 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 158 | 159 | # User-specific stuff: 160 | .idea/**/workspace.xml 161 | .idea/**/tasks.xml 162 | .idea/dictionaries 163 | 164 | # Sensitive or high-churn files: 165 | .idea/**/dataSources/ 166 | .idea/**/dataSources.ids 167 | .idea/**/dataSources.xml 168 | .idea/**/dataSources.local.xml 169 | .idea/**/sqlDataSources.xml 170 | .idea/**/dynamic.xml 171 | .idea/**/uiDesigner.xml 172 | 173 | # Gradle: 174 | .idea/**/gradle.xml 175 | .idea/**/libraries 176 | 177 | # CMake 178 | cmake-build-debug/ 179 | 180 | # Mongo Explorer plugin: 181 | .idea/**/mongoSettings.xml 182 | 183 | ## File-based project format: 184 | *.iws 185 | 186 | ## Plugin-specific files: 187 | 188 | # IntelliJ 189 | /out/ 190 | 191 | # mpeltonen/sbt-idea plugin 192 | .idea_modules/ 193 | 194 | # JIRA plugin 195 | atlassian-ide-plugin.xml 196 | 197 | # Cursive Clojure plugin 198 | .idea/replstate.xml 199 | 200 | # Ruby plugin and RubyMine 201 | /.rakeTasks 202 | 203 | # Crashlytics plugin (for Android Studio and IntelliJ) 204 | com_crashlytics_export_strings.xml 205 | crashlytics.properties 206 | crashlytics-build.properties 207 | fabric.properties 208 | 209 | ### PyCharm+all Patch ### 210 | # Ignores the whole idea folder 211 | # See https://github.com/joeblau/gitignore.io/issues/186 and https://github.com/joeblau/gitignore.io/issues/360 212 | 213 | .idea/ 214 | 215 | ### Python ### 216 | # Byte-compiled / optimized / DLL files 217 | __pycache__/ 218 | *.py[cod] 219 | *$py.class 220 | 221 | # C extensions 222 | 223 | # Distribution / packaging 224 | .Python 225 | build/ 226 | develop-eggs/ 227 | dist/ 228 | downloads/ 229 | eggs/ 230 | .eggs/ 231 | lib/ 232 | lib64/ 233 | parts/ 234 | sdist/ 235 | var/ 236 | wheels/ 237 | *.egg-info/ 238 | .installed.cfg 239 | *.egg 240 | 241 | # PyInstaller 242 | # Usually these files are written by a python script from a template 243 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
244 | *.manifest 245 | *.spec 246 | 247 | # Installer logs 248 | pip-log.txt 249 | pip-delete-this-directory.txt 250 | 251 | # Unit test / coverage reports 252 | htmlcov/ 253 | .tox/ 254 | .coverage 255 | .coverage.* 256 | .cache 257 | nosetests.xml 258 | coverage.xml 259 | *.cover 260 | .hypothesis/ 261 | 262 | # Translations 263 | *.mo 264 | *.pot 265 | 266 | # Django stuff: 267 | local_settings.py 268 | 269 | # Flask stuff: 270 | instance/ 271 | .webassets-cache 272 | 273 | # Scrapy stuff: 274 | .scrapy 275 | 276 | # Sphinx documentation 277 | docs/_build/ 278 | 279 | # PyBuilder 280 | target/ 281 | 282 | # Jupyter Notebook 283 | .ipynb_checkpoints 284 | 285 | # pyenv 286 | .python-version 287 | 288 | # celery beat schedule file 289 | celerybeat-schedule.* 290 | 291 | # SageMath parsed files 292 | *.sage.py 293 | 294 | # Environments 295 | .venv 296 | env/ 297 | venv/ 298 | ENV/ 299 | env.bak/ 300 | venv.bak/ 301 | 302 | # Spyder project settings 303 | .spyderproject 304 | .spyproject 305 | 306 | # Rope project settings 307 | .ropeproject 308 | 309 | # mkdocs documentation 310 | /site 311 | 312 | # mypy 313 | .mypy_cache/ 314 | 315 | 316 | # End of https://www.gitignore.io/api/go,node,python,angular,pycharm+all 317 | /web-app/node_modules/ 318 | add 319 | .idea 320 | .idea/banias.iml 321 | .idea/inspectionProfiles 322 | .idea/modules.xml 323 | .idea/workspace.xml 324 | vendor 325 | /frontend/ignore.me 326 | -------------------------------------------------------------------------------- /frontend/go.sum: -------------------------------------------------------------------------------- 1 | cloud.google.com/go v0.23.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= 2 | cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= 3 | cloud.google.com/go v0.34.0 h1:eOI3/cP2VTU6uZLDYAoic+eyzzB9YyGmJ7eIjl8rOPg= 4 | cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= 5 | contrib.go.opencensus.io/exporter/ocagent v0.4.2 h1:EjvhWhqxJpIUEBcTJoUDUyScfZ/30ehPEvDmvj9v4DA= 6 | contrib.go.opencensus.io/exporter/ocagent v0.4.2/go.mod h1:YuG83h+XWwqWjvCqn7vK4KSyLKhThY3+gNGQ37iS2V0= 7 | contrib.go.opencensus.io/exporter/stackdriver v0.9.0 h1:ryYbINHiPdnAc8Zn8HGY7r3HXzqhQapzZc3zdhkaGqU= 8 | contrib.go.opencensus.io/exporter/stackdriver v0.9.0/go.mod h1:hNe5qQofPbg6bLQY5wHCvQ7o+2E5P8PkegEuQ+MyRw0= 9 | git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg= 10 | github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= 11 | github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= 12 | github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= 13 | github.com/aws/aws-sdk-go v1.15.31 h1:ExgD8W8QDeD8Y4CPVlcP/laumxvikDbkVWB+VCHgXxA= 14 | github.com/aws/aws-sdk-go v1.15.31/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0= 15 | github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973 h1:xJ4a3vCFaGF/jqvzLMYoU8P317H5OQ+Via4RmuPwCS0= 16 | github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= 17 | github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23 h1:D21IyuvjDCshj1/qq+pCNd3VZOAEI9jy6Bi131YlXgI= 18 | github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s= 19 | github.com/census-instrumentation/opencensus-proto 
v0.1.0-0.20181214143942-ba49f56771b8/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= 20 | github.com/census-instrumentation/opencensus-proto v0.1.0 h1:VwZ9smxzX8u14/125wHIX7ARV+YhR+L4JADswwxWK0Y= 21 | github.com/census-instrumentation/opencensus-proto v0.1.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= 22 | github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= 23 | github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= 24 | github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= 25 | github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= 26 | github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= 27 | github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 28 | github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I= 29 | github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= 30 | github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= 31 | github.com/go-ini/ini v1.25.4 h1:Mujh4R/dH6YL8bxuISne3xX2+qcQ9p0IxKAP6ExWoUo= 32 | github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= 33 | github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58= 34 | github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= 35 | github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E= 36 | github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= 37 | github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= 38 | github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM= 39 | github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= 40 | github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ= 41 | github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= 42 | github.com/googleapis/gax-go v2.0.0+incompatible h1:j0GKcs05QVmm7yesiZq2+9cxHkNK9YM6zKx4D2qucQU= 43 | github.com/googleapis/gax-go v2.0.0+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY= 44 | github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e h1:JKmoR8x90Iww1ks85zJ1lfDGgIiMDuIptTOhJq+zKyg= 45 | github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= 46 | github.com/grpc-ecosystem/grpc-gateway v1.5.0/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw= 47 | github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= 48 | github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= 49 | github.com/henrylee2cn/goutil v0.0.0-20190108065108-a4da700f03b3 h1:CpX8bDTDk7CIo6c+HGX2wA0oct0wAmxkiryk+WTEawQ= 50 | github.com/henrylee2cn/goutil v0.0.0-20190108065108-a4da700f03b3/go.mod h1:I9qYeMYwdKC7UFXMECNzCEv0fYuolqLeBMqsmeG7IVo= 51 | github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8 h1:12VvqtR6Aowv3l/EQUlocDHW2Cp4G9WJVH7uyH8QFJE= 52 | github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= 53 | github.com/jtolds/gls v4.2.1+incompatible 
h1:fSuqC+Gmlu6l/ZYAoZzx2pyucC8Xza35fpRVWLVmUEE= 54 | github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= 55 | github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= 56 | github.com/klauspost/compress v1.4.0 h1:8nsMz3tWa9SWWPL60G1V6CUsf4lLjWLTNEtibhe8gh8= 57 | github.com/klauspost/compress v1.4.0/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= 58 | github.com/klauspost/cpuid v0.0.0-20180405133222-e7e905edc00e h1:+lIPJOWl+jSiJOc70QXJ07+2eg2Jy2EC7Mi11BWujeM= 59 | github.com/klauspost/cpuid v0.0.0-20180405133222-e7e905edc00e/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= 60 | github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY= 61 | github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= 62 | github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= 63 | github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= 64 | github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE= 65 | github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= 66 | github.com/oklog/oklog v0.3.2 h1:wVfs8F+in6nTBMkA7CbRw+zZMIB7nNM825cM1wuzoTk= 67 | github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs= 68 | github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw= 69 | github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= 70 | github.com/openzipkin/zipkin-go v0.1.1/go.mod h1:NtoC/o8u3JlF1lSlyPNswIbeQH9bJTmOf0Erfk+hxe8= 71 | github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc= 72 | github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= 73 | github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= 74 | github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= 75 | github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= 76 | github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= 77 | github.com/pquerna/ffjson v0.0.0-20181028064349-e517b90714f7 h1:gGBSHPOU7g8YjTbhwn+lvFm2VDEhhA+PwDIlstkgSxE= 78 | github.com/pquerna/ffjson v0.0.0-20181028064349-e517b90714f7/go.mod h1:YARuvh7BUWHNhzDq2OM5tzR2RiCcN2D7sapiKyCel/M= 79 | github.com/prometheus/client_golang v0.8.0 h1:1921Yw9Gc3iSc4VQh3PIoOqgPCZS7G/4xQNVUp8Mda8= 80 | github.com/prometheus/client_golang v0.8.0/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= 81 | github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910 h1:idejC8f05m9MGOsuEi1ATq9shN03HrxNkD/luQvxCv8= 82 | github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= 83 | github.com/prometheus/common v0.0.0-20180801064454-c7de2306084e h1:n/3MEhJQjQxrOUCzh1Y3Re6aJUUWRp2M9+Oc3eVn/54= 84 | github.com/prometheus/common v0.0.0-20180801064454-c7de2306084e/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= 85 | github.com/prometheus/procfs v0.0.0-20180725123919-05ee40e3a273 h1:agujYaXJSxSo18YNX3jzl+4G6Bstwt+kqv47GS12uL0= 86 | github.com/prometheus/procfs v0.0.0-20180725123919-05ee40e3a273/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= 87 | github.com/smartystreets/assertions v0.0.0-20190116191733-b6c0e53d7304 
h1:Jpy1PXuP99tXNrhbq2BaPz9B+jNAvH1JPQQpG/9GCXY= 88 | github.com/smartystreets/assertions v0.0.0-20190116191733-b6c0e53d7304/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= 89 | github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c h1:Ho+uVpkel/udgjbwB5Lktg9BtvJSh2DT0Hi6LPSyI2w= 90 | github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s= 91 | github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI= 92 | github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= 93 | github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8= 94 | github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= 95 | github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk= 96 | github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= 97 | github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg= 98 | github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= 99 | github.com/spf13/viper v1.3.1 h1:5+8j8FTpnFV4nEImW/ofkzEt8VoOiLXxdYIDsB73T38= 100 | github.com/spf13/viper v1.3.1/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= 101 | github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= 102 | github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= 103 | github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= 104 | github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= 105 | github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= 106 | github.com/valyala/fasthttp v1.1.0 h1:3BohG7mqwj4lq7PTX//7gLbUlzNvZSPmuHFnloXT0lw= 107 | github.com/valyala/fasthttp v1.1.0/go.mod h1:4vX61m6KN+xDduDNwXrhIAVZaZaZiQ1luJk8LWSxF3s= 108 | github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio= 109 | github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= 110 | go.opencensus.io v0.15.0/go.mod h1:UffZAU+4sDEINUGP/B7UfBBkq4fqLu9zXAX7ke6CHW0= 111 | go.opencensus.io v0.18.1-0.20181204023538-aab39bd6a98b h1:6ayHMBPtdP3jNuk+Sfhso+PTB7ZJQ5E1FBo403m2H8w= 112 | go.opencensus.io v0.18.1-0.20181204023538-aab39bd6a98b/go.mod h1:vKdFvxhtzZ9onBp9VKHK8z/sRpBMnKAsufL7wlDrCOA= 113 | go.uber.org/atomic v1.3.2 h1:2Oa65PReHzfn29GpvgsYwloV9AVFHPDk8tYxt2c2tr4= 114 | go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= 115 | go.uber.org/multierr v1.1.0 h1:HoEmRHQPVSqub6w2z2d2EOVs2fjyFRGyofhKuyDq0QI= 116 | go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= 117 | go.uber.org/zap v1.9.1 h1:XCJQEf3W6eZaVwhRBof6ImoYGJSITeKWsyeh3HFu/5o= 118 | go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= 119 | golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= 120 | golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= 121 | golang.org/x/net v0.0.0-20180530234432-1e491301e022/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= 122 | golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= 123 | golang.org/x/net 
v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= 124 | golang.org/x/net v0.0.0-20180911220305-26e67e76b6c3 h1:czFLhve3vsQetD6JOJ8NZZvGQIXlnN3/yXxbT6/awxI= 125 | golang.org/x/net v0.0.0-20180911220305-26e67e76b6c3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= 126 | golang.org/x/oauth2 v0.0.0-20180603041954-1e0a3fa8ba9a h1:1Fy38jwe/QZhQfFQBy6dMj9F/WU1C+jo3/zLNr/WhW4= 127 | golang.org/x/oauth2 v0.0.0-20180603041954-1e0a3fa8ba9a/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= 128 | golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be h1:vEDujvNQGv4jgYKudGeI/+DAX4Jffq6hpD55MmoEvKs= 129 | golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= 130 | golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f h1:wMNYb4v58l5UBM7MYRLPG6ZhfOqbKu7X5eyFl8ZhKvA= 131 | golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= 132 | golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= 133 | golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= 134 | golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a h1:1n5lsVfiQW3yfsRGu98756EH1YthsFqr/5mxHduZW2A= 135 | golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= 136 | golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= 137 | golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= 138 | golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= 139 | google.golang.org/api v0.0.0-20180603000442-8e296ef26005/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= 140 | google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf h1:rjxqQmxjyqerRKEj+tZW+MCm4LgpFXu18bsEoCMgDsk= 141 | google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= 142 | google.golang.org/appengine v1.0.0 h1:dN4LljjBKVChsv0XCSI+zbyzdqrkEwX5LQFUMRSGqOc= 143 | google.golang.org/appengine v1.0.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= 144 | google.golang.org/appengine v1.1.0 h1:igQkv0AAhEIvTEpD5LIpAfav2eeVO9HBTjvKHVJPRSs= 145 | google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= 146 | google.golang.org/genproto v0.0.0-20180601223552-81158efcc9f2/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= 147 | google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= 148 | google.golang.org/genproto v0.0.0-20180831171423-11092d34479b h1:lohp5blsw53GBXtLyLNaTXPXS9pJ1tiTw61ZHUoE9Qw= 149 | google.golang.org/genproto v0.0.0-20180831171423-11092d34479b/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= 150 | google.golang.org/grpc v1.12.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= 151 | google.golang.org/grpc v1.14.0 h1:ArxJuB1NWfPY6r9Gp9gqwplT0Ge7nqv9msgu03lHLmo= 152 | google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= 153 | google.golang.org/grpc v1.15.0 h1:Az/KuahOM4NAidTEuJCv/RonAA7rYsTPkqXVjr+8OOw= 154 | google.golang.org/grpc v1.15.0/go.mod h1:0JHn/cJsOMiMfNA9+DeHDlAU7KAAB5GDlYFpa9MZMio= 155 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= 156 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod 
h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 157 | gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= 158 | gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= 159 | gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= 160 | honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= 161 | -------------------------------------------------------------------------------- /frontend/types/types_ffjson.go: -------------------------------------------------------------------------------- 1 | // Code generated by ffjson . DO NOT EDIT. 2 | // source: types/types.go 3 | 4 | package types 5 | 6 | import ( 7 | "bytes" 8 | "encoding/json" 9 | "fmt" 10 | fflib "github.com/pquerna/ffjson/fflib/v1" 11 | ) 12 | 13 | // MarshalJSON marshal bytes to json - template 14 | func (j *Error) MarshalJSON() ([]byte, error) { 15 | var buf fflib.Buffer 16 | if j == nil { 17 | buf.WriteString("null") 18 | return buf.Bytes(), nil 19 | } 20 | err := j.MarshalJSONBuf(&buf) 21 | if err != nil { 22 | return nil, err 23 | } 24 | return buf.Bytes(), nil 25 | } 26 | 27 | // MarshalJSONBuf marshal buff to json - template 28 | func (j *Error) MarshalJSONBuf(buf fflib.EncodingBuffer) error { 29 | if j == nil { 30 | buf.WriteString("null") 31 | return nil 32 | } 33 | var err error 34 | var obj []byte 35 | _ = obj 36 | _ = err 37 | buf.WriteString(`{ "detail":`) 38 | fflib.WriteJsonString(buf, string(j.Detail)) 39 | buf.WriteByte(',') 40 | if j.Index != nil { 41 | if true { 42 | buf.WriteString(`"index":`) 43 | fflib.FormatBits2(buf, uint64(*j.Index), 10, false) 44 | buf.WriteByte(',') 45 | } 46 | } 47 | buf.Rewind(1) 48 | buf.WriteByte('}') 49 | return nil 50 | } 51 | 52 | const ( 53 | ffjtErrorbase = iota 54 | ffjtErrornosuchkey 55 | 56 | ffjtErrorDetail 57 | 58 | ffjtErrorIndex 59 | ) 60 | 61 | var ffjKeyErrorDetail = []byte("detail") 62 | 63 | var ffjKeyErrorIndex = []byte("index") 64 | 65 | // UnmarshalJSON umarshall json - template of ffjson 66 | func (j *Error) UnmarshalJSON(input []byte) error { 67 | fs := fflib.NewFFLexer(input) 68 | return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) 69 | } 70 | 71 | // UnmarshalJSONFFLexer fast json unmarshall - template ffjson 72 | func (j *Error) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { 73 | var err error 74 | currentKey := ffjtErrorbase 75 | _ = currentKey 76 | tok := fflib.FFTok_init 77 | wantedTok := fflib.FFTok_init 78 | 79 | mainparse: 80 | for { 81 | tok = fs.Scan() 82 | // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) 83 | if tok == fflib.FFTok_error { 84 | goto tokerror 85 | } 86 | 87 | switch state { 88 | 89 | case fflib.FFParse_map_start: 90 | if tok != fflib.FFTok_left_bracket { 91 | wantedTok = fflib.FFTok_left_bracket 92 | goto wrongtokenerror 93 | } 94 | state = fflib.FFParse_want_key 95 | continue 96 | 97 | case fflib.FFParse_after_value: 98 | if tok == fflib.FFTok_comma { 99 | state = fflib.FFParse_want_key 100 | } else if tok == fflib.FFTok_right_bracket { 101 | goto done 102 | } else { 103 | wantedTok = fflib.FFTok_comma 104 | goto wrongtokenerror 105 | } 106 | 107 | case fflib.FFParse_want_key: 108 | // json {} ended. goto exit. woo. 109 | if tok == fflib.FFTok_right_bracket { 110 | goto done 111 | } 112 | if tok != fflib.FFTok_string { 113 | wantedTok = fflib.FFTok_string 114 | goto wrongtokenerror 115 | } 116 | 117 | kn := fs.Output.Bytes() 118 | if len(kn) <= 0 { 119 | // "" case. hrm. 
120 | currentKey = ffjtErrornosuchkey 121 | state = fflib.FFParse_want_colon 122 | goto mainparse 123 | } else { 124 | switch kn[0] { 125 | 126 | case 'd': 127 | 128 | if bytes.Equal(ffjKeyErrorDetail, kn) { 129 | currentKey = ffjtErrorDetail 130 | state = fflib.FFParse_want_colon 131 | goto mainparse 132 | } 133 | 134 | case 'i': 135 | 136 | if bytes.Equal(ffjKeyErrorIndex, kn) { 137 | currentKey = ffjtErrorIndex 138 | state = fflib.FFParse_want_colon 139 | goto mainparse 140 | } 141 | 142 | } 143 | 144 | if fflib.SimpleLetterEqualFold(ffjKeyErrorIndex, kn) { 145 | currentKey = ffjtErrorIndex 146 | state = fflib.FFParse_want_colon 147 | goto mainparse 148 | } 149 | 150 | if fflib.SimpleLetterEqualFold(ffjKeyErrorDetail, kn) { 151 | currentKey = ffjtErrorDetail 152 | state = fflib.FFParse_want_colon 153 | goto mainparse 154 | } 155 | 156 | currentKey = ffjtErrornosuchkey 157 | state = fflib.FFParse_want_colon 158 | goto mainparse 159 | } 160 | 161 | case fflib.FFParse_want_colon: 162 | if tok != fflib.FFTok_colon { 163 | wantedTok = fflib.FFTok_colon 164 | goto wrongtokenerror 165 | } 166 | state = fflib.FFParse_want_value 167 | continue 168 | case fflib.FFParse_want_value: 169 | 170 | if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null { 171 | switch currentKey { 172 | 173 | case ffjtErrorDetail: 174 | goto handle_Detail 175 | 176 | case ffjtErrorIndex: 177 | goto handle_Index 178 | 179 | case ffjtErrornosuchkey: 180 | err = fs.SkipField(tok) 181 | if err != nil { 182 | return fs.WrapErr(err) 183 | } 184 | state = fflib.FFParse_after_value 185 | goto mainparse 186 | } 187 | } else { 188 | goto wantedvalue 189 | } 190 | } 191 | } 192 | 193 | handle_Detail: 194 | 195 | /* handler: j.Detail type=string kind=string quoted=false*/ 196 | 197 | { 198 | 199 | { 200 | if tok != fflib.FFTok_string && tok != fflib.FFTok_null { 201 | return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) 202 | } 203 | } 204 | 205 | if tok == fflib.FFTok_null { 206 | 207 | } else { 208 | 209 | outBuf := fs.Output.Bytes() 210 | 211 | j.Detail = string(string(outBuf)) 212 | 213 | } 214 | } 215 | 216 | state = fflib.FFParse_after_value 217 | goto mainparse 218 | 219 | handle_Index: 220 | 221 | /* handler: j.Index type=uint64 kind=uint64 quoted=false*/ 222 | 223 | { 224 | if tok != fflib.FFTok_integer && tok != fflib.FFTok_null { 225 | return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for uint64", tok)) 226 | } 227 | } 228 | 229 | { 230 | 231 | if tok == fflib.FFTok_null { 232 | 233 | j.Index = nil 234 | 235 | } else { 236 | 237 | tval, err := fflib.ParseUint(fs.Output.Bytes(), 10, 64) 238 | 239 | if err != nil { 240 | return fs.WrapErr(err) 241 | } 242 | 243 | ttypval := uint64(tval) 244 | j.Index = &ttypval 245 | 246 | } 247 | } 248 | 249 | state = fflib.FFParse_after_value 250 | goto mainparse 251 | 252 | wantedvalue: 253 | return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) 254 | wrongtokenerror: 255 | return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) 256 | tokerror: 257 | if fs.BigError != nil { 258 | return fs.WrapErr(fs.BigError) 259 | } 260 | err = fs.Error.ToError() 261 | if err != nil { 262 | return fs.WrapErr(err) 263 | } 264 | panic("ffjson-generated: unreachable, please report bug.") 265 | done: 266 | 267 | return nil 
268 | } 269 | 270 | // MarshalJSON marshal bytes to json - template 271 | func (j *Event) MarshalJSON() ([]byte, error) { 272 | var buf fflib.Buffer 273 | if j == nil { 274 | buf.WriteString("null") 275 | return buf.Bytes(), nil 276 | } 277 | err := j.MarshalJSONBuf(&buf) 278 | if err != nil { 279 | return nil, err 280 | } 281 | return buf.Bytes(), nil 282 | } 283 | 284 | // MarshalJSONBuf marshal buff to json - template 285 | func (j *Event) MarshalJSONBuf(buf fflib.EncodingBuffer) error { 286 | if j == nil { 287 | buf.WriteString("null") 288 | return nil 289 | } 290 | var err error 291 | var obj []byte 292 | _ = obj 293 | _ = err 294 | buf.WriteString(`{"type":`) 295 | 296 | { 297 | 298 | err = j.TypeField.MarshalJSONBuf(buf) 299 | if err != nil { 300 | return err 301 | } 302 | 303 | } 304 | buf.WriteString(`,"payload":`) 305 | /* Falling back. type=types.Payload kind=map */ 306 | err = buf.Encode(j.PayloadField) 307 | if err != nil { 308 | return err 309 | } 310 | buf.WriteByte('}') 311 | return nil 312 | } 313 | 314 | const ( 315 | ffjtEventbase = iota 316 | ffjtEventnosuchkey 317 | 318 | ffjtEventTypeField 319 | 320 | ffjtEventPayloadField 321 | ) 322 | 323 | var ffjKeyEventTypeField = []byte("type") 324 | 325 | var ffjKeyEventPayloadField = []byte("payload") 326 | 327 | // UnmarshalJSON umarshall json - template of ffjson 328 | func (j *Event) UnmarshalJSON(input []byte) error { 329 | fs := fflib.NewFFLexer(input) 330 | return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) 331 | } 332 | 333 | // UnmarshalJSONFFLexer fast json unmarshall - template ffjson 334 | func (j *Event) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { 335 | var err error 336 | currentKey := ffjtEventbase 337 | _ = currentKey 338 | tok := fflib.FFTok_init 339 | wantedTok := fflib.FFTok_init 340 | 341 | mainparse: 342 | for { 343 | tok = fs.Scan() 344 | // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) 345 | if tok == fflib.FFTok_error { 346 | goto tokerror 347 | } 348 | 349 | switch state { 350 | 351 | case fflib.FFParse_map_start: 352 | if tok != fflib.FFTok_left_bracket { 353 | wantedTok = fflib.FFTok_left_bracket 354 | goto wrongtokenerror 355 | } 356 | state = fflib.FFParse_want_key 357 | continue 358 | 359 | case fflib.FFParse_after_value: 360 | if tok == fflib.FFTok_comma { 361 | state = fflib.FFParse_want_key 362 | } else if tok == fflib.FFTok_right_bracket { 363 | goto done 364 | } else { 365 | wantedTok = fflib.FFTok_comma 366 | goto wrongtokenerror 367 | } 368 | 369 | case fflib.FFParse_want_key: 370 | // json {} ended. goto exit. woo. 371 | if tok == fflib.FFTok_right_bracket { 372 | goto done 373 | } 374 | if tok != fflib.FFTok_string { 375 | wantedTok = fflib.FFTok_string 376 | goto wrongtokenerror 377 | } 378 | 379 | kn := fs.Output.Bytes() 380 | if len(kn) <= 0 { 381 | // "" case. hrm. 
382 | currentKey = ffjtEventnosuchkey 383 | state = fflib.FFParse_want_colon 384 | goto mainparse 385 | } else { 386 | switch kn[0] { 387 | 388 | case 'p': 389 | 390 | if bytes.Equal(ffjKeyEventPayloadField, kn) { 391 | currentKey = ffjtEventPayloadField 392 | state = fflib.FFParse_want_colon 393 | goto mainparse 394 | } 395 | 396 | case 't': 397 | 398 | if bytes.Equal(ffjKeyEventTypeField, kn) { 399 | currentKey = ffjtEventTypeField 400 | state = fflib.FFParse_want_colon 401 | goto mainparse 402 | } 403 | 404 | } 405 | 406 | if fflib.SimpleLetterEqualFold(ffjKeyEventPayloadField, kn) { 407 | currentKey = ffjtEventPayloadField 408 | state = fflib.FFParse_want_colon 409 | goto mainparse 410 | } 411 | 412 | if fflib.SimpleLetterEqualFold(ffjKeyEventTypeField, kn) { 413 | currentKey = ffjtEventTypeField 414 | state = fflib.FFParse_want_colon 415 | goto mainparse 416 | } 417 | 418 | currentKey = ffjtEventnosuchkey 419 | state = fflib.FFParse_want_colon 420 | goto mainparse 421 | } 422 | 423 | case fflib.FFParse_want_colon: 424 | if tok != fflib.FFTok_colon { 425 | wantedTok = fflib.FFTok_colon 426 | goto wrongtokenerror 427 | } 428 | state = fflib.FFParse_want_value 429 | continue 430 | case fflib.FFParse_want_value: 431 | 432 | if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null { 433 | switch currentKey { 434 | 435 | case ffjtEventTypeField: 436 | goto handle_TypeField 437 | 438 | case ffjtEventPayloadField: 439 | goto handle_PayloadField 440 | 441 | case ffjtEventnosuchkey: 442 | err = fs.SkipField(tok) 443 | if err != nil { 444 | return fs.WrapErr(err) 445 | } 446 | state = fflib.FFParse_after_value 447 | goto mainparse 448 | } 449 | } else { 450 | goto wantedvalue 451 | } 452 | } 453 | } 454 | 455 | handle_TypeField: 456 | 457 | /* handler: j.TypeField type=types.Type kind=struct quoted=false*/ 458 | 459 | { 460 | if tok == fflib.FFTok_null { 461 | 462 | } else { 463 | 464 | err = j.TypeField.UnmarshalJSONFFLexer(fs, fflib.FFParse_want_key) 465 | if err != nil { 466 | return err 467 | } 468 | } 469 | state = fflib.FFParse_after_value 470 | } 471 | 472 | state = fflib.FFParse_after_value 473 | goto mainparse 474 | 475 | handle_PayloadField: 476 | 477 | /* handler: j.PayloadField type=types.Payload kind=map quoted=false*/ 478 | 479 | { 480 | 481 | { 482 | if tok != fflib.FFTok_left_bracket && tok != fflib.FFTok_null { 483 | return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for Payload", tok)) 484 | } 485 | } 486 | 487 | if tok == fflib.FFTok_null { 488 | j.PayloadField = nil 489 | } else { 490 | 491 | j.PayloadField = make(map[string]interface{}, 0) 492 | 493 | wantVal := true 494 | 495 | for { 496 | 497 | var k string 498 | 499 | var tmpJPayloadField interface{} 500 | 501 | tok = fs.Scan() 502 | if tok == fflib.FFTok_error { 503 | goto tokerror 504 | } 505 | if tok == fflib.FFTok_right_bracket { 506 | break 507 | } 508 | 509 | if tok == fflib.FFTok_comma { 510 | if wantVal == true { 511 | // TODO(pquerna): this isn't an ideal error message, this handles 512 | // things like [,,,] as an array value. 
513 | return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) 514 | } 515 | continue 516 | } else { 517 | wantVal = true 518 | } 519 | 520 | /* handler: k type=string kind=string quoted=false*/ 521 | 522 | { 523 | 524 | { 525 | if tok != fflib.FFTok_string && tok != fflib.FFTok_null { 526 | return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) 527 | } 528 | } 529 | 530 | if tok == fflib.FFTok_null { 531 | 532 | } else { 533 | 534 | outBuf := fs.Output.Bytes() 535 | 536 | k = string(string(outBuf)) 537 | 538 | } 539 | } 540 | 541 | // Expect ':' after key 542 | tok = fs.Scan() 543 | if tok != fflib.FFTok_colon { 544 | return fs.WrapErr(fmt.Errorf("wanted colon token, but got token: %v", tok)) 545 | } 546 | 547 | tok = fs.Scan() 548 | /* handler: tmpJPayloadField type=interface {} kind=interface quoted=false*/ 549 | 550 | { 551 | /* Falling back. type=interface {} kind=interface */ 552 | tbuf, err := fs.CaptureField(tok) 553 | if err != nil { 554 | return fs.WrapErr(err) 555 | } 556 | 557 | err = json.Unmarshal(tbuf, &tmpJPayloadField) 558 | if err != nil { 559 | return fs.WrapErr(err) 560 | } 561 | } 562 | 563 | j.PayloadField[k] = tmpJPayloadField 564 | 565 | wantVal = false 566 | } 567 | 568 | } 569 | } 570 | 571 | state = fflib.FFParse_after_value 572 | goto mainparse 573 | 574 | wantedvalue: 575 | return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) 576 | wrongtokenerror: 577 | return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) 578 | tokerror: 579 | if fs.BigError != nil { 580 | return fs.WrapErr(fs.BigError) 581 | } 582 | err = fs.Error.ToError() 583 | if err != nil { 584 | return fs.WrapErr(err) 585 | } 586 | panic("ffjson-generated: unreachable, please report bug.") 587 | done: 588 | 589 | return nil 590 | } 591 | 592 | // MarshalJSON marshal bytes to json - template 593 | func (j *EventMsg) MarshalJSON() ([]byte, error) { 594 | var buf fflib.Buffer 595 | if j == nil { 596 | buf.WriteString("null") 597 | return buf.Bytes(), nil 598 | } 599 | err := j.MarshalJSONBuf(&buf) 600 | if err != nil { 601 | return nil, err 602 | } 603 | return buf.Bytes(), nil 604 | } 605 | 606 | // MarshalJSONBuf marshal buff to json - template 607 | func (j *EventMsg) MarshalJSONBuf(buf fflib.EncodingBuffer) error { 608 | if j == nil { 609 | buf.WriteString("null") 610 | return nil 611 | } 612 | var err error 613 | var obj []byte 614 | _ = obj 615 | _ = err 616 | buf.WriteString(`{"SenderID":`) 617 | fflib.WriteJsonString(buf, string(j.SenderID)) 618 | buf.WriteString(`,"Event":`) 619 | 620 | { 621 | 622 | err = j.Event.MarshalJSONBuf(buf) 623 | if err != nil { 624 | return err 625 | } 626 | 627 | } 628 | buf.WriteByte('}') 629 | return nil 630 | } 631 | 632 | const ( 633 | ffjtEventMsgbase = iota 634 | ffjtEventMsgnosuchkey 635 | 636 | ffjtEventMsgSenderID 637 | 638 | ffjtEventMsgEvent 639 | ) 640 | 641 | var ffjKeyEventMsgSenderID = []byte("SenderID") 642 | 643 | var ffjKeyEventMsgEvent = []byte("Event") 644 | 645 | // UnmarshalJSON umarshall json - template of ffjson 646 | func (j *EventMsg) UnmarshalJSON(input []byte) error { 647 | fs := fflib.NewFFLexer(input) 648 | return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) 649 | } 650 | 651 | // UnmarshalJSONFFLexer fast json unmarshall - template ffjson 652 | func (j *EventMsg) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { 653 | var err error 654 | currentKey := ffjtEventMsgbase 
655 | _ = currentKey 656 | tok := fflib.FFTok_init 657 | wantedTok := fflib.FFTok_init 658 | 659 | mainparse: 660 | for { 661 | tok = fs.Scan() 662 | // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) 663 | if tok == fflib.FFTok_error { 664 | goto tokerror 665 | } 666 | 667 | switch state { 668 | 669 | case fflib.FFParse_map_start: 670 | if tok != fflib.FFTok_left_bracket { 671 | wantedTok = fflib.FFTok_left_bracket 672 | goto wrongtokenerror 673 | } 674 | state = fflib.FFParse_want_key 675 | continue 676 | 677 | case fflib.FFParse_after_value: 678 | if tok == fflib.FFTok_comma { 679 | state = fflib.FFParse_want_key 680 | } else if tok == fflib.FFTok_right_bracket { 681 | goto done 682 | } else { 683 | wantedTok = fflib.FFTok_comma 684 | goto wrongtokenerror 685 | } 686 | 687 | case fflib.FFParse_want_key: 688 | // json {} ended. goto exit. woo. 689 | if tok == fflib.FFTok_right_bracket { 690 | goto done 691 | } 692 | if tok != fflib.FFTok_string { 693 | wantedTok = fflib.FFTok_string 694 | goto wrongtokenerror 695 | } 696 | 697 | kn := fs.Output.Bytes() 698 | if len(kn) <= 0 { 699 | // "" case. hrm. 700 | currentKey = ffjtEventMsgnosuchkey 701 | state = fflib.FFParse_want_colon 702 | goto mainparse 703 | } else { 704 | switch kn[0] { 705 | 706 | case 'E': 707 | 708 | if bytes.Equal(ffjKeyEventMsgEvent, kn) { 709 | currentKey = ffjtEventMsgEvent 710 | state = fflib.FFParse_want_colon 711 | goto mainparse 712 | } 713 | 714 | case 'S': 715 | 716 | if bytes.Equal(ffjKeyEventMsgSenderID, kn) { 717 | currentKey = ffjtEventMsgSenderID 718 | state = fflib.FFParse_want_colon 719 | goto mainparse 720 | } 721 | 722 | } 723 | 724 | if fflib.SimpleLetterEqualFold(ffjKeyEventMsgEvent, kn) { 725 | currentKey = ffjtEventMsgEvent 726 | state = fflib.FFParse_want_colon 727 | goto mainparse 728 | } 729 | 730 | if fflib.EqualFoldRight(ffjKeyEventMsgSenderID, kn) { 731 | currentKey = ffjtEventMsgSenderID 732 | state = fflib.FFParse_want_colon 733 | goto mainparse 734 | } 735 | 736 | currentKey = ffjtEventMsgnosuchkey 737 | state = fflib.FFParse_want_colon 738 | goto mainparse 739 | } 740 | 741 | case fflib.FFParse_want_colon: 742 | if tok != fflib.FFTok_colon { 743 | wantedTok = fflib.FFTok_colon 744 | goto wrongtokenerror 745 | } 746 | state = fflib.FFParse_want_value 747 | continue 748 | case fflib.FFParse_want_value: 749 | 750 | if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null { 751 | switch currentKey { 752 | 753 | case ffjtEventMsgSenderID: 754 | goto handle_SenderID 755 | 756 | case ffjtEventMsgEvent: 757 | goto handle_Event 758 | 759 | case ffjtEventMsgnosuchkey: 760 | err = fs.SkipField(tok) 761 | if err != nil { 762 | return fs.WrapErr(err) 763 | } 764 | state = fflib.FFParse_after_value 765 | goto mainparse 766 | } 767 | } else { 768 | goto wantedvalue 769 | } 770 | } 771 | } 772 | 773 | handle_SenderID: 774 | 775 | /* handler: j.SenderID type=string kind=string quoted=false*/ 776 | 777 | { 778 | 779 | { 780 | if tok != fflib.FFTok_string && tok != fflib.FFTok_null { 781 | return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) 782 | } 783 | } 784 | 785 | if tok == fflib.FFTok_null { 786 | 787 | } else { 788 | 789 | outBuf := fs.Output.Bytes() 790 | 791 | j.SenderID = string(string(outBuf)) 792 | 793 | } 794 | } 795 | 796 | state = fflib.FFParse_after_value 797 | goto mainparse 798 | 799 | 
handle_Event: 800 | 801 | /* handler: j.Event type=types.Event kind=struct quoted=false*/ 802 | 803 | { 804 | if tok == fflib.FFTok_null { 805 | 806 | } else { 807 | 808 | err = j.Event.UnmarshalJSONFFLexer(fs, fflib.FFParse_want_key) 809 | if err != nil { 810 | return err 811 | } 812 | } 813 | state = fflib.FFParse_after_value 814 | } 815 | 816 | state = fflib.FFParse_after_value 817 | goto mainparse 818 | 819 | wantedvalue: 820 | return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) 821 | wrongtokenerror: 822 | return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) 823 | tokerror: 824 | if fs.BigError != nil { 825 | return fs.WrapErr(fs.BigError) 826 | } 827 | err = fs.Error.ToError() 828 | if err != nil { 829 | return fs.WrapErr(err) 830 | } 831 | panic("ffjson-generated: unreachable, please report bug.") 832 | done: 833 | 834 | return nil 835 | } 836 | 837 | // MarshalJSON marshal bytes to json - template 838 | func (j *TrackRequest) MarshalJSON() ([]byte, error) { 839 | var buf fflib.Buffer 840 | if j == nil { 841 | buf.WriteString("null") 842 | return buf.Bytes(), nil 843 | } 844 | err := j.MarshalJSONBuf(&buf) 845 | if err != nil { 846 | return nil, err 847 | } 848 | return buf.Bytes(), nil 849 | } 850 | 851 | // MarshalJSONBuf marshal buff to json - template 852 | func (j *TrackRequest) MarshalJSONBuf(buf fflib.EncodingBuffer) error { 853 | if j == nil { 854 | buf.WriteString("null") 855 | return nil 856 | } 857 | var err error 858 | var obj []byte 859 | _ = obj 860 | _ = err 861 | buf.WriteString(`{"sender_id":`) 862 | fflib.WriteJsonString(buf, string(j.SenderID)) 863 | buf.WriteString(`,"events":`) 864 | if j.Events != nil { 865 | buf.WriteString(`[`) 866 | for i, v := range j.Events { 867 | if i != 0 { 868 | buf.WriteString(`,`) 869 | } 870 | 871 | { 872 | 873 | err = v.MarshalJSONBuf(buf) 874 | if err != nil { 875 | return err 876 | } 877 | 878 | } 879 | } 880 | buf.WriteString(`]`) 881 | } else { 882 | buf.WriteString(`null`) 883 | } 884 | buf.WriteByte('}') 885 | return nil 886 | } 887 | 888 | const ( 889 | ffjtTrackRequestbase = iota 890 | ffjtTrackRequestnosuchkey 891 | 892 | ffjtTrackRequestSenderID 893 | 894 | ffjtTrackRequestEvents 895 | ) 896 | 897 | var ffjKeyTrackRequestSenderID = []byte("sender_id") 898 | 899 | var ffjKeyTrackRequestEvents = []byte("events") 900 | 901 | // UnmarshalJSON umarshall json - template of ffjson 902 | func (j *TrackRequest) UnmarshalJSON(input []byte) error { 903 | fs := fflib.NewFFLexer(input) 904 | return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) 905 | } 906 | 907 | // UnmarshalJSONFFLexer fast json unmarshall - template ffjson 908 | func (j *TrackRequest) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { 909 | var err error 910 | currentKey := ffjtTrackRequestbase 911 | _ = currentKey 912 | tok := fflib.FFTok_init 913 | wantedTok := fflib.FFTok_init 914 | 915 | mainparse: 916 | for { 917 | tok = fs.Scan() 918 | // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) 919 | if tok == fflib.FFTok_error { 920 | goto tokerror 921 | } 922 | 923 | switch state { 924 | 925 | case fflib.FFParse_map_start: 926 | if tok != fflib.FFTok_left_bracket { 927 | wantedTok = fflib.FFTok_left_bracket 928 | goto wrongtokenerror 929 | } 930 | state = fflib.FFParse_want_key 931 | continue 932 | 933 | case fflib.FFParse_after_value: 934 | if tok == fflib.FFTok_comma { 935 | state = fflib.FFParse_want_key 936 | } else if tok == 
fflib.FFTok_right_bracket { 937 | goto done 938 | } else { 939 | wantedTok = fflib.FFTok_comma 940 | goto wrongtokenerror 941 | } 942 | 943 | case fflib.FFParse_want_key: 944 | // json {} ended. goto exit. woo. 945 | if tok == fflib.FFTok_right_bracket { 946 | goto done 947 | } 948 | if tok != fflib.FFTok_string { 949 | wantedTok = fflib.FFTok_string 950 | goto wrongtokenerror 951 | } 952 | 953 | kn := fs.Output.Bytes() 954 | if len(kn) <= 0 { 955 | // "" case. hrm. 956 | currentKey = ffjtTrackRequestnosuchkey 957 | state = fflib.FFParse_want_colon 958 | goto mainparse 959 | } else { 960 | switch kn[0] { 961 | 962 | case 'e': 963 | 964 | if bytes.Equal(ffjKeyTrackRequestEvents, kn) { 965 | currentKey = ffjtTrackRequestEvents 966 | state = fflib.FFParse_want_colon 967 | goto mainparse 968 | } 969 | 970 | case 's': 971 | 972 | if bytes.Equal(ffjKeyTrackRequestSenderID, kn) { 973 | currentKey = ffjtTrackRequestSenderID 974 | state = fflib.FFParse_want_colon 975 | goto mainparse 976 | } 977 | 978 | } 979 | 980 | if fflib.EqualFoldRight(ffjKeyTrackRequestEvents, kn) { 981 | currentKey = ffjtTrackRequestEvents 982 | state = fflib.FFParse_want_colon 983 | goto mainparse 984 | } 985 | 986 | if fflib.EqualFoldRight(ffjKeyTrackRequestSenderID, kn) { 987 | currentKey = ffjtTrackRequestSenderID 988 | state = fflib.FFParse_want_colon 989 | goto mainparse 990 | } 991 | 992 | currentKey = ffjtTrackRequestnosuchkey 993 | state = fflib.FFParse_want_colon 994 | goto mainparse 995 | } 996 | 997 | case fflib.FFParse_want_colon: 998 | if tok != fflib.FFTok_colon { 999 | wantedTok = fflib.FFTok_colon 1000 | goto wrongtokenerror 1001 | } 1002 | state = fflib.FFParse_want_value 1003 | continue 1004 | case fflib.FFParse_want_value: 1005 | 1006 | if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null { 1007 | switch currentKey { 1008 | 1009 | case ffjtTrackRequestSenderID: 1010 | goto handle_SenderID 1011 | 1012 | case ffjtTrackRequestEvents: 1013 | goto handle_Events 1014 | 1015 | case ffjtTrackRequestnosuchkey: 1016 | err = fs.SkipField(tok) 1017 | if err != nil { 1018 | return fs.WrapErr(err) 1019 | } 1020 | state = fflib.FFParse_after_value 1021 | goto mainparse 1022 | } 1023 | } else { 1024 | goto wantedvalue 1025 | } 1026 | } 1027 | } 1028 | 1029 | handle_SenderID: 1030 | 1031 | /* handler: j.SenderID type=string kind=string quoted=false*/ 1032 | 1033 | { 1034 | 1035 | { 1036 | if tok != fflib.FFTok_string && tok != fflib.FFTok_null { 1037 | return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) 1038 | } 1039 | } 1040 | 1041 | if tok == fflib.FFTok_null { 1042 | 1043 | } else { 1044 | 1045 | outBuf := fs.Output.Bytes() 1046 | 1047 | j.SenderID = string(string(outBuf)) 1048 | 1049 | } 1050 | } 1051 | 1052 | state = fflib.FFParse_after_value 1053 | goto mainparse 1054 | 1055 | handle_Events: 1056 | 1057 | /* handler: j.Events type=[]types.Event kind=slice quoted=false*/ 1058 | 1059 | { 1060 | 1061 | { 1062 | if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null { 1063 | return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) 1064 | } 1065 | } 1066 | 1067 | if tok == fflib.FFTok_null { 1068 | j.Events = nil 1069 | } else { 1070 | 1071 | j.Events = []Event{} 1072 | 1073 | wantVal := true 1074 | 1075 | for { 1076 | 1077 | var tmpJEvents Event 1078 | 1079 | tok = fs.Scan() 1080 | if tok == fflib.FFTok_error { 
1081 | goto tokerror 1082 | } 1083 | if tok == fflib.FFTok_right_brace { 1084 | break 1085 | } 1086 | 1087 | if tok == fflib.FFTok_comma { 1088 | if wantVal == true { 1089 | // TODO(pquerna): this isn't an ideal error message, this handles 1090 | // things like [,,,] as an array value. 1091 | return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) 1092 | } 1093 | continue 1094 | } else { 1095 | wantVal = true 1096 | } 1097 | 1098 | /* handler: tmpJEvents type=types.Event kind=struct quoted=false*/ 1099 | 1100 | { 1101 | if tok == fflib.FFTok_null { 1102 | 1103 | } else { 1104 | 1105 | err = tmpJEvents.UnmarshalJSONFFLexer(fs, fflib.FFParse_want_key) 1106 | if err != nil { 1107 | return err 1108 | } 1109 | } 1110 | state = fflib.FFParse_after_value 1111 | } 1112 | 1113 | j.Events = append(j.Events, tmpJEvents) 1114 | 1115 | wantVal = false 1116 | } 1117 | } 1118 | } 1119 | 1120 | state = fflib.FFParse_after_value 1121 | goto mainparse 1122 | 1123 | wantedvalue: 1124 | return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) 1125 | wrongtokenerror: 1126 | return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) 1127 | tokerror: 1128 | if fs.BigError != nil { 1129 | return fs.WrapErr(fs.BigError) 1130 | } 1131 | err = fs.Error.ToError() 1132 | if err != nil { 1133 | return fs.WrapErr(err) 1134 | } 1135 | panic("ffjson-generated: unreachable, please report bug.") 1136 | done: 1137 | 1138 | return nil 1139 | } 1140 | 1141 | // MarshalJSON marshal bytes to json - template 1142 | func (j *Type) MarshalJSON() ([]byte, error) { 1143 | var buf fflib.Buffer 1144 | if j == nil { 1145 | buf.WriteString("null") 1146 | return buf.Bytes(), nil 1147 | } 1148 | err := j.MarshalJSONBuf(&buf) 1149 | if err != nil { 1150 | return nil, err 1151 | } 1152 | return buf.Bytes(), nil 1153 | } 1154 | 1155 | // MarshalJSONBuf marshal buff to json - template 1156 | func (j *Type) MarshalJSONBuf(buf fflib.EncodingBuffer) error { 1157 | if j == nil { 1158 | buf.WriteString("null") 1159 | return nil 1160 | } 1161 | var err error 1162 | var obj []byte 1163 | _ = obj 1164 | _ = err 1165 | buf.WriteString(`{"event_version":`) 1166 | fflib.WriteJsonString(buf, string(j.EventVersionField)) 1167 | buf.WriteString(`,"event_name":`) 1168 | fflib.WriteJsonString(buf, string(j.EventNameField)) 1169 | buf.WriteByte('}') 1170 | return nil 1171 | } 1172 | 1173 | const ( 1174 | ffjtTypebase = iota 1175 | ffjtTypenosuchkey 1176 | 1177 | ffjtTypeEventVersionField 1178 | 1179 | ffjtTypeEventNameField 1180 | ) 1181 | 1182 | var ffjKeyTypeEventVersionField = []byte("event_version") 1183 | 1184 | var ffjKeyTypeEventNameField = []byte("event_name") 1185 | 1186 | // UnmarshalJSON umarshall json - template of ffjson 1187 | func (j *Type) UnmarshalJSON(input []byte) error { 1188 | fs := fflib.NewFFLexer(input) 1189 | return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) 1190 | } 1191 | 1192 | // UnmarshalJSONFFLexer fast json unmarshall - template ffjson 1193 | func (j *Type) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { 1194 | var err error 1195 | currentKey := ffjtTypebase 1196 | _ = currentKey 1197 | tok := fflib.FFTok_init 1198 | wantedTok := fflib.FFTok_init 1199 | 1200 | mainparse: 1201 | for { 1202 | tok = fs.Scan() 1203 | // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) 1204 | if tok == fflib.FFTok_error { 1205 | goto tokerror 1206 | } 1207 | 1208 | switch state { 1209 | 1210 | case 
fflib.FFParse_map_start: 1211 | if tok != fflib.FFTok_left_bracket { 1212 | wantedTok = fflib.FFTok_left_bracket 1213 | goto wrongtokenerror 1214 | } 1215 | state = fflib.FFParse_want_key 1216 | continue 1217 | 1218 | case fflib.FFParse_after_value: 1219 | if tok == fflib.FFTok_comma { 1220 | state = fflib.FFParse_want_key 1221 | } else if tok == fflib.FFTok_right_bracket { 1222 | goto done 1223 | } else { 1224 | wantedTok = fflib.FFTok_comma 1225 | goto wrongtokenerror 1226 | } 1227 | 1228 | case fflib.FFParse_want_key: 1229 | // json {} ended. goto exit. woo. 1230 | if tok == fflib.FFTok_right_bracket { 1231 | goto done 1232 | } 1233 | if tok != fflib.FFTok_string { 1234 | wantedTok = fflib.FFTok_string 1235 | goto wrongtokenerror 1236 | } 1237 | 1238 | kn := fs.Output.Bytes() 1239 | if len(kn) <= 0 { 1240 | // "" case. hrm. 1241 | currentKey = ffjtTypenosuchkey 1242 | state = fflib.FFParse_want_colon 1243 | goto mainparse 1244 | } else { 1245 | switch kn[0] { 1246 | 1247 | case 'e': 1248 | 1249 | if bytes.Equal(ffjKeyTypeEventVersionField, kn) { 1250 | currentKey = ffjtTypeEventVersionField 1251 | state = fflib.FFParse_want_colon 1252 | goto mainparse 1253 | 1254 | } else if bytes.Equal(ffjKeyTypeEventNameField, kn) { 1255 | currentKey = ffjtTypeEventNameField 1256 | state = fflib.FFParse_want_colon 1257 | goto mainparse 1258 | } 1259 | 1260 | } 1261 | 1262 | if fflib.AsciiEqualFold(ffjKeyTypeEventNameField, kn) { 1263 | currentKey = ffjtTypeEventNameField 1264 | state = fflib.FFParse_want_colon 1265 | goto mainparse 1266 | } 1267 | 1268 | if fflib.EqualFoldRight(ffjKeyTypeEventVersionField, kn) { 1269 | currentKey = ffjtTypeEventVersionField 1270 | state = fflib.FFParse_want_colon 1271 | goto mainparse 1272 | } 1273 | 1274 | currentKey = ffjtTypenosuchkey 1275 | state = fflib.FFParse_want_colon 1276 | goto mainparse 1277 | } 1278 | 1279 | case fflib.FFParse_want_colon: 1280 | if tok != fflib.FFTok_colon { 1281 | wantedTok = fflib.FFTok_colon 1282 | goto wrongtokenerror 1283 | } 1284 | state = fflib.FFParse_want_value 1285 | continue 1286 | case fflib.FFParse_want_value: 1287 | 1288 | if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null { 1289 | switch currentKey { 1290 | 1291 | case ffjtTypeEventVersionField: 1292 | goto handle_EventVersionField 1293 | 1294 | case ffjtTypeEventNameField: 1295 | goto handle_EventNameField 1296 | 1297 | case ffjtTypenosuchkey: 1298 | err = fs.SkipField(tok) 1299 | if err != nil { 1300 | return fs.WrapErr(err) 1301 | } 1302 | state = fflib.FFParse_after_value 1303 | goto mainparse 1304 | } 1305 | } else { 1306 | goto wantedvalue 1307 | } 1308 | } 1309 | } 1310 | 1311 | handle_EventVersionField: 1312 | 1313 | /* handler: j.EventVersionField type=string kind=string quoted=false*/ 1314 | 1315 | { 1316 | 1317 | { 1318 | if tok != fflib.FFTok_string && tok != fflib.FFTok_null { 1319 | return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) 1320 | } 1321 | } 1322 | 1323 | if tok == fflib.FFTok_null { 1324 | 1325 | } else { 1326 | 1327 | outBuf := fs.Output.Bytes() 1328 | 1329 | j.EventVersionField = string(string(outBuf)) 1330 | 1331 | } 1332 | } 1333 | 1334 | state = fflib.FFParse_after_value 1335 | goto mainparse 1336 | 1337 | handle_EventNameField: 1338 | 1339 | /* handler: j.EventNameField type=string kind=string quoted=false*/ 1340 | 1341 | { 1342 | 1343 | { 1344 | if tok != 
fflib.FFTok_string && tok != fflib.FFTok_null { 1345 | return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) 1346 | } 1347 | } 1348 | 1349 | if tok == fflib.FFTok_null { 1350 | 1351 | } else { 1352 | 1353 | outBuf := fs.Output.Bytes() 1354 | 1355 | j.EventNameField = string(string(outBuf)) 1356 | 1357 | } 1358 | } 1359 | 1360 | state = fflib.FFParse_after_value 1361 | goto mainparse 1362 | 1363 | wantedvalue: 1364 | return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) 1365 | wrongtokenerror: 1366 | return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) 1367 | tokerror: 1368 | if fs.BigError != nil { 1369 | return fs.WrapErr(fs.BigError) 1370 | } 1371 | err = fs.Error.ToError() 1372 | if err != nil { 1373 | return fs.WrapErr(err) 1374 | } 1375 | panic("ffjson-generated: unreachable, please report bug.") 1376 | done: 1377 | 1378 | return nil 1379 | } 1380 | --------------------------------------------------------------------------------
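For reference, a minimal sketch of exercising the generated TrackRequest codec end to end. It relies only on the `sender_id` and `events` keys emitted by the marshaler above; the import path, payload values, and the empty events array are assumptions for illustration (the Event field layout lives elsewhere in types.go).

package main

import (
	"fmt"
	"log"

	// Assumed module path for the frontend types package.
	"github.com/doitintl/banias/frontend/types"
)

func main() {
	// Illustrative payload matching the generated TrackRequest codec:
	// "sender_id" maps to SenderID, "events" to the Events slice.
	payload := []byte(`{"sender_id":"abc-123","events":[]}`)

	var req types.TrackRequest
	if err := req.UnmarshalJSON(payload); err != nil {
		log.Fatalf("decode: %v", err)
	}

	// Re-encode with the generated MarshalJSON to confirm the round trip.
	out, err := req.MarshalJSON()
	if err != nil {
		log.Fatalf("encode: %v", err)
	}
	fmt.Printf("sender=%s events=%d json=%s\n", req.SenderID, len(req.Events), out)
}

Because this codec is generated by ffjson, the round trip avoids encoding/json reflection on the hot path; types_ffjson.go should be regenerated (typically by running `ffjson` against types.go) rather than edited by hand.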