├── .gitignore ├── .mvn ├── README.md ├── extensions.xml ├── jvm.config ├── maven.config └── wrapper │ ├── maven-wrapper.jar │ └── maven-wrapper.properties ├── .travis.yml ├── .travis_settings.xml ├── Dockerfile ├── LICENSE ├── Procfile ├── README.md ├── app.json ├── app.yml ├── docker-compose.yml ├── entrypoint.sh ├── jvm.config ├── log.properties ├── mapper ├── rakam-mapper-geoip-ip2location │ ├── pom.xml │ └── src │ │ └── main │ │ └── java │ │ └── org │ │ └── rakam │ │ └── collection │ │ └── mapper │ │ └── geoip │ │ └── maxmind │ │ └── ip2location │ │ ├── CSV.java │ │ ├── Coordination.java │ │ ├── GeoIPModuleConfig.java │ │ ├── GeoLocation.java │ │ ├── HttpDownloadHelper.java │ │ ├── IP2LocationGeoIPEventMapper.java │ │ ├── IP2LocationGeoIPModule.java │ │ ├── IPReader.java │ │ └── utils │ │ └── IP4Converter.java ├── rakam-mapper-geoip-maxmind │ ├── pom.xml │ └── src │ │ ├── main │ │ ├── java │ │ │ └── org │ │ │ │ └── rakam │ │ │ │ └── collection │ │ │ │ └── mapper │ │ │ │ └── geoip │ │ │ │ └── maxmind │ │ │ │ ├── HttpDownloadHelper.java │ │ │ │ ├── MaxmindGeoIPEventMapper.java │ │ │ │ ├── MaxmindGeoIPModule.java │ │ │ │ └── MaxmindGeoIPModuleConfig.java │ │ └── resources │ │ │ └── META-INF │ │ │ └── services │ │ │ └── org.rakam.plugin.RakamModule │ │ └── test │ │ └── java │ │ └── org │ │ └── rakam │ │ └── collection │ │ └── mapper │ │ └── geoip │ │ └── maxmind │ │ └── TestGeoIPEventMapper.java └── rakam-mapper-website │ ├── pom.xml │ └── src │ ├── main │ ├── java │ │ └── org │ │ │ └── rakam │ │ │ └── module │ │ │ └── website │ │ │ ├── ReferrerEventMapper.java │ │ │ ├── UserAgentEventMapper.java │ │ │ ├── UserIdEventMapper.java │ │ │ ├── WebsiteEventMapperModule.java │ │ │ └── WebsiteMapperConfig.java │ └── resources │ │ ├── META-INF │ │ └── services │ │ │ └── org.rakam.plugin.RakamModule │ │ └── ua_parser │ │ └── regexes.yaml │ └── test │ └── java │ └── org │ └── rakam │ └── module │ └── website │ ├── TestReferrerEventMapper.java │ └── 
TestUserAgentEventMapper.java ├── mvnw ├── pom.xml ├── rakam-aws ├── pom.xml └── src │ ├── main │ └── java │ │ └── org │ │ ├── apache │ │ └── avro │ │ │ └── generic │ │ │ └── FilteredRecordWriter.java │ │ └── rakam │ │ └── aws │ │ ├── AWSConfig.java │ │ ├── kinesis │ │ ├── AWSKinesisEventStore.java │ │ ├── AWSKinesisModule.java │ │ ├── ForStreamer.java │ │ ├── KinesisUtils.java │ │ └── StreamQuery.java │ │ └── s3 │ │ └── S3BulkEventStore.java │ └── test │ └── java │ └── org │ └── rakam │ └── aws │ └── DynamodbUtil.java ├── rakam-postgresql ├── pom.xml └── src │ ├── main │ └── java │ │ └── org │ │ └── rakam │ │ └── postgresql │ │ ├── PostgresqlApiKeyService.java │ │ ├── PostgresqlConfigManager.java │ │ ├── PostgresqlModule.java │ │ ├── analysis │ │ ├── JDBCApiKeyService.java │ │ ├── PostgresqlConfig.java │ │ ├── PostgresqlEventStore.java │ │ └── PostgresqlMetastore.java │ │ └── plugin │ │ └── user │ │ ├── PostgresqlUserModule.java │ │ ├── PostgresqlUserService.java │ │ └── PostgresqlUserStorage.java │ └── test │ └── java │ └── org │ └── rakam │ ├── pg10 │ ├── TestingEnvironmentPg10.java │ ├── analysis │ │ └── TestPostgresqlApiKeyService.java │ └── collection │ │ └── TestPostgresqlUserStorage.java │ └── pg9 │ ├── TestingEnvironmentPg9.java │ ├── analysis │ └── TestPostgresqlApiKeyService.java │ └── collection │ └── TestPostgresqlUserStorage.java ├── rakam-presto-kafka ├── pom.xml └── src │ └── main │ └── java │ └── org │ ├── apache │ └── avro │ │ └── generic │ │ └── SourceFilteredRecordWriter.java │ └── rakam │ └── kafka │ └── collection │ ├── KafkaCollectorModule.java │ ├── KafkaConfig.java │ ├── KafkaEventStore.java │ ├── KafkaOffsetManager.java │ └── KafkaSimpleConsumerManager.java ├── rakam-presto ├── pom.xml └── src │ ├── main │ └── java │ │ └── org │ │ └── rakam │ │ └── presto │ │ ├── MetadataModule.java │ │ ├── PrestoType.java │ │ └── analysis │ │ ├── MetadataDao.java │ │ ├── MysqlConfigManager.java │ │ ├── MysqlExplicitMetastore.java │ │ └── SchemaDao.java │ 
└── test │ └── resources │ ├── keystore.jks │ └── s3proxy.conf ├── rakam-spi ├── pom.xml └── src │ ├── main │ └── java │ │ └── org │ │ └── rakam │ │ ├── Access.java │ │ ├── Mapper.java │ │ ├── TestingConfigManager.java │ │ ├── analysis │ │ ├── ApiKeyService.java │ │ ├── ConfigManager.java │ │ ├── CustomParameter.java │ │ ├── EscapeIdentifier.java │ │ ├── InMemoryApiKeyService.java │ │ ├── InMemoryEventStore.java │ │ ├── InMemoryMetastore.java │ │ ├── InternalConfig.java │ │ ├── JDBCPoolDataSource.java │ │ ├── RequestContext.java │ │ ├── RequestPreProcessorItem.java │ │ └── metadata │ │ │ ├── AbstractMetastore.java │ │ │ ├── Metastore.java │ │ │ └── SchemaChecker.java │ │ ├── collection │ │ ├── Event.java │ │ ├── EventList.java │ │ ├── FieldDependencyBuilder.java │ │ ├── FieldType.java │ │ └── SchemaField.java │ │ ├── config │ │ ├── EncryptionConfig.java │ │ ├── JDBCConfig.java │ │ ├── MetadataConfig.java │ │ ├── ProjectConfig.java │ │ └── TaskConfig.java │ │ ├── plugin │ │ ├── DummyEventStore.java │ │ ├── EventMapper.java │ │ ├── EventStore.java │ │ ├── InjectionHook.java │ │ ├── Parameter.java │ │ ├── ProjectItem.java │ │ ├── RakamModule.java │ │ ├── SyncEventMapper.java │ │ ├── SyncEventStore.java │ │ ├── SystemEvents.java │ │ ├── TimestampEventMapper.java │ │ ├── stream │ │ │ ├── CollectionStreamQuery.java │ │ │ ├── EventStream.java │ │ │ ├── EventStreamConfig.java │ │ │ └── StreamResponse.java │ │ └── user │ │ │ ├── AbstractUserService.java │ │ │ ├── ISingleUserBatchOperation.java │ │ │ ├── User.java │ │ │ ├── UserPluginConfig.java │ │ │ ├── UserPropertyMapper.java │ │ │ └── UserStorage.java │ │ ├── report │ │ ├── QueryError.java │ │ └── QueryResult.java │ │ ├── server │ │ └── http │ │ │ └── RakamServletWrapper.java │ │ └── util │ │ ├── AllowCookie.java │ │ ├── AlreadyExistsException.java │ │ ├── AvroUtil.java │ │ ├── ConditionalModule.java │ │ ├── CryptUtil.java │ │ ├── DateTimeUtils.java │ │ ├── JDBCUtil.java │ │ ├── JsonHelper.java │ │ ├── LogUtil.java │ │ 
├── MailSender.java │ │ ├── MapProxyGenericRecord.java │ │ ├── NotExistsException.java │ │ ├── NotFoundHandler.java │ │ ├── ProjectCollection.java │ │ ├── RAsyncHttpClient.java │ │ ├── RakamClient.java │ │ ├── RakamException.java │ │ ├── StandardErrors.java │ │ ├── SuccessMessage.java │ │ └── ValidationUtil.java │ └── test │ └── java │ └── org │ └── rakam │ ├── EventBuilder.java │ ├── analysis │ ├── TestApiKeyService.java │ └── TestConfigManager.java │ └── collection │ ├── TestMetastore.java │ └── TestUserStorage.java ├── rakam ├── bin │ ├── launcher │ ├── launcher.properties │ ├── launcher.py │ └── procname │ │ └── Linux-x86_64 │ │ └── libprocname.so ├── pom.xml └── src │ ├── main │ ├── java │ │ └── org │ │ │ └── rakam │ │ │ ├── LogModule.java │ │ │ ├── ServiceStarter.java │ │ │ ├── SystemRegistryGenerator.java │ │ │ ├── analysis │ │ │ ├── AdminHttpService.java │ │ │ ├── ProjectHttpService.java │ │ │ ├── SchemaConverter.java │ │ │ └── webhook │ │ │ │ ├── WebhookConfig.java │ │ │ │ ├── WebhookEventMapper.java │ │ │ │ └── WebhookModule.java │ │ │ ├── bootstrap │ │ │ ├── ProxyBootstrap.java │ │ │ └── SystemRegistry.java │ │ │ ├── collection │ │ │ ├── AvroEventDeserializer.java │ │ │ ├── CsvEventDeserializer.java │ │ │ ├── EventCollectionHttpService.java │ │ │ ├── EventListDeserializer.java │ │ │ ├── HeaderDefaultFullHttpResponse.java │ │ │ ├── JsonEventDeserializer.java │ │ │ └── WebHookHttpService.java │ │ │ ├── http │ │ │ ├── ForHttpServer.java │ │ │ ├── HttpServerConfig.java │ │ │ ├── OptionMethodHttpService.java │ │ │ └── WebServiceModule.java │ │ │ ├── plugin │ │ │ ├── CustomEventMapperHttpService.java │ │ │ ├── JSEventMapperModule.java │ │ │ ├── PluginClassLoader.java │ │ │ ├── PluginConfig.java │ │ │ ├── WebhookModule.java │ │ │ └── user │ │ │ │ ├── UserActionService.java │ │ │ │ ├── UserHttpService.java │ │ │ │ └── UserModule.java │ │ │ └── util │ │ │ ├── ExportUtil.java │ │ │ ├── RakamHealthCheckModule.java │ │ │ ├── StringTemplate.java │ │ │ └── javascript 
│ │ │ ├── ILogger.java │ │ │ ├── JSCodeCompiler.java │ │ │ ├── JSCodeJDBCLoggerService.java │ │ │ ├── JSConfigManager.java │ │ │ ├── JSLoggerService.java │ │ │ ├── JavascriptConfig.java │ │ │ ├── JavascriptModule.java │ │ │ └── MonitorThread.java │ └── resources │ │ ├── assembly.xml │ │ └── log4j2.properties │ └── test │ └── java │ ├── Test.java │ ├── TestCSVParser.java │ ├── TestEventJsonParser.java │ ├── TestJSCodeCompiler.java │ └── bloom │ ├── OoaBFilter.java │ └── TestFilter.java └── system.properties /.gitignore: -------------------------------------------------------------------------------- 1 | rakam-presto/.test/ 2 | .test/ 3 | plugins/* 4 | _ui/* 5 | data/* 6 | 7 | .idea/ 8 | *.iml 9 | *.ipr 10 | *.iws 11 | .classpath 12 | .project 13 | .settings/ 14 | lib/* 15 | 16 | .gradle/ 17 | build/ 18 | target/ 19 | 20 | mods/ 21 | ======= 22 | *.class 23 | *.log 24 | 25 | # sbt specific 26 | dist/* 27 | lib_managed/ 28 | src_managed/ 29 | project/boot/ 30 | project/plugins/project/ 31 | 32 | # Scala-IDE specific 33 | .scala_dependencies 34 | 35 | git.properties 36 | .DS_Store 37 | dependency-reduced-pom.xml 38 | log.properties 39 | -------------------------------------------------------------------------------- /.mvn/README.md: -------------------------------------------------------------------------------- 1 | # Taken from facebook/presto 2 | 3 | ## Parallel Build with Smart Builder 4 | 5 | Add this line if you want to use the Smart Builder with 8 threads: 6 | 7 | `-T 8 -b smart` 8 | 9 | Enabling this by default causes issues with tests as it usually consumes more resources than a normal laptop has, and it also causes issues with the maven-release-plugin. 10 | 11 | If you are working in a mode where you are not running the tests than turning on this option may be more convenient. 12 | 13 | Note that you can still use this option from the command line and it will override the default single threaded builder listed the `.mvn/maven.config`. 
14 | 15 | 16 | -------------------------------------------------------------------------------- /.mvn/extensions.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | io.takari.maven 5 | takari-smart-builder 6 | 0.4.0 7 | 8 | 9 | io.takari.aether 10 | takari-concurrent-localrepo 11 | 0.0.7 12 | 13 | 14 | -------------------------------------------------------------------------------- /.mvn/jvm.config: -------------------------------------------------------------------------------- 1 | -Xmx8192m 2 | -------------------------------------------------------------------------------- /.mvn/maven.config: -------------------------------------------------------------------------------- 1 | -b multithreaded 2 | 3 | -------------------------------------------------------------------------------- /.mvn/wrapper/maven-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rakam-io/rakam-api/ef75341e5337a2c78b4ae2b30cd78cd01ac1a362/.mvn/wrapper/maven-wrapper.jar -------------------------------------------------------------------------------- /.mvn/wrapper/maven-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.3.3/apache-maven-3.3.3-bin.zip -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: required 2 | dist: trusty 3 | language: java 4 | jdk: 5 | - oraclejdk8 6 | services: 7 | - docker 8 | before_install: 9 | - cp .travis_settings.xml ~/.m2/settings.xml 10 | install: ./mvnw install -DskipTests --quiet 11 | script: ./mvnw test 12 | addons: 13 | apt: 14 | packages: 15 | - oracle-java8-installer 16 | notifications: 17 | webhooks: 18 | urls: 19 | - https://webhooks.gitter.im/e/5bfce3ef6a29f96a2ded 20 
| on_success: change # options: [always|never|change] default: always 21 | on_failure: always # options: [always|never|change] default: always 22 | on_start: false # default: false 23 | after_success: 24 | - > 25 | [ "$TRAVIS_BRANCH" == "master" ] 26 | && curl -X POST -H "Content-Type: application/json" 27 | --data '{"docker_tag_name": ${TRAVIS_TAG:=latest}}' 28 | https://registry.hub.docker.com/u/buremba/rakam/trigger/$DOCKER_HUB_TOKEN/ -------------------------------------------------------------------------------- /.travis_settings.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | 7 | 8 | 9 | false 10 | 11 | bintray-buremba-maven 12 | bintray 13 | http://dl.bintray.com/buremba/maven 14 | 15 | 16 | 17 | 18 | 19 | false 20 | 21 | bintray-buremba-maven 22 | bintray 23 | http://dl.bintray.com/buremba/maven 24 | 25 | 26 | bintray 27 | 28 | 29 | 30 | bintray 31 | 32 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM maven:3-jdk-8 as build 2 | MAINTAINER Burak Emre Kabakci "emre@rakam.io" 3 | 4 | WORKDIR /var/app 5 | 6 | ADD pom.xml pom.xml 7 | ADD ./rakam/pom.xml rakam/pom.xml 8 | ADD ./rakam-aws/pom.xml rakam-aws/pom.xml 9 | ADD ./rakam-postgresql/pom.xml rakam-postgresql/pom.xml 10 | ADD ./rakam-presto/pom.xml rakam-presto/pom.xml 11 | ADD ./rakam-presto-kafka/pom.xml rakam-presto-kafka/pom.xml 12 | ADD ./rakam-spi/pom.xml rakam-spi/pom.xml 13 | ADD ./mapper/rakam-mapper-geoip-ip2location/pom.xml mapper/rakam-mapper-geoip-ip2location/pom.xml 14 | ADD ./mapper/rakam-mapper-geoip-maxmind/pom.xml mapper/rakam-mapper-geoip-maxmind/pom.xml 15 | ADD ./mapper/rakam-mapper-website/pom.xml mapper/rakam-mapper-website/pom.xml 16 | RUN mvn verify clean --fail-never 17 | 18 | ADD ./rakam/ rakam 19 | ADD ./rakam-aws/ rakam-aws 20 | ADD ./rakam-postgresql/ rakam-postgresql 21 | ADD 
./rakam-presto/ rakam-presto 22 | ADD ./rakam-presto-kafka/ rakam-presto-kafka 23 | ADD ./rakam-spi/ rakam-spi 24 | ADD ./mapper/rakam-mapper-geoip-ip2location/ mapper/rakam-mapper-geoip-ip2location 25 | ADD ./mapper/rakam-mapper-geoip-maxmind/ mapper/rakam-mapper-geoip-maxmind 26 | ADD ./mapper/rakam-mapper-website/ mapper/rakam-mapper-website 27 | RUN mvn package -T 1C -DskipTests=true 28 | 29 | RUN apt-get update 30 | 31 | # Make environment variable active 32 | RUN cd /var/app/rakam/target/rakam-*-bundle/rakam-*/ && \ 33 | mkdir etc && \ 34 | echo '\n-Denv=RAKAM_CONFIG' >> ./etc/jvm.config 35 | 36 | FROM openjdk:8-jre 37 | COPY --from=build /var/app/rakam/target/ /rtmp 38 | ADD ./entrypoint.sh /app/entrypoint.sh 39 | 40 | RUN cp -r /rtmp/rakam-*-bundle/rakam-*/* /app/ && \ 41 | chmod +x /app/entrypoint.sh && \ 42 | rm -rf /rtmp 43 | 44 | ENTRYPOINT ["/app/entrypoint.sh"] 45 | EXPOSE 9999 46 | -------------------------------------------------------------------------------- /Procfile: -------------------------------------------------------------------------------- 1 | web: java $JAVA_OPTS -cp $(echo rakam/target/rakam-*-bundle/rakam-*/lib)/*: -Dstore.adapter=postgresql -Dstore.adapter.postgresql.url="${JDBC_DATABASE_URL}&ssl=true&sslfactory=org.postgresql.ssl.NonValidatingFactory" -Dstore.adapter.postgresql.username=${JDBC_DATABASE_USERNAME} -Dstore.adapter.postgresql.password=${JDBC_DATABASE_PASSWORD} -Dplugin.user.enabled=${ENABLE_USER_PLUGIN} -Devent-stream=server -Devent-explorer.enabled=${ENABLE_EVENT_EXPLORER_PLUGIN} -Duser.funnel-analysis.enabled=${ENABLE_FUNNEL_PLUGIN} -Duser.retention-analysis.enabled=${ENABLE_RETENTION_ANALYSIS_PLUGIN} -Dhttp.server.address=0.0.0.0:${PORT} -Dplugin.geoip.enabled=${ENABLE_GEOIP_PLUGIN} -Dstore.adapter=postgresql -Dplugin.user.storage=postgresql -Dmodule.website.mapper=true -Dmodule.website.mapper.user-agent=true -Dmodule.website.mapper.referrer=true -Dplugin.user.storage.identifier-column=id 
-Dstore.adapter.postgresql.max-connection=8 -Dplugin.geoip.database.url=${GEOIP_DATABASE_URL} -Dplugin.geoip.connection-type-database.url=${GEOIP_CONNECTION_TYPE_URL} -Dui.enable=false -Dautomation.enabled=false -Dmail.smtp.host=127.0.0.1 -Dmail.smtp.user=test -Dplugin.user.actions=email -Dlock-key=${LOCK_KEY} -Dplugin.user.enable_user_mapping=true -Dcustom-data-source.enabled=${ENABLE_CUSTOM_DATA_SOURCES} -Dallow-project-deletion=${ALLOW_PROJECT_DELETION} -Dplugin.user.enable-user-mapping=true -Dlog-identifier=${HEROKU_APP_NAME} -Denv=RAKAM_CONFIG org.rakam.ServiceStarter -------------------------------------------------------------------------------- /app.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Rakam", 3 | "description": "Your analytics service", 4 | "website": "http://rakam.io", 5 | "repository": "https://github.com/rakam-io/rakam", 6 | "logo": "https://avatars3.githubusercontent.com/u/18196166?v=3&s=200", 7 | "success_url": "/", 8 | "keywords": [ 9 | "analytics", 10 | "chart", 11 | "sql", 12 | "statistics", 13 | "graph", 14 | "realtime", 15 | "crm" 16 | ], 17 | "addons": [ 18 | "heroku-postgresql:hobby-dev", 19 | "papertrail:choklad" 20 | ], 21 | "env": { 22 | "LOCK_KEY": { 23 | "description": "Lock key for creating projects. You will need this key when you create a project in your Rakam cluster", 24 | "required": true 25 | }, 26 | "ENABLE_GEOIP_PLUGIN": { 27 | "description": "GeoIP attaches location information to collections that has 'ip' attribute.", 28 | "value": "true" 29 | }, 30 | "GEOIP_DATABASE_URL": { 31 | "description": "Maxmind GeoIP database url. Community version of Maxmind GeoIP is used by default but you can also provide your database URL", 32 | "value": "http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz" 33 | }, 34 | "GEOIP_CONNECTION_TYPE_URL": { 35 | "description": "Maxmind GeoIP connection type url. 
Community version of Maxmind GeoIP is used by default but you can also provide your database URL", 36 | "value": "https://github.com/maxmind/MaxMind-DB/blob/master/test-data/GeoIP2-Connection-Type-Test.mmdb?raw=true" 37 | }, 38 | "ENABLE_CUSTOM_DATA_SOURCES": { 39 | "description": "Access data from your database", 40 | "value": "true" 41 | }, 42 | "ALLOW_PROJECT_DELETION": { 43 | "description": "Allow deleting project", 44 | "value": "false" 45 | }, 46 | "ENABLE_EVENT_EXPLORER_PLUGIN": { 47 | "description": "Event explorer plugin subscribes event streams and notifies new events to you in real-time.", 48 | "value": "true" 49 | }, 50 | "ENABLE_FUNNEL_PLUGIN": { 51 | "description": "Funnel plugin depends on user attribute of events and allows you to run funnel queries.", 52 | "value": "true" 53 | }, 54 | "ENABLE_USER_PLUGIN": { 55 | "description": "Enable user attribution module", 56 | "value": "true" 57 | }, 58 | "ENABLE_RETENTION_ANALYSIS_PLUGIN": { 59 | "description": "Retention analysis plugin depends on user attribute of events and allows you to run retention queries.", 60 | "value": "true" 61 | } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /app.yml: -------------------------------------------------------------------------------- 1 | name: rakam 2 | image: ubuntu-14-04-x64 3 | min_size: 1gb 4 | config: 5 | runcmd: 6 | - wget -qO- https://get.docker.com/ | sh 7 | - docker run -d --name rakam-db -e POSTGRES_PASSWORD=dummy -e POSTGRES_USER=rakam postgres:9.6.1 && docker run --link rakam-db --name rakam -d -p 80:9999 -e RAKAM_CONFIG_LOCK__KEY=mylockKey buremba/rakam 8 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | services: 3 | rakam-db: 4 | image: postgres:11.4 5 | environment: 6 | - POSTGRES_PASSWORD=dummy 7 | - POSTGRES_USER=rakam 8 | rakam-api: 9 | 
build: . 10 | ports: 11 | - "9999:9999" 12 | depends_on: 13 | - rakam-db -------------------------------------------------------------------------------- /entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | echo "$1" 4 | if [[ $1 == "custom" ]]; then 5 | java $JAVA_OPTS -cp /app/lib/*: -Denv=RAKAM_CONFIG -Dplugin.geoip.enabled=true -Dplugin.geoip.database.url=file://tmp/GeoLite2-City.mmdb org.rakam.ServiceStarter 6 | else 7 | java $JAVA_OPTS -cp /app/lib/*: -Denv=RAKAM_CONFIG -Dstore.adapter=postgresql -Dplugin.user.enabled=true -Devent-explorer.enabled=true -Dcustom-data-source.enabled=true -Duser.funnel-analysis.enabled=true -Dplugin.user.enable-user-mapping=true -Duser.retention-analysis.enabled=true -Dplugin.geoip.enabled=true -Dplugin.user.storage=postgresql -Dhttp.server.address=0.0.0.0:9999 -Dplugin.user.storage.identifier-column=id -Dplugin.geoip.database.url=file://tmp/GeoLite2-City.mmdb org.rakam.ServiceStarter 8 | fi 9 | -------------------------------------------------------------------------------- /jvm.config: -------------------------------------------------------------------------------- 1 | -server 2 | -Xmx2G 3 | -XX:+HeapDumpOnOutOfMemoryError 4 | -XX:OnOutOfMemoryError=kill -9 %p -------------------------------------------------------------------------------- /log.properties: -------------------------------------------------------------------------------- 1 | org.rakam=DEBUG 2 | io.netty=DEBUG -------------------------------------------------------------------------------- /mapper/rakam-mapper-geoip-ip2location/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | rakam-root 5 | org.rakam 6 | 0.14-SNAPSHOT 7 | ../../pom.xml 8 | 9 | 4.0.0 10 | 11 | rakam-mapper-geoip-ip2location 12 | 13 | 14 | 15 | com.google.inject 16 | guice 17 | 18 | 19 | com.google.auto.service 20 | auto-service 21 | true 22 | 23 | 24 | org.rakam 25 | 
rakam-spi 26 | ${project.version} 27 | provided 28 | 29 | 30 | com.fasterxml.jackson.core 31 | jackson-databind 32 | 33 | 34 | org.apache.avro 35 | avro 36 | 37 | 38 | io.airlift 39 | configuration 40 | 41 | 42 | com.google.guava 43 | guava 44 | 45 | 46 | io.netty 47 | netty-all 48 | 49 | 50 | io.airlift 51 | log 52 | 53 | 54 | com.google.inject.extensions 55 | guice-multibindings 56 | 57 | 58 | org.testng 59 | testng 60 | test 61 | 62 | 63 | 64 | -------------------------------------------------------------------------------- /mapper/rakam-mapper-geoip-ip2location/src/main/java/org/rakam/collection/mapper/geoip/maxmind/ip2location/CSV.java: -------------------------------------------------------------------------------- 1 | package org.rakam.collection.mapper.geoip.maxmind.ip2location; 2 | 3 | import java.util.regex.Matcher; 4 | import java.util.regex.Pattern; 5 | 6 | public class CSV { 7 | private static final java.lang.String CSV_PATTERN = "\"([0-9]+)\",\"([0-9]+)\",\"([^\"]+)\",\"([^\"]+)\",\"([^\"]+)\",\"([^\"]+)\",\"([0-9.-]+)\",\"([0-9.-]+)\""; 8 | 9 | public final long ipStart; 10 | public final long ipEnd; 11 | public final String country; 12 | public final String stateProv; 13 | public final String city; 14 | public final double latitude; 15 | public final double longitude; 16 | 17 | private CSV(long ipStart, long ipEnd, 18 | String country, String stateProv, String city, 19 | double latitude, double longitude) { 20 | this.ipStart = ipStart; 21 | this.ipEnd = ipEnd; 22 | this.country = country; 23 | this.stateProv = stateProv; 24 | this.city = city; 25 | this.latitude = latitude; 26 | this.longitude = longitude; 27 | } 28 | 29 | public static CSV parse(String csv) { 30 | Pattern r = Pattern.compile(CSV_PATTERN); 31 | Matcher m = r.matcher(csv); 32 | 33 | if (m.find()) { 34 | return new CSV( 35 | Long.parseLong(m.group(1)), 36 | Long.parseLong(m.group(2)), 37 | m.group(4), 38 | m.group(5), 39 | m.group(6), 40 | Double.parseDouble(m.group(7)), 41 | 
Double.parseDouble(m.group(8)) 42 | ); 43 | } 44 | 45 | throw new IllegalArgumentException(String.format("CSV doesn't match the pattern. (%s)", csv)); 46 | } 47 | } -------------------------------------------------------------------------------- /mapper/rakam-mapper-geoip-ip2location/src/main/java/org/rakam/collection/mapper/geoip/maxmind/ip2location/Coordination.java: -------------------------------------------------------------------------------- 1 | package org.rakam.collection.mapper.geoip.maxmind.ip2location; 2 | 3 | public class Coordination { 4 | public final double latitude; 5 | public final double longitude; 6 | 7 | private Coordination(double lat, double lon) { 8 | this.latitude = lat; 9 | this.longitude = lon; 10 | } 11 | 12 | public static Coordination of(double lat, double lon) { 13 | return new Coordination(lat, lon); 14 | } 15 | 16 | @Override 17 | public String toString() { 18 | return "Coordination{" + 19 | "latitude=" + latitude + 20 | ", longitude=" + longitude + 21 | '}'; 22 | } 23 | 24 | @Override 25 | public boolean equals(Object o) { 26 | if (this == o) { 27 | return true; 28 | } 29 | if (o == null || getClass() != o.getClass()) { 30 | return false; 31 | } 32 | 33 | Coordination that = (Coordination) o; 34 | 35 | if (Double.compare(that.latitude, latitude) != 0) { 36 | return false; 37 | } 38 | return Double.compare(that.longitude, longitude) == 0; 39 | } 40 | 41 | @Override 42 | public int hashCode() { 43 | int result; 44 | long temp; 45 | temp = Double.doubleToLongBits(latitude); 46 | result = (int) (temp ^ (temp >>> 32)); 47 | temp = Double.doubleToLongBits(longitude); 48 | result = 31 * result + (int) (temp ^ (temp >>> 32)); 49 | return result; 50 | } 51 | } -------------------------------------------------------------------------------- /mapper/rakam-mapper-geoip-ip2location/src/main/java/org/rakam/collection/mapper/geoip/maxmind/ip2location/GeoIPModuleConfig.java: 
-------------------------------------------------------------------------------- 1 | package org.rakam.collection.mapper.geoip.maxmind.ip2location; 2 | 3 | import io.airlift.configuration.Config; 4 | 5 | import java.util.List; 6 | 7 | public class GeoIPModuleConfig { 8 | private List attributes; 9 | private String databaseUrl = null; 10 | 11 | public String getDatabaseUrl() { 12 | return databaseUrl; 13 | } 14 | 15 | @Config("plugin.geoip.database.url") 16 | public GeoIPModuleConfig setDatabaseUrl(String url) { 17 | this.databaseUrl = url; 18 | return this; 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /mapper/rakam-mapper-geoip-ip2location/src/main/java/org/rakam/collection/mapper/geoip/maxmind/ip2location/GeoLocation.java: -------------------------------------------------------------------------------- 1 | package org.rakam.collection.mapper.geoip.maxmind.ip2location; 2 | 3 | import java.util.Map; 4 | import java.util.concurrent.ConcurrentHashMap; 5 | 6 | public class GeoLocation { 7 | private static final Map GEO_LOCATION_MAP = new ConcurrentHashMap<>(); 8 | 9 | public final String country; 10 | public final String stateProv; 11 | public final String city; 12 | public final Coordination coordination; 13 | 14 | private GeoLocation(String country, String stateProv, String city, Coordination coordination) { 15 | this.country = country; 16 | this.stateProv = stateProv; 17 | this.city = city; 18 | this.coordination = coordination; 19 | } 20 | 21 | public static GeoLocation of(String country, String stateProv, String city, Coordination coordination) { 22 | if (GEO_LOCATION_MAP.containsKey(coordination)) { 23 | return GEO_LOCATION_MAP.get(coordination); 24 | } 25 | GeoLocation newLocation = new GeoLocation(country, stateProv, city, coordination); 26 | GEO_LOCATION_MAP.put(coordination, newLocation); 27 | 28 | return newLocation; 29 | } 30 | } 
-------------------------------------------------------------------------------- /mapper/rakam-mapper-geoip-ip2location/src/main/java/org/rakam/collection/mapper/geoip/maxmind/ip2location/IPReader.java: -------------------------------------------------------------------------------- 1 | package org.rakam.collection.mapper.geoip.maxmind.ip2location; 2 | 3 | import org.rakam.collection.mapper.geoip.maxmind.ip2location.utils.IP4Converter; 4 | 5 | import java.io.*; 6 | import java.math.BigInteger; 7 | import java.net.InetAddress; 8 | import java.net.UnknownHostException; 9 | import java.util.Map; 10 | import java.util.NavigableMap; 11 | import java.util.concurrent.ConcurrentSkipListMap; 12 | 13 | public class IPReader { 14 | private final NavigableMap ipLookup; 15 | 16 | private IPReader(NavigableMap ipLookup) { 17 | this.ipLookup = ipLookup; 18 | } 19 | 20 | public static IPReader build(String dbPath) 21 | throws IOException { 22 | File ipdb = new File(dbPath); 23 | InputStream inputStream = new FileInputStream(ipdb); 24 | return build(inputStream); 25 | } 26 | 27 | public static IPReader build(InputStream inputStream) 28 | throws IOException { 29 | NavigableMap lookup = new ConcurrentSkipListMap<>(); 30 | 31 | BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream)); 32 | while (reader.ready()) { 33 | CSV csv = CSV.parse(reader.readLine()); 34 | lookup.put(csv.ipStart, csv); 35 | } 36 | reader.close(); 37 | 38 | return new IPReader(lookup); 39 | } 40 | 41 | public GeoLocation lookup(String ipAddress) 42 | throws UnknownHostException { 43 | InetAddress inetAddress = InetAddress.getByName(ipAddress); 44 | return lookup(inetAddress); 45 | } 46 | 47 | public GeoLocation lookup(InetAddress inetAddress) { 48 | return lookup(IP4Converter.toLong(inetAddress.getAddress())); 49 | } 50 | 51 | private GeoLocation lookup(Long address) { 52 | Map.Entry entry = ipLookup.lowerEntry(address); 53 | if (entry == null) { 54 | return null; 55 | } 56 | 57 | CSV csv = 
entry.getValue(); 58 | if (csv.ipEnd < address) { 59 | return null; 60 | } 61 | 62 | return GeoLocation.of(csv.country, csv.stateProv, csv.city, Coordination.of(csv.latitude, csv.longitude)); 63 | } 64 | 65 | private Long toLong(byte[] address) { 66 | return new BigInteger(address).longValue(); 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /mapper/rakam-mapper-geoip-ip2location/src/main/java/org/rakam/collection/mapper/geoip/maxmind/ip2location/utils/IP4Converter.java: -------------------------------------------------------------------------------- 1 | package org.rakam.collection.mapper.geoip.maxmind.ip2location.utils; 2 | 3 | import java.math.BigInteger; 4 | import java.net.InetAddress; 5 | import java.net.UnknownHostException; 6 | 7 | public class IP4Converter { 8 | public static String toIP(Long longValue) { 9 | return toIP(BigInteger.valueOf(longValue).toByteArray()); 10 | } 11 | 12 | public static String toIP(byte[] address) { 13 | int startIdx = (address.length > 4 ? 1 : 0); 14 | return String.format("%d.%d.%d.%d", address[startIdx] & 0xFF, address[startIdx + 1] & 0xFF, address[startIdx + 2] & 0xFF, address[startIdx + 3] & 0xFF); 15 | } 16 | 17 | public static long toLong(String ip) 18 | throws UnknownHostException { 19 | return toLong(InetAddress.getByName(ip).getAddress()); 20 | } 21 | 22 | public static long toLong(byte[] address) { 23 | long longValue = 0; 24 | int startIdx = (address.length > 4 ? 
1 : 0); 25 | for (int i = startIdx; i < address.length; i++) { 26 | longValue <<= 8; 27 | longValue += (long) (address[i] & 0xFF); 28 | } 29 | 30 | return longValue; 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /mapper/rakam-mapper-geoip-maxmind/src/main/java/org/rakam/collection/mapper/geoip/maxmind/MaxmindGeoIPModuleConfig.java: -------------------------------------------------------------------------------- 1 | package org.rakam.collection.mapper.geoip.maxmind; 2 | 3 | import com.google.common.base.Splitter; 4 | import com.google.common.base.Throwables; 5 | import com.google.common.collect.ImmutableList; 6 | import io.airlift.configuration.Config; 7 | import io.airlift.configuration.ConfigDescription; 8 | 9 | import javax.validation.constraints.NotNull; 10 | import java.net.MalformedURLException; 11 | import java.net.URL; 12 | import java.util.List; 13 | 14 | public class MaxmindGeoIPModuleConfig { 15 | 16 | private List attributes; 17 | private URL databaseUrl = null; 18 | private URL ispDatabaseUrl; 19 | private URL connectionTypeDatabaseUrl; 20 | private boolean useExistingFields; 21 | 22 | public URL getIspDatabaseUrl() { 23 | return ispDatabaseUrl; 24 | } 25 | 26 | @Config("plugin.geoip.isp-database.url") 27 | public MaxmindGeoIPModuleConfig setIspDatabaseUrl(URL url) { 28 | this.ispDatabaseUrl = url; 29 | return this; 30 | } 31 | 32 | public URL getConnectionTypeDatabaseUrl() { 33 | return connectionTypeDatabaseUrl; 34 | } 35 | 36 | @Config("plugin.geoip.connection-type-database.url") 37 | public MaxmindGeoIPModuleConfig setConnectionTypeDatabaseUrl(URL url) { 38 | this.connectionTypeDatabaseUrl = url; 39 | return this; 40 | } 41 | 42 | @NotNull 43 | public URL getDatabaseUrl() { 44 | return databaseUrl; 45 | } 46 | 47 | @Config("plugin.geoip.database.url") 48 | public MaxmindGeoIPModuleConfig setDatabaseUrl(URL url) { 49 | this.databaseUrl = url; 50 | return this; 51 | } 52 | 53 | public List 
getAttributes() { 54 | return attributes; 55 | } 56 | 57 | @Config("plugin.geoip.attributes") 58 | @ConfigDescription("The list of attributes that will be attached to event. " + 59 | "Available attributes: country, country_code, region,city, latitude, longitude, timezone") 60 | public MaxmindGeoIPModuleConfig setAttributes(String attributes) { 61 | this.attributes = ImmutableList.copyOf(Splitter.on(',').omitEmptyStrings().trimResults().split(attributes)); 62 | return this; 63 | } 64 | 65 | public boolean getUseExistingFields() { 66 | return useExistingFields; 67 | } 68 | 69 | @Config("plugin.geoip.use-existing-fields") 70 | public MaxmindGeoIPModuleConfig setUseExistingFields(boolean useExistingFields) { 71 | this.useExistingFields = useExistingFields; 72 | return this; 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /mapper/rakam-mapper-geoip-maxmind/src/main/resources/META-INF/services/org.rakam.plugin.RakamModule: -------------------------------------------------------------------------------- 1 | org.rakam.collection.mapper.geoip.maxmind.MaxmindGeoIPModule 2 | -------------------------------------------------------------------------------- /mapper/rakam-mapper-website/src/main/java/org/rakam/module/website/WebsiteEventMapperModule.java: -------------------------------------------------------------------------------- 1 | package org.rakam.module.website; 2 | 3 | import com.google.auto.service.AutoService; 4 | import com.google.inject.Binder; 5 | import com.google.inject.multibindings.Multibinder; 6 | import org.rakam.plugin.EventMapper; 7 | import org.rakam.plugin.RakamModule; 8 | import org.rakam.plugin.user.UserPropertyMapper; 9 | import org.rakam.util.ConditionalModule; 10 | 11 | @AutoService(RakamModule.class) 12 | @ConditionalModule(config = "module.website.mapper", value = "true") 13 | public class WebsiteEventMapperModule 14 | extends RakamModule { 15 | @Override 16 | protected void setup(Binder binder) 
{ 17 | Multibinder userPropertyMappers = Multibinder.newSetBinder(binder, UserPropertyMapper.class); 18 | Multibinder eventMappers = Multibinder.newSetBinder(binder, EventMapper.class); 19 | 20 | WebsiteMapperConfig config = buildConfigObject(WebsiteMapperConfig.class); 21 | if (config.getReferrer()) { 22 | ReferrerEventMapper referrerEventMapper = new ReferrerEventMapper(); 23 | eventMappers.addBinding().toInstance(referrerEventMapper); 24 | userPropertyMappers.addBinding().toInstance(referrerEventMapper); 25 | } 26 | if (config.getUserAgent()) { 27 | UserAgentEventMapper userAgentEventMapper = new UserAgentEventMapper(config); 28 | eventMappers.addBinding().toInstance(userAgentEventMapper); 29 | userPropertyMappers.addBinding().toInstance(userAgentEventMapper); 30 | } 31 | 32 | // eventMappers.addBinding().to(UserIdEventMapper.class).in(Scopes.SINGLETON); 33 | // userPropertyMappers.addBinding().to(UserIdEventMapper.class).in(Scopes.SINGLETON); 34 | } 35 | 36 | @Override 37 | public String name() { 38 | return "Event website related attribute mapping module"; 39 | } 40 | 41 | @Override 42 | public String description() { 43 | return "Resolves _referrer, _user_agent attributes and related fields such as user_agent_version, referrer_medium to the event."; 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /mapper/rakam-mapper-website/src/main/java/org/rakam/module/website/WebsiteMapperConfig.java: -------------------------------------------------------------------------------- 1 | package org.rakam.module.website; 2 | 3 | import io.airlift.configuration.Config; 4 | 5 | public class WebsiteMapperConfig { 6 | private boolean userAgent = true; 7 | private boolean referrer = true; 8 | private boolean trackSpiders = false; 9 | 10 | public boolean getReferrer() { 11 | return referrer; 12 | } 13 | 14 | @Config("module.website.mapper.referrer") 15 | public WebsiteMapperConfig setReferrer(boolean referrer) { 16 | this.referrer 
= true; 17 | return this; 18 | } 19 | 20 | public boolean getUserAgent() { 21 | return userAgent; 22 | } 23 | 24 | @Config("module.website.mapper.user-agent") 25 | public WebsiteMapperConfig setUserAgent(boolean enabled) { 26 | this.userAgent = enabled; 27 | return this; 28 | } 29 | 30 | public boolean getTrackSpiders() { 31 | return trackSpiders; 32 | } 33 | 34 | @Config("module.website.mapper.user_agent.track_spiders") 35 | public WebsiteMapperConfig setTrackSpiders(boolean trackSpiders) { 36 | this.trackSpiders = true; 37 | return this; 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /mapper/rakam-mapper-website/src/main/resources/META-INF/services/org.rakam.plugin.RakamModule: -------------------------------------------------------------------------------- 1 | org.rakam.module.website.WebsiteEventMapperModule 2 | -------------------------------------------------------------------------------- /rakam-aws/src/main/java/org/apache/avro/generic/FilteredRecordWriter.java: -------------------------------------------------------------------------------- 1 | package org.apache.avro.generic; 2 | 3 | import org.apache.avro.Schema; 4 | import org.apache.avro.io.Encoder; 5 | 6 | import java.io.IOException; 7 | 8 | public class FilteredRecordWriter extends GenericDatumWriter { 9 | private final GenericData data; 10 | 11 | public FilteredRecordWriter(Schema root, GenericData data) { 12 | super(root, data); 13 | this.data = data; 14 | } 15 | 16 | @Override 17 | public void writeRecord(Schema schema, Object datum, Encoder out) throws IOException { 18 | Object state = data.getRecordState(datum, schema); 19 | for (Schema.Field f : schema.getFields()) { 20 | if (f.schema().getType() != Schema.Type.NULL) { 21 | writeField(datum, f, out, state); 22 | } 23 | } 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /rakam-aws/src/main/java/org/rakam/aws/AWSConfig.java: 
-------------------------------------------------------------------------------- 1 | package org.rakam.aws; 2 | 3 | import com.amazonaws.auth.AWSCredentialsProvider; 4 | import com.amazonaws.auth.BasicAWSCredentials; 5 | import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; 6 | import com.amazonaws.internal.StaticCredentialsProvider; 7 | import com.amazonaws.regions.Region; 8 | import io.airlift.configuration.Config; 9 | 10 | import javax.validation.constraints.NotNull; 11 | 12 | public class AWSConfig { 13 | private String accessKey; 14 | private String secretAccessKey; 15 | private String eventStoreStreamName; 16 | private String region; 17 | private String eventStoreBulkS3Bucket; 18 | private String s3Endpoint; 19 | private String kinesisEndpoint; 20 | private String dynamodbEndpoint; 21 | 22 | public String getEventStoreStreamName() { 23 | return eventStoreStreamName; 24 | } 25 | 26 | @Config("event.store.kinesis.stream") 27 | public AWSConfig setEventStoreStreamName(String eventStoreStreamName) { 28 | this.eventStoreStreamName = eventStoreStreamName; 29 | return this; 30 | } 31 | 32 | public String getEventStoreBulkS3Bucket() { 33 | return eventStoreBulkS3Bucket; 34 | } 35 | 36 | @Config("event.store.bulk.s3-bucket") 37 | public AWSConfig setEventStoreBulkS3Bucket(String eventStoreBulkS3Bucket) { 38 | this.eventStoreBulkS3Bucket = eventStoreBulkS3Bucket; 39 | return this; 40 | } 41 | 42 | public String getAccessKey() { 43 | return accessKey; 44 | } 45 | 46 | @Config("aws.access-key") 47 | public AWSConfig setAccessKey(String accessKey) { 48 | this.accessKey = accessKey; 49 | return this; 50 | } 51 | 52 | public String getRegion() { 53 | return region; 54 | } 55 | 56 | @Config("aws.region") 57 | public AWSConfig setRegion(String region) { 58 | this.region = region; 59 | return this; 60 | } 61 | 62 | public String getS3Endpoint() { 63 | return s3Endpoint; 64 | } 65 | 66 | @Config("aws.s3-endpoint") 67 | public AWSConfig setS3Endpoint(String s3Endpoint) { 
68 | this.s3Endpoint = s3Endpoint; 69 | return this; 70 | } 71 | 72 | public String getKinesisEndpoint() { 73 | return kinesisEndpoint; 74 | } 75 | 76 | @Config("aws.kinesis-endpoint") 77 | public AWSConfig setKinesisEndpoint(String kinesisEndpoint) { 78 | this.kinesisEndpoint = kinesisEndpoint; 79 | return this; 80 | } 81 | 82 | public String getSecretAccessKey() { 83 | return secretAccessKey; 84 | } 85 | 86 | @Config("aws.secret-access-key") 87 | public AWSConfig setSecretAccessKey(String secretAccessKey) { 88 | this.secretAccessKey = secretAccessKey; 89 | return this; 90 | } 91 | 92 | public AWSCredentialsProvider getCredentials() { 93 | // TODO: add an extra option the allow these values to be NULL. 94 | if (accessKey == null || secretAccessKey == null) { 95 | return new DefaultAWSCredentialsProviderChain(); 96 | } 97 | 98 | return new StaticCredentialsProvider(new BasicAWSCredentials(getAccessKey(), getSecretAccessKey())); 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /rakam-aws/src/main/java/org/rakam/aws/kinesis/AWSKinesisModule.java: -------------------------------------------------------------------------------- 1 | package org.rakam.aws.kinesis; 2 | 3 | import com.google.auto.service.AutoService; 4 | import com.google.inject.Binder; 5 | import com.google.inject.Scopes; 6 | import io.airlift.configuration.Config; 7 | import org.rakam.aws.AWSConfig; 8 | import org.rakam.plugin.EventStore; 9 | import org.rakam.plugin.RakamModule; 10 | import org.rakam.util.ConditionalModule; 11 | 12 | import static io.airlift.configuration.ConfigBinder.configBinder; 13 | 14 | @AutoService(RakamModule.class) 15 | @ConditionalModule(config = "event.store", value = "kinesis") 16 | public class AWSKinesisModule extends RakamModule { 17 | @Override 18 | protected void setup(Binder binder) { 19 | configBinder(binder).bindConfig(AWSConfig.class); 20 | configBinder(binder).bindConfig(PrestoStreamConfig.class); 21 | 
binder.bind(EventStore.class).to(AWSKinesisEventStore.class).in(Scopes.SINGLETON); 22 | } 23 | 24 | @Override 25 | public String name() { 26 | return "AWS Kinesis event store module"; 27 | } 28 | 29 | @Override 30 | public String description() { 31 | return "Puts your events directly to AWS Kinesis streams."; 32 | } 33 | 34 | public static class PrestoStreamConfig { 35 | private int port; 36 | 37 | public int getPort() { 38 | return port; 39 | } 40 | 41 | @Config("presto.streaming.port") 42 | public void setPort(int port) { 43 | this.port = port; 44 | } 45 | } 46 | 47 | } 48 | -------------------------------------------------------------------------------- /rakam-aws/src/main/java/org/rakam/aws/kinesis/ForStreamer.java: -------------------------------------------------------------------------------- 1 | package org.rakam.aws.kinesis; 2 | 3 | import javax.inject.Qualifier; 4 | import java.lang.annotation.Retention; 5 | import java.lang.annotation.Target; 6 | 7 | import static java.lang.annotation.ElementType.*; 8 | import static java.lang.annotation.RetentionPolicy.RUNTIME; 9 | 10 | @Retention(RUNTIME) 11 | @Target({FIELD, PARAMETER, METHOD}) 12 | @Qualifier 13 | public @interface ForStreamer { 14 | } 15 | -------------------------------------------------------------------------------- /rakam-aws/src/main/java/org/rakam/aws/kinesis/StreamQuery.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed under the Apache License, Version 2.0 (the "License"); 3 | * you may not use this file except in compliance with the License. 4 | * You may obtain a copy of the License at 5 | * 6 | * http://www.apache.org/licenses/LICENSE-2.0 7 | * 8 | * Unless required by applicable law or agreed to in writing, software 9 | * distributed under the License is distributed on an "AS IS" BASIS, 10 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
11 | * See the License for the specific language governing permissions and 12 | * limitations under the License. 13 | */ 14 | package org.rakam.aws.kinesis; 15 | 16 | import com.fasterxml.jackson.annotation.JsonCreator; 17 | import com.fasterxml.jackson.annotation.JsonProperty; 18 | import org.rakam.plugin.stream.CollectionStreamQuery; 19 | import org.rakam.server.http.annotations.ApiParam; 20 | 21 | import java.util.List; 22 | 23 | public class StreamQuery { 24 | public final String project; 25 | public final List collections; 26 | 27 | @JsonCreator 28 | public StreamQuery(@ApiParam("project") String project, 29 | @ApiParam("collections") List collections) { 30 | this.project = project; 31 | this.collections = collections; 32 | } 33 | 34 | @JsonProperty 35 | public String getProject() { 36 | return project; 37 | } 38 | 39 | @JsonProperty 40 | public List getCollections() { 41 | return collections; 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /rakam-aws/src/test/java/org/rakam/aws/DynamodbUtil.java: -------------------------------------------------------------------------------- 1 | package org.rakam.aws; 2 | 3 | import io.airlift.log.Logger; 4 | 5 | import java.io.File; 6 | import java.io.IOException; 7 | import java.net.ServerSocket; 8 | import java.nio.file.Path; 9 | 10 | import static com.google.common.collect.ImmutableList.of; 11 | import static java.lang.String.format; 12 | import static java.lang.System.getProperty; 13 | 14 | public class DynamodbUtil { 15 | private final static Logger LOGGER = Logger.get(DynamodbUtil.class); 16 | 17 | public static int randomPort() 18 | throws IOException { 19 | try (ServerSocket socket = new ServerSocket(0)) { 20 | return socket.getLocalPort(); 21 | } 22 | } 23 | 24 | public static DynamodbProcess createDynamodbProcess() 25 | throws Exception { 26 | int randomPort = randomPort(); 27 | Path mainDir = new File(getProperty("user.dir"), ".test/dynamodb").toPath(); 28 | 29 
| Process dynamodbServer = new ProcessBuilder(of("java", format("-Djava.library.path=%s", 30 | mainDir.resolve("DynamoDBLocal_lib").toFile().getAbsolutePath()), 31 | "-jar", mainDir.resolve("DynamoDBLocal.jar").toFile().getAbsolutePath(), 32 | "-inMemory", "--port", Integer.toString(randomPort))) 33 | .start(); 34 | 35 | LOGGER.info("Dynamodb local started at %d port", randomPort); 36 | 37 | return new DynamodbProcess(dynamodbServer, randomPort); 38 | } 39 | 40 | public static class DynamodbProcess { 41 | public final Process process; 42 | public final int port; 43 | 44 | public DynamodbProcess(Process process, int port) { 45 | this.process = process; 46 | this.port = port; 47 | } 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /rakam-postgresql/src/main/java/org/rakam/postgresql/PostgresqlApiKeyService.java: -------------------------------------------------------------------------------- 1 | package org.rakam.postgresql; 2 | 3 | import org.rakam.analysis.JDBCPoolDataSource; 4 | import org.rakam.postgresql.analysis.JDBCApiKeyService; 5 | 6 | import javax.inject.Inject; 7 | import java.sql.Connection; 8 | import java.sql.SQLException; 9 | import java.sql.Statement; 10 | 11 | public class PostgresqlApiKeyService extends JDBCApiKeyService { 12 | @Inject 13 | public PostgresqlApiKeyService(JDBCPoolDataSource connectionPool) { 14 | super(connectionPool); 15 | } 16 | 17 | @Override 18 | public void setup() { 19 | try (Connection connection = connectionPool.getConnection()) { 20 | Statement statement = connection.createStatement(); 21 | statement.execute("CREATE TABLE IF NOT EXISTS api_key (" + 22 | " id SERIAL NOT NULL,\n" + 23 | " project VARCHAR(255) NOT NULL,\n" + 24 | " write_key VARCHAR(255) NOT NULL,\n" + 25 | " master_key VARCHAR(255) NOT NULL,\n" + 26 | " created_at TIMESTAMP default current_timestamp NOT NULL," + 27 | "PRIMARY KEY (id)\n" + 28 | " )"); 29 | } catch (SQLException e) { 30 | throw new 
RuntimeException(e); 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /rakam-postgresql/src/main/java/org/rakam/postgresql/PostgresqlConfigManager.java: -------------------------------------------------------------------------------- 1 | package org.rakam.postgresql; 2 | 3 | import com.google.common.base.Throwables; 4 | import com.google.inject.name.Named; 5 | import org.rakam.analysis.ConfigManager; 6 | import org.rakam.analysis.JDBCPoolDataSource; 7 | import org.rakam.util.JsonHelper; 8 | import org.skife.jdbi.v2.DBI; 9 | import org.skife.jdbi.v2.Handle; 10 | 11 | import javax.annotation.PostConstruct; 12 | import javax.inject.Inject; 13 | import java.util.Locale; 14 | 15 | public class PostgresqlConfigManager implements ConfigManager { 16 | 17 | private final DBI dbi; 18 | 19 | @Inject 20 | public PostgresqlConfigManager(@Named("metadata.store.jdbc") JDBCPoolDataSource dataSource) { 21 | this.dbi = new DBI(dataSource); 22 | } 23 | 24 | @PostConstruct 25 | public void setup() { 26 | try (Handle handle = dbi.open()) { 27 | handle.createStatement("CREATE TABLE IF NOT EXISTS config (" + 28 | " project VARCHAR(255) NOT NULL," + 29 | " name VARCHAR(255) NOT NULL," + 30 | " value TEXT," + 31 | " PRIMARY KEY (project, name)" + 32 | " )") 33 | .execute(); 34 | } 35 | } 36 | 37 | @Override 38 | public T getConfig(String project, String configName, Class clazz) { 39 | try (Handle handle = dbi.open()) { 40 | return handle.createQuery("SELECT value FROM config WHERE project = :project AND name = :name") 41 | .bind("project", project) 42 | .bind("name", configName.toUpperCase(Locale.ENGLISH)).map((i, resultSet, statementContext) -> { 43 | return JsonHelper.read(resultSet.getString(1), clazz); 44 | }).first(); 45 | } 46 | } 47 | 48 | @Override 49 | public T setConfigOnce(String project, String configName, T value) { 50 | try (Handle handle = dbi.open()) { 51 | T config = getConfig(project, configName, (Class) 
value.getClass()); 52 | 53 | if (config == null) { 54 | try { 55 | handle.createStatement("INSERT INTO config (project, name, value) VALUES (:project, :name, :value)") 56 | .bind("project", project) 57 | .bind("name", configName.toUpperCase(Locale.ENGLISH)) 58 | .bind("value", JsonHelper.encode(value)).execute(); 59 | return value; 60 | } catch (Exception e) { 61 | // handle race condition 62 | T lastValue = getConfig(project, configName, (Class) value.getClass()); 63 | if (lastValue == null) { 64 | throw Throwables.propagate(e); 65 | } 66 | return lastValue; 67 | } 68 | } else { 69 | return config; 70 | } 71 | } 72 | } 73 | 74 | @Override 75 | public void clear() { 76 | try (Handle handle = dbi.open()) { 77 | handle.createStatement("DELETE FROM config").execute(); 78 | } 79 | } 80 | 81 | @Override 82 | public void setConfig(String project, String configName, T value) { 83 | try (Handle handle = dbi.open()) { 84 | try { 85 | handle.createStatement("INSERT INTO config (project, name, value) VALUES (:project, :name, :value)") 86 | .bind("project", project) 87 | .bind("name", configName.toUpperCase(Locale.ENGLISH)) 88 | .bind("value", JsonHelper.encode(value)).execute(); 89 | } catch (Exception e) { 90 | handle.createStatement("UPDATE config SET value = :value WHERE project = :project AND name = :name") 91 | .bind("project", project) 92 | .bind("name", configName.toUpperCase(Locale.ENGLISH)) 93 | .bind("value", JsonHelper.encode(value)).execute(); 94 | } 95 | } 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /rakam-postgresql/src/main/java/org/rakam/postgresql/analysis/PostgresqlConfig.java: -------------------------------------------------------------------------------- 1 | package org.rakam.postgresql.analysis; 2 | 3 | import io.airlift.configuration.Config; 4 | 5 | public class PostgresqlConfig { 6 | 7 | private boolean autoIndexColumns = true; 8 | private boolean enableEventStore = true; 9 | 10 | public boolean 
isAutoIndexColumns() { 11 | return autoIndexColumns; 12 | } 13 | 14 | @Config("postgresql.auto-index-columns") 15 | public PostgresqlConfig setAutoIndexColumns(boolean indexColumns) { 16 | this.autoIndexColumns = indexColumns; 17 | return this; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /rakam-postgresql/src/main/java/org/rakam/postgresql/plugin/user/PostgresqlUserModule.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed under the Apache License, Version 2.0 (the "License"); 3 | * you may not use this file except in compliance with the License. 4 | * You may obtain a copy of the License at 5 | * 6 | * http://www.apache.org/licenses/LICENSE-2.0 7 | * 8 | * Unless required by applicable law or agreed to in writing, software 9 | * distributed under the License is distributed on an "AS IS" BASIS, 10 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | * See the License for the specific language governing permissions and 12 | * limitations under the License. 
13 | */ 14 | package org.rakam.postgresql.plugin.user; 15 | 16 | import com.google.auto.service.AutoService; 17 | import com.google.inject.Binder; 18 | import com.google.inject.Scopes; 19 | import com.google.inject.name.Names; 20 | import org.rakam.analysis.JDBCPoolDataSource; 21 | import org.rakam.config.JDBCConfig; 22 | import org.rakam.plugin.RakamModule; 23 | import org.rakam.plugin.user.UserStorage; 24 | import org.rakam.util.ConditionalModule; 25 | 26 | @AutoService(RakamModule.class) 27 | @ConditionalModule(config = "plugin.user.storage", value = "postgresql") 28 | public class PostgresqlUserModule extends RakamModule { 29 | @Override 30 | protected void setup(Binder binder) { 31 | JDBCConfig config = buildConfigObject(JDBCConfig.class, "store.adapter.postgresql"); 32 | 33 | binder.bind(JDBCPoolDataSource.class) 34 | .annotatedWith(Names.named("store.adapter.postgresql")) 35 | .toInstance(JDBCPoolDataSource.getOrCreateDataSource(config)); 36 | 37 | binder.bind(UserStorage.class).to(PostgresqlUserStorage.class) 38 | .in(Scopes.SINGLETON); 39 | 40 | binder.bind(boolean.class).annotatedWith(Names.named("user.storage.postgresql")) 41 | .toInstance("postgresql".equals(true)); 42 | } 43 | 44 | @Override 45 | public String name() { 46 | return "Postgresql backend for user storage"; 47 | } 48 | 49 | @Override 50 | public String description() { 51 | return "Postgresql user storage backend for basic CRUD and search operations."; 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /rakam-postgresql/src/main/java/org/rakam/postgresql/plugin/user/PostgresqlUserService.java: -------------------------------------------------------------------------------- 1 | package org.rakam.postgresql.plugin.user; 2 | 3 | import com.google.common.collect.ImmutableMap; 4 | import org.rakam.collection.FieldType; 5 | import org.rakam.collection.SchemaField; 6 | import org.rakam.plugin.user.AbstractUserService; 7 | import 
org.rakam.plugin.user.ISingleUserBatchOperation; 8 | 9 | import javax.inject.Inject; 10 | import java.util.List; 11 | import java.util.Map; 12 | import java.util.concurrent.CompletableFuture; 13 | 14 | import static com.google.common.collect.ImmutableList.of; 15 | 16 | public class PostgresqlUserService extends AbstractUserService { 17 | public static final String ANONYMOUS_ID_MAPPING = "$anonymous_id_mapping"; 18 | protected static final Map> ANONYMOUS_USER_MAPPING = ImmutableMap.of( 19 | FieldType.STRING, of( 20 | new SchemaField("id", FieldType.STRING), 21 | new SchemaField("_user", FieldType.STRING), 22 | new SchemaField("created_at", FieldType.TIMESTAMP), 23 | new SchemaField("merged_at", FieldType.TIMESTAMP)), 24 | 25 | FieldType.LONG, of( 26 | new SchemaField("id", FieldType.STRING), 27 | new SchemaField("_user", FieldType.STRING), 28 | new SchemaField("created_at", FieldType.TIMESTAMP), 29 | new SchemaField("merged_at", FieldType.TIMESTAMP)), 30 | 31 | FieldType.INTEGER, of( 32 | new SchemaField("id", FieldType.STRING), 33 | new SchemaField("_user", FieldType.STRING), 34 | new SchemaField("created_at", FieldType.TIMESTAMP), 35 | new SchemaField("merged_at", FieldType.TIMESTAMP)) 36 | ); 37 | private final PostgresqlUserStorage storage; 38 | 39 | @Inject 40 | public PostgresqlUserService(PostgresqlUserStorage storage) { 41 | super(storage); 42 | this.storage = storage; 43 | } 44 | 45 | @Override 46 | public CompletableFuture batch(String project, List batchUserOperations) { 47 | return storage.batch(project, batchUserOperations); 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /rakam-postgresql/src/test/java/org/rakam/pg10/TestingEnvironmentPg10.java: -------------------------------------------------------------------------------- 1 | package org.rakam.pg10; 2 | 3 | import io.airlift.testing.postgresql10.TestingPostgreSqlServer; 4 | import org.rakam.config.JDBCConfig; 5 | 6 | import java.io.IOException; 7 | 
8 | public class TestingEnvironmentPg10 { 9 | private static TestingPostgreSqlServer testingPostgresqlServer; 10 | private static JDBCConfig postgresqlConfig; 11 | 12 | public TestingEnvironmentPg10() { 13 | if (testingPostgresqlServer == null) { 14 | synchronized (TestingEnvironmentPg10.class) { 15 | if (testingPostgresqlServer == null) { 16 | try { 17 | testingPostgresqlServer = new TestingPostgreSqlServer("testuser", "testdb"); 18 | testingPostgresqlServer.execute("ALTER USER testuser WITH SUPERUSER"); 19 | postgresqlConfig = new JDBCConfig() 20 | .setUrl(testingPostgresqlServer.getJdbcUrl()) 21 | .setUsername(testingPostgresqlServer.getUser()); 22 | Runtime.getRuntime().addShutdownHook( 23 | new Thread( 24 | () -> { 25 | try { 26 | testingPostgresqlServer.close(); 27 | } catch (IOException e) { 28 | e.printStackTrace(); 29 | } 30 | } 31 | ) 32 | ); 33 | } catch (Exception e) { 34 | throw new RuntimeException("Unable to start PG", e); 35 | } 36 | } 37 | } 38 | } 39 | } 40 | 41 | public JDBCConfig getPostgresqlConfig() { 42 | if (postgresqlConfig == null) { 43 | throw new UnsupportedOperationException(); 44 | } 45 | return postgresqlConfig; 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /rakam-postgresql/src/test/java/org/rakam/pg10/analysis/TestPostgresqlApiKeyService.java: -------------------------------------------------------------------------------- 1 | package org.rakam.pg10.analysis; 2 | 3 | import org.rakam.analysis.ApiKeyService; 4 | import org.rakam.analysis.JDBCPoolDataSource; 5 | import org.rakam.analysis.TestApiKeyService; 6 | import org.rakam.pg10.TestingEnvironmentPg10; 7 | import org.rakam.postgresql.PostgresqlApiKeyService; 8 | import org.testng.annotations.AfterMethod; 9 | import org.testng.annotations.BeforeSuite; 10 | 11 | public class TestPostgresqlApiKeyService extends TestApiKeyService { 12 | private PostgresqlApiKeyService apiKeyService; 13 | 14 | @BeforeSuite 15 | public void 
setupPostgresql() { 16 | TestingEnvironmentPg10 testingEnvironment = new TestingEnvironmentPg10(); 17 | JDBCPoolDataSource apiKeyServiceDataSource = JDBCPoolDataSource 18 | .getOrCreateDataSource(testingEnvironment.getPostgresqlConfig()); 19 | 20 | apiKeyService = new PostgresqlApiKeyService(apiKeyServiceDataSource); 21 | apiKeyService.setup(); 22 | } 23 | 24 | @Override 25 | public ApiKeyService getApiKeyService() { 26 | return apiKeyService; 27 | } 28 | 29 | @AfterMethod 30 | public void tearDownMethod() throws Exception { 31 | apiKeyService.clearCache(); 32 | } 33 | 34 | } 35 | -------------------------------------------------------------------------------- /rakam-postgresql/src/test/java/org/rakam/pg10/collection/TestPostgresqlUserStorage.java: -------------------------------------------------------------------------------- 1 | package org.rakam.pg10.collection; 2 | 3 | import com.google.common.eventbus.EventBus; 4 | import org.rakam.analysis.ConfigManager; 5 | import org.rakam.analysis.JDBCPoolDataSource; 6 | import org.rakam.analysis.metadata.Metastore; 7 | import org.rakam.collection.TestUserStorage; 8 | import org.rakam.config.ProjectConfig; 9 | import org.rakam.pg10.TestingEnvironmentPg10; 10 | import org.rakam.plugin.user.AbstractUserService; 11 | import org.rakam.postgresql.PostgresqlConfigManager; 12 | import org.rakam.postgresql.PostgresqlModule; 13 | import org.rakam.postgresql.analysis.PostgresqlMetastore; 14 | import org.rakam.postgresql.plugin.user.PostgresqlUserService; 15 | import org.rakam.postgresql.plugin.user.PostgresqlUserStorage; 16 | import org.testng.annotations.BeforeSuite; 17 | 18 | public class TestPostgresqlUserStorage 19 | extends TestUserStorage { 20 | private TestingEnvironmentPg10 testingPostgresqlServer; 21 | private PostgresqlUserService userService; 22 | private PostgresqlConfigManager configManager; 23 | private PostgresqlMetastore metastore; 24 | 25 | @BeforeSuite 26 | @Override 27 | public void setUp() 28 | throws Exception 
{ 29 | testingPostgresqlServer = new TestingEnvironmentPg10(); 30 | 31 | JDBCPoolDataSource dataSource = JDBCPoolDataSource.getOrCreateDataSource(testingPostgresqlServer.getPostgresqlConfig(), "set time zone 'UTC'"); 32 | 33 | EventBus eventBus = new EventBus(); 34 | metastore = new PostgresqlMetastore(dataSource, new PostgresqlModule.PostgresqlVersion(dataSource), eventBus); 35 | 36 | configManager = new PostgresqlConfigManager(dataSource); 37 | configManager.setup(); 38 | PostgresqlUserStorage userStorage = new PostgresqlUserStorage(dataSource, configManager); 39 | userService = new PostgresqlUserService(userStorage); 40 | super.setUp(); 41 | } 42 | 43 | @Override 44 | public AbstractUserService getUserService() { 45 | return userService; 46 | } 47 | 48 | @Override 49 | public ConfigManager getConfigManager() { 50 | return configManager; 51 | } 52 | 53 | @Override 54 | public Metastore getMetastore() { 55 | return metastore; 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /rakam-postgresql/src/test/java/org/rakam/pg9/TestingEnvironmentPg9.java: -------------------------------------------------------------------------------- 1 | package org.rakam.pg9; 2 | 3 | import com.google.common.base.Throwables; 4 | import io.airlift.testing.postgresql.TestingPostgreSqlServer; 5 | import org.rakam.config.JDBCConfig; 6 | 7 | import java.io.IOException; 8 | 9 | public class TestingEnvironmentPg9 { 10 | private static TestingPostgreSqlServer testingPostgresqlServer; 11 | private static JDBCConfig postgresqlConfig; 12 | 13 | public TestingEnvironmentPg9() { 14 | if (testingPostgresqlServer == null) { 15 | synchronized (TestingEnvironmentPg9.class) { 16 | if (testingPostgresqlServer == null) { 17 | try { 18 | testingPostgresqlServer = new TestingPostgreSqlServer("testuser", "testdb"); 19 | testingPostgresqlServer.execute("ALTER USER testuser WITH SUPERUSER"); 20 | postgresqlConfig = new JDBCConfig() 21 | 
.setUrl(testingPostgresqlServer.getJdbcUrl()) 22 | .setUsername(testingPostgresqlServer.getUser()); 23 | Runtime.getRuntime().addShutdownHook( 24 | new Thread( 25 | () -> { 26 | try { 27 | testingPostgresqlServer.close(); 28 | } catch (IOException e) { 29 | e.printStackTrace(); 30 | } 31 | } 32 | ) 33 | ); 34 | } catch (Exception e) { 35 | throw Throwables.propagate(e); 36 | } 37 | } 38 | } 39 | } 40 | } 41 | 42 | public JDBCConfig getPostgresqlConfig() { 43 | if (postgresqlConfig == null) { 44 | throw new UnsupportedOperationException(); 45 | } 46 | return postgresqlConfig; 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /rakam-postgresql/src/test/java/org/rakam/pg9/analysis/TestPostgresqlApiKeyService.java: -------------------------------------------------------------------------------- 1 | package org.rakam.pg9.analysis; 2 | 3 | import org.rakam.analysis.ApiKeyService; 4 | import org.rakam.analysis.JDBCPoolDataSource; 5 | import org.rakam.analysis.TestApiKeyService; 6 | import org.rakam.pg9.TestingEnvironmentPg9; 7 | import org.rakam.postgresql.PostgresqlApiKeyService; 8 | import org.testng.annotations.AfterMethod; 9 | import org.testng.annotations.BeforeSuite; 10 | 11 | public class TestPostgresqlApiKeyService extends TestApiKeyService { 12 | private PostgresqlApiKeyService apiKeyService; 13 | 14 | @BeforeSuite 15 | public void setupPostgresql() { 16 | TestingEnvironmentPg9 testingEnvironment = new TestingEnvironmentPg9(); 17 | JDBCPoolDataSource apiKeyServiceDataSource = JDBCPoolDataSource 18 | .getOrCreateDataSource(testingEnvironment.getPostgresqlConfig()); 19 | 20 | apiKeyService = new PostgresqlApiKeyService(apiKeyServiceDataSource); 21 | apiKeyService.setup(); 22 | 23 | } 24 | 25 | @Override 26 | public ApiKeyService getApiKeyService() { 27 | return apiKeyService; 28 | } 29 | 30 | @AfterMethod 31 | public void tearDownMethod() throws Exception { 32 | apiKeyService.clearCache(); 33 | } 34 | 35 | } 36 | 
-------------------------------------------------------------------------------- /rakam-postgresql/src/test/java/org/rakam/pg9/collection/TestPostgresqlUserStorage.java: -------------------------------------------------------------------------------- 1 | package org.rakam.pg9.collection; 2 | 3 | import com.google.common.eventbus.EventBus; 4 | import org.rakam.analysis.ConfigManager; 5 | import org.rakam.analysis.JDBCPoolDataSource; 6 | import org.rakam.analysis.metadata.Metastore; 7 | import org.rakam.collection.TestUserStorage; 8 | import org.rakam.pg9.TestingEnvironmentPg9; 9 | import org.rakam.plugin.user.AbstractUserService; 10 | import org.rakam.postgresql.PostgresqlConfigManager; 11 | import org.rakam.postgresql.PostgresqlModule; 12 | import org.rakam.postgresql.analysis.PostgresqlMetastore; 13 | import org.rakam.postgresql.plugin.user.PostgresqlUserService; 14 | import org.rakam.postgresql.plugin.user.PostgresqlUserStorage; 15 | import org.testng.annotations.BeforeSuite; 16 | 17 | public class TestPostgresqlUserStorage 18 | extends TestUserStorage { 19 | private TestingEnvironmentPg9 testingPostgresqlServer; 20 | private PostgresqlMetastore metastore; 21 | private PostgresqlUserService userService; 22 | private PostgresqlConfigManager configManager; 23 | 24 | @BeforeSuite 25 | @Override 26 | public void setUp() 27 | throws Exception { 28 | testingPostgresqlServer = new TestingEnvironmentPg9(); 29 | 30 | JDBCPoolDataSource dataSource = JDBCPoolDataSource.getOrCreateDataSource(testingPostgresqlServer.getPostgresqlConfig(), "set time zone 'UTC'"); 31 | 32 | EventBus eventBus = new EventBus(); 33 | metastore = new PostgresqlMetastore(dataSource, new PostgresqlModule.PostgresqlVersion(dataSource), eventBus); 34 | 35 | configManager = new PostgresqlConfigManager(dataSource); 36 | configManager.setup(); 37 | 38 | PostgresqlUserStorage userStorage = new PostgresqlUserStorage(dataSource, configManager); 39 | userService = new PostgresqlUserService(userStorage); 40 | 
super.setUp(); 41 | } 42 | 43 | @Override 44 | public AbstractUserService getUserService() { 45 | return userService; 46 | } 47 | 48 | @Override 49 | public ConfigManager getConfigManager() { 50 | return configManager; 51 | } 52 | 53 | @Override 54 | public Metastore getMetastore() { 55 | return metastore; 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /rakam-presto-kafka/src/main/java/org/apache/avro/generic/SourceFilteredRecordWriter.java: -------------------------------------------------------------------------------- 1 | package org.apache.avro.generic; 2 | 3 | import org.apache.avro.Schema; 4 | import org.apache.avro.io.Encoder; 5 | 6 | import java.io.IOException; 7 | import java.util.Set; 8 | 9 | public class SourceFilteredRecordWriter extends GenericDatumWriter { 10 | private final GenericData data; 11 | private final Set sourceFields; 12 | 13 | public SourceFilteredRecordWriter(Schema root, GenericData data, Set sourceFields) { 14 | super(root, data); 15 | this.data = data; 16 | this.sourceFields = sourceFields; 17 | } 18 | 19 | @Override 20 | public void writeRecord(Schema schema, Object datum, Encoder out) throws IOException { 21 | Object state = data.getRecordState(datum, schema); 22 | for (Schema.Field f : schema.getFields()) { 23 | if (!sourceFields.contains(f.name())) { 24 | writeField(datum, f, out, state); 25 | } 26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /rakam-presto-kafka/src/main/java/org/rakam/kafka/collection/KafkaCollectorModule.java: -------------------------------------------------------------------------------- 1 | package org.rakam.kafka.collection; 2 | 3 | import com.google.auto.service.AutoService; 4 | import com.google.inject.Binder; 5 | import com.google.inject.name.Names; 6 | import org.rakam.plugin.EventStore; 7 | import org.rakam.plugin.RakamModule; 8 | import org.rakam.plugin.stream.EventStream; 9 | import 
org.rakam.util.ConditionalModule; 10 | 11 | import static io.airlift.configuration.ConfigurationModule.bindConfig; 12 | 13 | @AutoService(RakamModule.class) 14 | @ConditionalModule(config = "event.store", value = "kafka") 15 | public class KafkaCollectorModule extends RakamModule { 16 | 17 | @Override 18 | protected void setup(Binder binder) { 19 | bindConfig(binder) 20 | .annotatedWith(Names.named("event.store.kafka")) 21 | .prefixedWith("event.store.kafka") 22 | .to(KafkaConfig.class); 23 | binder.bind(EventStore.class).to(KafkaEventStore.class); 24 | } 25 | 26 | @Override 27 | public String name() { 28 | return "Kafka Event Collector"; 29 | } 30 | 31 | @Override 32 | public String description() { 33 | return "Sends events to Apache Kafka"; 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /rakam-presto-kafka/src/main/java/org/rakam/kafka/collection/KafkaConfig.java: -------------------------------------------------------------------------------- 1 | package org.rakam.kafka.collection; 2 | 3 | import com.google.common.base.Splitter; 4 | import com.google.common.collect.ImmutableSet; 5 | import com.google.common.net.HostAndPort; 6 | import io.airlift.configuration.Config; 7 | import io.airlift.units.DataSize; 8 | import io.airlift.units.Duration; 9 | import io.airlift.units.MinDuration; 10 | 11 | import java.util.Set; 12 | 13 | import static com.google.common.collect.Iterables.transform; 14 | 15 | public class KafkaConfig { 16 | public static final String SERIALIZER = "kafka.serializer.DefaultEncoder"; 17 | private static final int KAFKA_DEFAULT_PORT = 9092; 18 | private Set nodes = ImmutableSet.of(); 19 | private Duration kafkaConnectTimeout = Duration.valueOf("10s"); 20 | private DataSize kafkaBufferSize = new DataSize(64, DataSize.Unit.KILOBYTE); 21 | private Duration commitInterval = Duration.valueOf("5s"); 22 | private HostAndPort zookeeperNode; 23 | 24 | public static ImmutableSet parseNodes(String nodes) { 
25 | Splitter splitter = Splitter.on(',').omitEmptyStrings().trimResults(); 26 | return ImmutableSet.copyOf(transform(splitter.split(nodes), KafkaConfig::toHostAddress)); 27 | } 28 | 29 | private static HostAndPort toHostAddress(String value) { 30 | return HostAndPort.fromString(value).withDefaultPort(KAFKA_DEFAULT_PORT); 31 | } 32 | 33 | // @Size(min = 1) 34 | public Set getNodes() { 35 | return nodes; 36 | } 37 | 38 | @Config("nodes") 39 | public KafkaConfig setNodes(String nodes) { 40 | this.nodes = (nodes == null) ? null : parseNodes(nodes); 41 | return this; 42 | } 43 | 44 | @MinDuration("1s") 45 | public Duration getKafkaConnectTimeout() { 46 | return kafkaConnectTimeout; 47 | } 48 | 49 | @Config("connect-timeout") 50 | public KafkaConfig setKafkaConnectTimeout(String kafkaConnectTimeout) { 51 | this.kafkaConnectTimeout = Duration.valueOf(kafkaConnectTimeout); 52 | return this; 53 | } 54 | 55 | public DataSize getKafkaBufferSize() { 56 | return kafkaBufferSize; 57 | } 58 | 59 | @Config("buffer-size") 60 | public KafkaConfig setKafkaBufferSize(String kafkaBufferSize) { 61 | this.kafkaBufferSize = DataSize.valueOf(kafkaBufferSize); 62 | return this; 63 | } 64 | 65 | @MinDuration("1s") 66 | public Duration getCommitInterval() { 67 | return commitInterval; 68 | } 69 | 70 | @Config("commit-interval") 71 | public KafkaConfig setCommitInterval(String interval) { 72 | if (interval != null) 73 | this.commitInterval = Duration.valueOf(interval); 74 | return this; 75 | } 76 | 77 | public HostAndPort getZookeeperNode() { 78 | return zookeeperNode; 79 | } 80 | 81 | @Config("zookeeper.connect") 82 | public KafkaConfig setZookeeperNode(String node) { 83 | this.zookeeperNode = (node == null) ? 
null : HostAndPort.fromString(node); 84 | return this; 85 | } 86 | } 87 | 88 | -------------------------------------------------------------------------------- /rakam-presto-kafka/src/main/java/org/rakam/kafka/collection/KafkaSimpleConsumerManager.java: -------------------------------------------------------------------------------- 1 | package org.rakam.kafka.collection; 2 | 3 | import com.google.common.base.Throwables; 4 | import com.google.common.cache.CacheBuilder; 5 | import com.google.common.cache.CacheLoader; 6 | import com.google.common.cache.LoadingCache; 7 | import com.google.common.net.HostAndPort; 8 | import com.google.inject.Singleton; 9 | import io.airlift.log.Logger; 10 | import kafka.javaapi.consumer.SimpleConsumer; 11 | 12 | import javax.annotation.PreDestroy; 13 | import javax.inject.Inject; 14 | import java.util.Map; 15 | import java.util.concurrent.ExecutionException; 16 | 17 | import static com.google.common.base.Preconditions.checkNotNull; 18 | 19 | @Singleton 20 | public class KafkaSimpleConsumerManager { 21 | 22 | private final static Logger LOGGER = Logger.get(KafkaSimpleConsumerManager.class); 23 | 24 | private final LoadingCache consumerCache; 25 | 26 | @Inject 27 | KafkaSimpleConsumerManager() { 28 | this.consumerCache = CacheBuilder.newBuilder().build(new SimpleConsumerCacheLoader()); 29 | } 30 | 31 | @PreDestroy 32 | public void tearDown() { 33 | for (Map.Entry entry : consumerCache.asMap().entrySet()) { 34 | try { 35 | entry.getValue().close(); 36 | } catch (Exception e) { 37 | LOGGER.warn("While closing consumer %s:", entry.getKey(), e); 38 | } 39 | } 40 | } 41 | 42 | public SimpleConsumer getConsumer(HostAndPort host) { 43 | checkNotNull(host, "host is null"); 44 | try { 45 | return consumerCache.get(host); 46 | } catch (ExecutionException e) { 47 | throw Throwables.propagate(e.getCause()); 48 | } 49 | } 50 | 51 | private class SimpleConsumerCacheLoader 52 | extends CacheLoader { 53 | @Override 54 | public SimpleConsumer 
load(HostAndPort host) 55 | throws Exception { 56 | LOGGER.info("Creating new Consumer for {}", host); 57 | return new SimpleConsumer(host.getHostText(), 58 | host.getPort(), 59 | 10000, 60 | 1024, 61 | "consumer"); 62 | } 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /rakam-presto/src/main/java/org/rakam/presto/MetadataModule.java: -------------------------------------------------------------------------------- 1 | package org.rakam.presto; 2 | 3 | import com.google.auto.service.AutoService; 4 | import com.google.inject.Binder; 5 | import com.google.inject.Scopes; 6 | import com.google.inject.name.Names; 7 | import org.rakam.analysis.ApiKeyService; 8 | import org.rakam.analysis.ConfigManager; 9 | import org.rakam.analysis.JDBCPoolDataSource; 10 | import org.rakam.analysis.metadata.Metastore; 11 | import org.rakam.config.JDBCConfig; 12 | import org.rakam.config.MetadataConfig; 13 | import org.rakam.plugin.RakamModule; 14 | import org.rakam.postgresql.analysis.JDBCApiKeyService; 15 | import org.rakam.presto.analysis.*; 16 | import org.rakam.util.ConditionalModule; 17 | 18 | import static io.airlift.configuration.ConfigBinder.configBinder; 19 | import static java.lang.String.format; 20 | 21 | @AutoService(RakamModule.class) 22 | @ConditionalModule(config = "metadata.store.jdbc.url") 23 | public class MetadataModule 24 | extends RakamModule { 25 | @Override 26 | protected void setup(Binder binder) { 27 | JDBCConfig jdbc = buildConfigObject(JDBCConfig.class, "metadata.store.jdbc"); 28 | if(!jdbc.getUrl().startsWith("jdbc:mysql")) { 29 | throw new IllegalArgumentException("We only support as metadata store at the moment"); 30 | } 31 | JDBCPoolDataSource metadataDataSource = bindJDBCConfig(binder, "metadata.store.jdbc"); 32 | binder.bind(ApiKeyService.class).toInstance(new JDBCApiKeyService(metadataDataSource)); 33 | 
binder.bind(JDBCPoolDataSource.class).annotatedWith(Names.named("metadata.store.jdbc")).toInstance(metadataDataSource); 34 | 35 | 36 | // we only support mysql at the moment 37 | binder.bind(ConfigManager.class).to(MysqlConfigManager.class); 38 | binder.bind(Metastore.class).to(MysqlExplicitMetastore.class).in(Scopes.SINGLETON); 39 | } 40 | 41 | @Override 42 | public String name() { 43 | return "Metadata store for Rakam"; 44 | } 45 | 46 | @Override 47 | public String description() { 48 | return "Rakam backend for high-throughput systems."; 49 | } 50 | 51 | private JDBCPoolDataSource bindJDBCConfig(Binder binder, String config) { 52 | JDBCPoolDataSource dataSource = JDBCPoolDataSource.getOrCreateDataSource( 53 | buildConfigObject(JDBCConfig.class, config)); 54 | binder.bind(JDBCPoolDataSource.class) 55 | .annotatedWith(Names.named(config)) 56 | .toInstance(dataSource); 57 | return dataSource; 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /rakam-presto/src/main/java/org/rakam/presto/PrestoType.java: -------------------------------------------------------------------------------- 1 | package org.rakam.presto; 2 | 3 | import com.facebook.presto.metadata.MetadataManager; 4 | import com.facebook.presto.spi.type.*; 5 | import com.google.common.collect.ImmutableList; 6 | import org.rakam.collection.FieldType; 7 | 8 | import static com.facebook.presto.type.MapParametricType.MAP; 9 | 10 | public class PrestoType { 11 | private static final TypeManager defaultTypeManager = MetadataManager.createTestMetadataManager().getTypeManager(); 12 | 13 | public static Type toType(FieldType type) { 14 | switch (type) { 15 | case DOUBLE: 16 | return DoubleType.DOUBLE; 17 | case LONG: 18 | return BigintType.BIGINT; 19 | case BOOLEAN: 20 | return BooleanType.BOOLEAN; 21 | case STRING: 22 | return VarcharType.VARCHAR; 23 | case INTEGER: 24 | return IntegerType.INTEGER; 25 | case DECIMAL: 26 | return DecimalType.createDecimalType(); 27 | 
package org.rakam.presto.analysis;

import com.google.common.base.Throwables;
import com.google.inject.name.Named;
import org.rakam.analysis.ConfigManager;
import org.rakam.analysis.JDBCPoolDataSource;
import org.rakam.util.JsonHelper;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;

import javax.annotation.PostConstruct;
import javax.inject.Inject;
import java.util.Locale;

/**
 * {@link ConfigManager} backed by a MySQL {@code config} table keyed by
 * (project, name). Values are stored as JSON text and config names are
 * normalized to upper case on every read and write.
 */
public class MysqlConfigManager
        implements ConfigManager {

    private final DBI dbi;

    @Inject
    public MysqlConfigManager(@Named("metadata.store.jdbc") JDBCPoolDataSource dataSource) {
        this.dbi = new DBI(dataSource);
    }

    /** Creates the backing table on startup if it does not already exist. */
    @PostConstruct
    public void setup() {
        try (Handle handle = dbi.open()) {
            handle.createStatement("CREATE TABLE IF NOT EXISTS config (" +
                    " project VARCHAR(255) NOT NULL," +
                    " name VARCHAR(255) NOT NULL," +
                    " value TEXT," +
                    " PRIMARY KEY (project, name)" +
                    " )")
                    .execute();
        }
    }

    /**
     * Reads a config value, or returns null when no row exists.
     */
    @Override
    public <T> T getConfig(String project, String configName, Class<T> clazz) {
        try (Handle handle = dbi.open()) {
            return handle.createQuery("SELECT value FROM config WHERE project = :project AND name = :name")
                    .bind("project", project)
                    .bind("name", configName.toUpperCase(Locale.ENGLISH))
                    .map((i, resultSet, statementContext) -> JsonHelper.read(resultSet.getString(1), clazz))
                    .first();
        }
    }

    /**
     * Stores the value only if no value exists yet; returns whichever value ends
     * up persisted (the given one, or a concurrent writer's).
     */
    @Override
    public <T> T setConfigOnce(String project, String configName, T value) {
        try (Handle handle = dbi.open()) {
            T config = getConfig(project, configName, (Class<T>) value.getClass());

            if (config == null) {
                try {
                    handle.createStatement("INSERT INTO config (project, name, value) VALUES (:project, :name, :value)")
                            .bind("project", project)
                            .bind("name", configName.toUpperCase(Locale.ENGLISH))
                            .bind("value", JsonHelper.encode(value)).execute();
                    return value;
                } catch (Exception e) {
                    // A concurrent writer may have hit the duplicate key first:
                    // re-read and return the winner; otherwise rethrow the failure
                    // (Throwables.propagate is deprecated).
                    T lastValue = getConfig(project, configName, (Class<T>) value.getClass());
                    if (lastValue == null) {
                        Throwables.throwIfUnchecked(e);
                        throw new RuntimeException(e);
                    }
                    return lastValue;
                }
            } else {
                return config;
            }
        }
    }

    /** Deletes every stored config row (testing support). */
    @Override
    public void clear() {
        try (Handle handle = dbi.open()) {
            handle.createStatement("DELETE FROM config").execute();
        }
    }

    /** Upserts the value for (project, name). */
    @Override
    public <T> void setConfig(String project, String configName, T value) {
        try (Handle handle = dbi.open()) {
            handle.createStatement("INSERT INTO config (project, name, value) VALUES (:project, :name, :value) ON DUPLICATE KEY UPDATE value = :value")
                    .bind("project", project)
                    .bind("name", configName.toUpperCase(Locale.ENGLISH))
                    .bind("value", JsonHelper.encode(value)).execute();
        }
    }
}
/**
 * JDBI DAO that creates the schema-registry tables used by the Presto
 * metastore. The DDL uses MySQL syntax (AUTO_INCREMENT), matching the
 * MySQL-only restriction enforced by MetadataModule.
 */
public interface SchemaDao {
    // One row per (schema, table); table_version tracks schema evolution over time.
    @SqlUpdate("CREATE TABLE IF NOT EXISTS tables (\n" +
            "  table_id BIGINT PRIMARY KEY AUTO_INCREMENT,\n" +
            "  schema_name VARCHAR(255) NOT NULL,\n" +
            "  table_name VARCHAR(255) NOT NULL,\n" +
            "  create_time BIGINT NOT NULL,\n" +
            "  table_version BIGINT NOT NULL,\n" +
            "  UNIQUE (schema_name, table_name)\n" +
            ")")
    void createTable();

    // One row per column; data_type holds the serialized type name.
    // Must run after createTable() because of the foreign key on table_id.
    @SqlUpdate("CREATE TABLE IF NOT EXISTS columns (\n" +
            "  table_id BIGINT NOT NULL,\n" +
            "  column_id BIGINT PRIMARY KEY AUTO_INCREMENT,\n" +
            "  column_name VARCHAR(255) NOT NULL,\n" +
            "  create_time BIGINT NOT NULL,\n" +
            "  data_type VARCHAR(255) NOT NULL,\n" +
            "  UNIQUE (table_id, column_name),\n" +
            "  FOREIGN KEY (table_id) REFERENCES tables (table_id)\n" +
            ")")
    void createColumn();
}
package org.rakam;

import com.fasterxml.jackson.annotation.JsonCreator;
import org.rakam.server.http.annotations.ApiParam;

import java.util.List;

/**
 * Per-request access grants: the set of tables a caller may query, each with an
 * optional filter expression.
 */
public final class Access {
    public final List<TableAccess> tableAccessList;

    @JsonCreator
    public Access(@ApiParam("tables") List<TableAccess> tables) {
        this.tableAccessList = tables;
    }

    /** Access rule for one table: its name plus an optional row-filter expression. */
    public static class TableAccess {
        public final String tableName;
        public final String expression;

        @JsonCreator
        public TableAccess(@ApiParam("name") String name, @ApiParam("exp") String exp) {
            this.tableName = name;
            this.expression = exp;
        }
    }
}
HashBasedTable.create(); 13 | } 14 | 15 | @Override 16 | public synchronized T getConfig(String project, String configName, Class clazz) { 17 | return (T) table.get(project, configName); 18 | } 19 | 20 | @Override 21 | public synchronized void setConfig(String project, String configName, T clazz) { 22 | if (clazz == null) { 23 | table.remove(project, configName); 24 | } else { 25 | table.put(project, configName, clazz); 26 | } 27 | } 28 | 29 | @Override 30 | public synchronized T setConfigOnce(String project, String configName, T clazz) { 31 | Object o = table.row(project).putIfAbsent(configName, clazz); 32 | return o == null ? clazz : (T) o; 33 | } 34 | 35 | @Override 36 | public void clear() { 37 | table.clear(); 38 | } 39 | 40 | public Table getTable() { 41 | return table; 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/analysis/ApiKeyService.java: -------------------------------------------------------------------------------- 1 | package org.rakam.analysis; 2 | 3 | import com.fasterxml.jackson.annotation.JsonCreator; 4 | import com.fasterxml.jackson.annotation.JsonProperty; 5 | import com.google.auto.value.AutoValue; 6 | 7 | import javax.annotation.Nullable; 8 | 9 | public interface ApiKeyService { 10 | ProjectApiKeys createApiKeys(String project); 11 | 12 | String getProjectOfApiKey(String apiKey, AccessKeyType type); 13 | 14 | Key getProjectKey(int apiId, AccessKeyType type); 15 | 16 | class Key { 17 | public final String project; 18 | public final String key; 19 | 20 | public Key(String project, String key) { 21 | this.project = project; 22 | this.key = key; 23 | } 24 | } 25 | 26 | void revokeApiKeys(String project, String masterKey); 27 | 28 | void revokeAllKeys(String project); 29 | 30 | default void setup() { 31 | } 32 | 33 | enum AccessKeyType { 34 | MASTER_KEY("master_key"), WRITE_KEY("write_key"); 35 | 36 | private final String key; 37 | 38 | AccessKeyType(String 
key) { 39 | this.key = key; 40 | } 41 | 42 | public static AccessKeyType fromKey(String key) { 43 | for (AccessKeyType accessKeyType : values()) { 44 | if (accessKeyType.getKey().equals(key)) { 45 | return accessKeyType; 46 | } 47 | } 48 | throw new IllegalArgumentException(key + " doesn't exist."); 49 | } 50 | 51 | public String getKey() { 52 | return key; 53 | } 54 | } 55 | 56 | @AutoValue 57 | abstract class ProjectApiKeys { 58 | @JsonCreator 59 | public static ProjectApiKeys create( 60 | @JsonProperty("master_key") String masterKey, 61 | @JsonProperty("write_key") String writeKey) { 62 | return new AutoValue_ApiKeyService_ProjectApiKeys(masterKey, writeKey); 63 | } 64 | 65 | @Nullable 66 | @JsonProperty("master_key") 67 | public abstract String masterKey(); 68 | 69 | @Nullable 70 | @JsonProperty("write_key") 71 | public abstract String writeKey(); 72 | 73 | public String getKey(AccessKeyType accessKeyType) { 74 | switch (accessKeyType) { 75 | case WRITE_KEY: 76 | return writeKey(); 77 | case MASTER_KEY: 78 | return masterKey(); 79 | default: 80 | throw new IllegalStateException(); 81 | } 82 | } 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/analysis/ConfigManager.java: -------------------------------------------------------------------------------- 1 | package org.rakam.analysis; 2 | 3 | import com.google.common.annotations.VisibleForTesting; 4 | 5 | import javax.validation.constraints.NotNull; 6 | 7 | public interface ConfigManager { 8 | default void setup() { 9 | } 10 | 11 | T getConfig(String project, String configName, Class clazz); 12 | 13 | void setConfig(String project, String configName, @NotNull T value); 14 | 15 | T setConfigOnce(String project, String configName, @NotNull T clazz); 16 | 17 | @VisibleForTesting 18 | void clear(); 19 | } 20 | -------------------------------------------------------------------------------- 
/rakam-spi/src/main/java/org/rakam/analysis/CustomParameter.java: -------------------------------------------------------------------------------- 1 | package org.rakam.analysis; 2 | 3 | import org.rakam.server.http.HttpServerBuilder; 4 | 5 | public class CustomParameter { 6 | public final String parameterName; 7 | public final HttpServerBuilder.IRequestParameterFactory factory; 8 | 9 | public CustomParameter(String parameterName, HttpServerBuilder.IRequestParameterFactory factory) { 10 | this.parameterName = parameterName; 11 | this.factory = factory; 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/analysis/EscapeIdentifier.java: -------------------------------------------------------------------------------- 1 | package org.rakam.analysis; 2 | 3 | import com.google.inject.BindingAnnotation; 4 | 5 | import java.lang.annotation.Retention; 6 | import java.lang.annotation.RetentionPolicy; 7 | 8 | @BindingAnnotation 9 | @Retention(RetentionPolicy.RUNTIME) 10 | public @interface EscapeIdentifier { 11 | } 12 | 13 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/analysis/InMemoryApiKeyService.java: -------------------------------------------------------------------------------- 1 | package org.rakam.analysis; 2 | 3 | import com.google.common.collect.ImmutableList; 4 | 5 | import java.util.*; 6 | import java.util.concurrent.ConcurrentHashMap; 7 | 8 | public class InMemoryApiKeyService implements ApiKeyService { 9 | private final Map> apiKeys = new ConcurrentHashMap<>(); 10 | 11 | @Override 12 | public synchronized ProjectApiKeys createApiKeys(String project) { 13 | List keys = apiKeys.computeIfAbsent(project, p -> new ArrayList<>()); 14 | 15 | ProjectApiKeys projectApiKeys = ProjectApiKeys.create(UUID.randomUUID().toString(), UUID.randomUUID().toString()); 16 | keys.add(projectApiKeys); 17 | return projectApiKeys; 18 | 
} 19 | 20 | @Override 21 | public String getProjectOfApiKey(String apiKey, AccessKeyType type) { 22 | Optional project = apiKeys.entrySet().stream() 23 | .filter(e -> e.getValue().stream() 24 | .anyMatch(a -> a.getKey(type).equals(apiKey))) 25 | .findAny().map(e -> e.getKey()); 26 | if (!project.isPresent()) { 27 | throw new IllegalStateException(); 28 | } 29 | return project.get(); 30 | } 31 | 32 | @Override 33 | public Key getProjectKey(int apiId, AccessKeyType type) { 34 | throw new UnsupportedOperationException(); 35 | } 36 | 37 | @Override 38 | public void revokeApiKeys(String project, String masterKey) { 39 | apiKeys.getOrDefault(project, ImmutableList.of()) 40 | .removeIf(e -> e.masterKey().equals(masterKey)); 41 | } 42 | 43 | private String getKey(ProjectApiKeys keys, AccessKeyType type) { 44 | switch (type) { 45 | case MASTER_KEY: 46 | return keys.masterKey(); 47 | case WRITE_KEY: 48 | return keys.writeKey(); 49 | default: 50 | throw new IllegalStateException(); 51 | } 52 | } 53 | 54 | 55 | @Override 56 | public void revokeAllKeys(String project) { 57 | apiKeys.remove(project); 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/analysis/InMemoryEventStore.java: -------------------------------------------------------------------------------- 1 | package org.rakam.analysis; 2 | 3 | import org.rakam.collection.Event; 4 | import org.rakam.plugin.EventStore; 5 | import org.rakam.plugin.SyncEventStore; 6 | 7 | import java.util.ArrayList; 8 | import java.util.List; 9 | 10 | public class InMemoryEventStore implements SyncEventStore { 11 | private final List events = new ArrayList<>(); 12 | 13 | @Override 14 | public synchronized void store(Event event) { 15 | events.add(event); 16 | } 17 | 18 | @Override 19 | public int[] storeBatch(List events) { 20 | this.events.addAll(events); 21 | return EventStore.SUCCESSFUL_BATCH; 22 | } 23 | 24 | public void clear() { 25 | 
this.events.clear(); 26 | } 27 | 28 | public List getEvents() { 29 | return events; 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/analysis/InMemoryMetastore.java: -------------------------------------------------------------------------------- 1 | package org.rakam.analysis; 2 | 3 | import com.google.common.collect.ImmutableList; 4 | import com.google.common.collect.ImmutableMap; 5 | import com.google.common.eventbus.EventBus; 6 | import org.rakam.analysis.metadata.AbstractMetastore; 7 | import org.rakam.collection.SchemaField; 8 | import org.rakam.util.NotExistsException; 9 | import org.rakam.util.ValidationUtil; 10 | 11 | import java.util.*; 12 | import java.util.concurrent.ConcurrentHashMap; 13 | 14 | public class InMemoryMetastore extends AbstractMetastore { 15 | private final Map>> collections = new ConcurrentHashMap<>(); 16 | private final ApiKeyService apiKeyService; 17 | 18 | public InMemoryMetastore(ApiKeyService apiKeyService) { 19 | super(new EventBus()); 20 | this.apiKeyService = apiKeyService; 21 | } 22 | 23 | public InMemoryMetastore(ApiKeyService apiKeyService, EventBus eventBus) { 24 | super(eventBus); 25 | this.apiKeyService = apiKeyService; 26 | } 27 | 28 | @Override 29 | public Map> getCollections(String project) { 30 | return collections.get(project); 31 | } 32 | 33 | @Override 34 | public Set getCollectionNames(String project) { 35 | return collections.get(project).keySet(); 36 | } 37 | 38 | 39 | @Override 40 | public void createProject(String project) { 41 | collections.put(project, new HashMap<>()); 42 | } 43 | 44 | @Override 45 | public Set getProjects() { 46 | return collections.keySet(); 47 | } 48 | 49 | @Override 50 | public List getCollection(String project, String collection) { 51 | return collections.getOrDefault(project, ImmutableMap.of()).getOrDefault(collection, ImmutableList.of()); 52 | } 53 | 54 | @Override 55 | public synchronized List 
getOrCreateCollectionFields(String project, String collection, Set fields) throws NotExistsException { 56 | ValidationUtil.checkCollectionValid(collection); 57 | 58 | Map> list = collections.get(project); 59 | if (list == null) { 60 | throw new NotExistsException("Project"); 61 | } 62 | List schemaFields = list.computeIfAbsent(collection, (key) -> new ArrayList<>()); 63 | fields.stream() 64 | .filter(field -> !schemaFields.stream().anyMatch(f -> f.getName().equals(field.getName()))) 65 | .forEach(schemaFields::add); 66 | return schemaFields; 67 | } 68 | 69 | @Override 70 | public void deleteProject(String project) { 71 | collections.remove(project); 72 | apiKeyService.revokeAllKeys(project); 73 | super.onDeleteProject(project); 74 | } 75 | 76 | } 77 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/analysis/InternalConfig.java: -------------------------------------------------------------------------------- 1 | package org.rakam.analysis; 2 | 3 | import org.rakam.collection.FieldType; 4 | 5 | public enum InternalConfig { 6 | USER_TYPE(FieldType.STRING, false), FIXED_SCHEMA(FieldType.BOOLEAN, true); 7 | 8 | private final FieldType type; 9 | private final boolean dynamic; 10 | 11 | InternalConfig(FieldType type, boolean dynamic) { 12 | this.type = type; 13 | this.dynamic = dynamic; 14 | } 15 | 16 | public FieldType getType() { 17 | return type; 18 | } 19 | 20 | public boolean isDynamic() { 21 | return dynamic; 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/analysis/RequestContext.java: -------------------------------------------------------------------------------- 1 | package org.rakam.analysis; 2 | 3 | import org.rakam.Access; 4 | 5 | public class RequestContext { 6 | public final String project; 7 | public final String apiKey; 8 | public final Access access; 9 | 10 | public RequestContext(String project) { 11 | 
this(project, null, null); 12 | } 13 | 14 | public RequestContext(String project, String apiKey) { 15 | this(project, apiKey, null); 16 | } 17 | 18 | public RequestContext(String project, String apiKey, Access access) { 19 | this.project = project; 20 | this.apiKey = apiKey; 21 | this.access = access; 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/analysis/RequestPreProcessorItem.java: -------------------------------------------------------------------------------- 1 | package org.rakam.analysis; 2 | 3 | import org.rakam.server.http.RequestPreprocessor; 4 | 5 | import java.lang.reflect.Method; 6 | import java.util.function.Predicate; 7 | 8 | public class RequestPreProcessorItem { 9 | public final Predicate predicate; 10 | public final RequestPreprocessor processor; 11 | 12 | public RequestPreProcessorItem(Predicate predicate, RequestPreprocessor processor) { 13 | this.predicate = predicate; 14 | this.processor = processor; 15 | } 16 | } -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/analysis/metadata/AbstractMetastore.java: -------------------------------------------------------------------------------- 1 | package org.rakam.analysis.metadata; 2 | 3 | import com.google.common.eventbus.EventBus; 4 | import org.rakam.collection.SchemaField; 5 | import org.rakam.plugin.SystemEvents; 6 | import org.rakam.plugin.SystemEvents.ProjectCreatedEvent; 7 | 8 | import java.util.Collection; 9 | import java.util.List; 10 | import java.util.Map; 11 | import java.util.Set; 12 | import java.util.stream.Collectors; 13 | 14 | public abstract class AbstractMetastore 15 | implements Metastore { 16 | private final EventBus eventBus; 17 | 18 | public AbstractMetastore(EventBus eventBus) { 19 | this.eventBus = eventBus; 20 | } 21 | 22 | protected void onCreateProject(String project) { 23 | eventBus.post(new 
ProjectCreatedEvent(project)); 24 | } 25 | 26 | protected void onDeleteProject(String project) { 27 | eventBus.post(new SystemEvents.ProjectDeletedEvent(project)); 28 | } 29 | 30 | protected void onCreateCollection(String project, String collection, List fields) { 31 | eventBus.post(new SystemEvents.CollectionCreatedEvent(project, collection, fields)); 32 | } 33 | 34 | protected void onCreateCollectionField(String project, String collection, List fields) { 35 | eventBus.post(new SystemEvents.CollectionFieldCreatedEvent(project, collection, fields)); 36 | } 37 | 38 | public abstract List getOrCreateCollectionFields(String project, String collection, Set fields); 39 | } 40 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/analysis/metadata/Metastore.java: -------------------------------------------------------------------------------- 1 | package org.rakam.analysis.metadata; 2 | 3 | import org.rakam.collection.SchemaField; 4 | import org.rakam.util.NotExistsException; 5 | 6 | import java.time.LocalDate; 7 | import java.util.*; 8 | import java.util.concurrent.CompletableFuture; 9 | 10 | 11 | public interface Metastore { 12 | Map> getCollections(String project); 13 | 14 | Set getCollectionNames(String project); 15 | 16 | void createProject(String project); 17 | 18 | Set getProjects(); 19 | 20 | List getCollection(String project, String collection); 21 | 22 | List getOrCreateCollectionFields(String project, String collection, Set fields) throws NotExistsException; 23 | 24 | void deleteProject(String project); 25 | 26 | default void setup() { 27 | } 28 | } -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/collection/EventList.java: -------------------------------------------------------------------------------- 1 | package org.rakam.collection; 2 | 3 | import com.fasterxml.jackson.annotation.JsonCreator; 4 | import 
com.fasterxml.jackson.annotation.JsonPropertyOrder; 5 | import org.rakam.server.http.annotations.ApiParam; 6 | 7 | import java.util.List; 8 | 9 | import static com.google.common.base.Preconditions.checkNotNull; 10 | 11 | @JsonPropertyOrder({"api", "events"}) 12 | public class EventList { 13 | public final Event.EventContext api; 14 | public final String project; 15 | public final List events; 16 | 17 | @JsonCreator 18 | public EventList(@ApiParam(value = "api", description = "api_key parameter is required in 'api' property") Event.EventContext api, 19 | @ApiParam(value = "events", description = "The list of events") List events) { 20 | this.events = checkNotNull(events, "events parameter is null"); 21 | this.api = checkNotNull(api, "api is null"); 22 | this.project = null; 23 | } 24 | 25 | public EventList(Event.EventContext api, 26 | String project, 27 | List events) { 28 | this.project = checkNotNull(project, "project parameter is null"); 29 | this.events = checkNotNull(events, "events parameter is null"); 30 | this.api = checkNotNull(api, "api is null"); 31 | } 32 | 33 | @Override 34 | public String toString() { 35 | return "EventList{" + 36 | "api=" + api + 37 | ", project='" + project + '\'' + 38 | ", events=" + events + 39 | '}'; 40 | } 41 | 42 | @Override 43 | public boolean equals(Object o) { 44 | if (this == o) return true; 45 | if (!(o instanceof EventList)) return false; 46 | 47 | EventList eventList = (EventList) o; 48 | 49 | if (!api.equals(eventList.api)) return false; 50 | if (!project.equals(eventList.project)) return false; 51 | return events.equals(eventList.events); 52 | 53 | } 54 | 55 | @Override 56 | public int hashCode() { 57 | int result = api.hashCode(); 58 | result = 31 * result + project.hashCode(); 59 | result = 31 * result + events.hashCode(); 60 | return result; 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/collection/FieldDependencyBuilder.java: 
-------------------------------------------------------------------------------- 1 | package org.rakam.collection; 2 | 3 | import com.google.common.collect.Lists; 4 | import com.google.common.collect.Maps; 5 | 6 | import java.util.*; 7 | 8 | import static com.google.common.base.Preconditions.checkState; 9 | import static com.google.common.collect.Sets.newHashSet; 10 | 11 | 12 | public class FieldDependencyBuilder { 13 | private final List constantFields = Lists.newArrayList(); 14 | private final Map> dependentFields = Maps.newHashMap(); 15 | 16 | public void addFields(List fields) { 17 | checkFields(fields); 18 | constantFields.addAll(fields); 19 | } 20 | 21 | public void addFields(String dependentField, List fields) { 22 | checkFields(fields); 23 | dependentFields.put(dependentField, fields); 24 | } 25 | 26 | private void checkFields(List fields) { 27 | SchemaField[] collisions = fields.stream() 28 | .filter(newField -> constantFields.stream() 29 | .anyMatch(f -> f.getName().equals(newField.getName()) && !f.getType().equals(newField.getType()))) 30 | .toArray(SchemaField[]::new); 31 | checkState(collisions.length == 0, "Module field collides with existing field that has another type exists: ", Arrays.toString(collisions)); 32 | 33 | collisions = dependentFields.values().stream() 34 | .flatMap(col -> col.stream()) 35 | .filter(field -> fields.stream().anyMatch(f -> f.getName().equals(field.getName()) && !f.getType().equals(field.getType()))) 36 | .toArray(SchemaField[]::new); 37 | 38 | checkState(collisions.length == 0, "Fields already exist in dependency table: ", Arrays.toString(collisions)); 39 | } 40 | 41 | public FieldDependency build() { 42 | return new FieldDependency(newHashSet(constantFields), dependentFields); 43 | } 44 | 45 | public static class FieldDependency { 46 | public final Set constantFields; 47 | public final Map> dependentFields; 48 | 49 | public FieldDependency(Set constantFields, Map> dependentFields) { 50 | this.constantFields = 
Collections.unmodifiableSet(constantFields); 51 | this.dependentFields = Collections.unmodifiableMap(dependentFields); 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/collection/FieldType.java: -------------------------------------------------------------------------------- 1 | package org.rakam.collection; 2 | 3 | import com.fasterxml.jackson.annotation.JsonCreator; 4 | 5 | 6 | public enum FieldType { 7 | STRING, INTEGER, DECIMAL, DOUBLE, LONG, BOOLEAN, DATE, TIME, TIMESTAMP, BINARY, 8 | ARRAY_STRING, ARRAY_INTEGER, ARRAY_DECIMAL, ARRAY_DOUBLE, ARRAY_LONG, ARRAY_BOOLEAN, ARRAY_DATE, ARRAY_TIME, ARRAY_TIMESTAMP, ARRAY_BINARY, 9 | MAP_STRING, MAP_INTEGER, MAP_DECIMAL, MAP_DOUBLE, MAP_LONG, MAP_BOOLEAN, MAP_DATE, MAP_TIME, MAP_TIMESTAMP, MAP_BINARY; 10 | 11 | private static final FieldType values[] = values(); 12 | 13 | @JsonCreator 14 | public static FieldType fromString(String key) { 15 | return key == null ? 
null : FieldType.valueOf(key.toUpperCase()); 16 | } 17 | 18 | public boolean isArray() { 19 | return ordinal() > 9 && !isMap(); 20 | } 21 | 22 | public boolean isMap() { 23 | return ordinal() > 19; 24 | } 25 | 26 | public boolean isNumeric() { 27 | return this == INTEGER || this == DECIMAL || this == DOUBLE || this == LONG; 28 | } 29 | 30 | public FieldType getArrayElementType() { 31 | if (!isArray()) { 32 | throw new IllegalStateException("type is not array"); 33 | } 34 | 35 | return values[ordinal() - 10]; 36 | } 37 | 38 | public FieldType getMapValueType() { 39 | if (!isMap()) { 40 | throw new IllegalStateException("type is not map"); 41 | } 42 | 43 | return values[ordinal() - 20]; 44 | } 45 | 46 | public FieldType convertToMapValueType() { 47 | if (isMap()) { 48 | throw new IllegalStateException("type is already a map"); 49 | } 50 | if (isArray()) { 51 | throw new IllegalStateException("type is already a array"); 52 | } 53 | 54 | return values[ordinal() + 20]; 55 | } 56 | 57 | public FieldType convertToArrayType() { 58 | if (ordinal() > 9) { 59 | throw new IllegalStateException("type is already array"); 60 | } 61 | 62 | return values[ordinal() + 10]; 63 | } 64 | 65 | public String getPrettyName() { 66 | if (isArray()) { 67 | return "ARRAY<" + getArrayElementType().toString() + ">"; 68 | } 69 | if (isMap()) { 70 | return "MAP"; 71 | } 72 | return toString(); 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/collection/SchemaField.java: -------------------------------------------------------------------------------- 1 | package org.rakam.collection; 2 | 3 | import com.fasterxml.jackson.annotation.JsonCreator; 4 | import com.fasterxml.jackson.annotation.JsonProperty; 5 | import com.fasterxml.jackson.databind.annotation.JsonSerialize; 6 | import io.netty.handler.codec.http.HttpResponseStatus; 7 | import org.rakam.server.http.annotations.ApiParam; 8 | import 
org.rakam.util.RakamException; 9 | import org.rakam.util.ValidationUtil; 10 | 11 | import static org.rakam.util.ValidationUtil.stripName; 12 | 13 | public class SchemaField { 14 | private final String name; 15 | private final FieldType type; 16 | @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) 17 | private final String description; 18 | @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) 19 | private final String descriptiveName; 20 | @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) 21 | private final String category; 22 | 23 | @JsonCreator 24 | public SchemaField(@ApiParam("name") String name, 25 | @ApiParam("type") FieldType type, 26 | @ApiParam(value = "descriptiveName", required = false) String descriptiveName, 27 | @ApiParam(value = "description", required = false) String description, 28 | @ApiParam(value = "category", required = false) String category) { 29 | this.name = ValidationUtil.checkNotNull(name, "name"); 30 | this.type = ValidationUtil.checkNotNull(type, "type"); 31 | this.descriptiveName = descriptiveName; 32 | this.description = description; 33 | this.category = category; 34 | if (this.name.isEmpty()) { 35 | throw new RakamException(String.format("Field name (%s) can't be empty string", this.name), 36 | HttpResponseStatus.BAD_REQUEST); 37 | } 38 | } 39 | 40 | public SchemaField(String name, FieldType type) { 41 | this(name, type, null, null, null); 42 | } 43 | 44 | @JsonProperty 45 | public String getName() { 46 | return name; 47 | } 48 | 49 | @JsonProperty 50 | public FieldType getType() { 51 | return type; 52 | } 53 | 54 | public String getCategory() { 55 | return category; 56 | } 57 | 58 | @JsonProperty 59 | public String getDescriptiveName() { 60 | if (descriptiveName == null) { 61 | String replace = name.replace("_", " ").trim(); 62 | return Character.toUpperCase(replace.charAt(0)) + replace.substring(1); 63 | } 64 | return descriptiveName; 65 | } 66 | 67 | @JsonProperty 68 | @JsonSerialize(include = 
JsonSerialize.Inclusion.NON_NULL) 69 | public String getDescription() { 70 | return description; 71 | } 72 | 73 | @Override 74 | public String toString() { 75 | return "SchemaField{" + 76 | "name='" + name + '\'' + 77 | ", type=" + type + 78 | '}'; 79 | } 80 | 81 | @Override 82 | public boolean equals(Object o) { 83 | if (this == o) { 84 | return true; 85 | } 86 | if (!(o instanceof SchemaField)) { 87 | return false; 88 | } 89 | 90 | SchemaField that = (SchemaField) o; 91 | 92 | if (!name.equals(that.name)) { 93 | return false; 94 | } 95 | if (type != that.type) { 96 | return false; 97 | } 98 | 99 | return true; 100 | } 101 | 102 | @Override 103 | public int hashCode() { 104 | int result = name.hashCode(); 105 | result = 31 * result + type.hashCode(); 106 | return result; 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/config/EncryptionConfig.java: -------------------------------------------------------------------------------- 1 | package org.rakam.config; 2 | 3 | import io.airlift.configuration.Config; 4 | import io.airlift.configuration.ConfigDescription; 5 | import org.rakam.util.CryptUtil; 6 | 7 | public class EncryptionConfig { 8 | private String secretKey = CryptUtil.generateRandomKey(70); 9 | 10 | public String getSecretKey() { 11 | return secretKey; 12 | } 13 | 14 | @Config("secret-key") 15 | @ConfigDescription("The secret key that will be used when encrypting sessions and passwords. " + 16 | "Do not expose this key because if it's known, the sessions may be hijacked. 
" + 17 | "If you don't set a secret key, it will be generated randomly for every restart.") 18 | public EncryptionConfig setSecretKey(String secretKey) { 19 | this.secretKey = secretKey; 20 | return this; 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/config/MetadataConfig.java: -------------------------------------------------------------------------------- 1 | package org.rakam.config; 2 | 3 | import io.airlift.configuration.Config; 4 | import io.swagger.models.ExternalDocs; 5 | 6 | public class MetadataConfig { 7 | public final static ExternalDocs centralDocs = new ExternalDocs().url("https://rakam.io/doc").description("Rakam Documentation"); 8 | 9 | private String metastore; 10 | private String eventStore; 11 | private String reportMetastore; 12 | private boolean enableDynamic = true; 13 | 14 | public boolean getEnableDynamic() { 15 | return enableDynamic; 16 | } 17 | 18 | @Config("event.schema.enable-dynamic") 19 | public MetadataConfig setEnableDynamic(boolean enableDynamic) { 20 | this.enableDynamic = enableDynamic; 21 | return this; 22 | } 23 | 24 | public String getReportMetastore() { 25 | return reportMetastore; 26 | } 27 | 28 | @Config("report.metadata.store") 29 | public MetadataConfig setReportMetastore(String store) { 30 | this.reportMetastore = store; 31 | return this; 32 | } 33 | 34 | public String getMetastore() { 35 | return metastore; 36 | } 37 | 38 | @Config("event.schema.store") 39 | public MetadataConfig setMetastore(String store) { 40 | this.metastore = store; 41 | return this; 42 | } 43 | 44 | public String getEventStore() { 45 | return eventStore; 46 | } 47 | 48 | @Config("event.store") 49 | public MetadataConfig setEventStore(String eventStore) { 50 | this.eventStore = eventStore; 51 | return this; 52 | } 53 | } 54 | -------------------------------------------------------------------------------- 
// rakam-spi/src/main/java/org/rakam/config/ProjectConfig.java
// --------------------------------------------------------------------------
package org.rakam.config;

import com.google.common.base.Splitter;
import io.airlift.configuration.Config;
import io.airlift.configuration.ConfigDescription;
// NOTE(review): Logger appears unused in this file — confirm before removing.
import io.airlift.log.Logger;

import java.util.List;

/** Project-wide settings: reserved column names, limits and feature toggles. */
public class ProjectConfig {
    private String lockKey;
    private String passphrase;
    private String timeColumn = "_time";
    private String clientTimeColumn = "_client_time";
    private String userColumn = "_user";
    private String companyName;
    private int maxStringLength = 100;
    private boolean allowProjectDeletion;
    private List<String> excludeEvents;

    public String getLockKey() {
        return lockKey;
    }

    @Config("lock-key")
    @ConfigDescription("A key that is required only for creating projects")
    public ProjectConfig setLockKey(String lockKey) {
        // An empty string is treated the same as "not configured".
        this.lockKey = lockKey != null && lockKey.isEmpty() ? null : lockKey;
        return this;
    }

    public String getPassphrase() {
        return passphrase;
    }

    @Config("passphrase")
    public ProjectConfig setPassphrase(String passphrase) {
        // Empty string is normalized to null, same as lock-key.
        this.passphrase = passphrase != null && passphrase.isEmpty() ? null : passphrase;
        return this;
    }

    public String getTimeColumn() {
        return timeColumn;
    }

    @Config("time-column")
    public ProjectConfig setTimeColumn(String timeColumn) {
        this.timeColumn = timeColumn;
        return this;
    }

    public String getClientTimeColumn() {
        return clientTimeColumn;
    }

    @Config("client-time-column")
    public ProjectConfig setClientTimeColumn(String timeColumn) {
        this.clientTimeColumn = timeColumn;
        return this;
    }

    public String getUserColumn() {
        return userColumn;
    }

    @Config("user-column")
    public ProjectConfig setUserColumn(String userColumn) {
        this.userColumn = userColumn;
        return this;
    }

    public String getCompanyName() {
        return companyName;
    }

    @Config("company-name")
    public ProjectConfig setCompanyName(String companyName) {
        this.companyName = companyName;
        return this;
    }

    @Config("exclude-events")
    public ProjectConfig setExcludeEvents(String excludeEvents) {
        // NOTE(review): assumes the config framework never passes null here —
        // Splitter.splitToList would throw NPE otherwise; confirm.
        this.excludeEvents = Splitter.on(",").splitToList(excludeEvents);
        return this;
    }

    public List<String> getExcludeEvents() {
        return excludeEvents;
    }

    public int getMaxStringLength() {
        return maxStringLength;
    }

    @Config("collection.max-string-length")
    public ProjectConfig setMaxStringLength(int maxStringLength) {
        this.maxStringLength = maxStringLength;
        return this;
    }

    public boolean getAllowProjectDeletion() {
        return allowProjectDeletion;
    }

    @Config("allow-project-deletion")
    public ProjectConfig setAllowProjectDeletion(boolean allowProjectDeletion) {
        this.allowProjectDeletion = allowProjectDeletion;
        return this;
    }
}

// --------------------------------------------------------------------------
// rakam-spi/src/main/java/org/rakam/config/TaskConfig.java
-------------------------------------------------------------------------------- 1 | package org.rakam.config; 2 | 3 | import io.airlift.configuration.Config; 4 | 5 | public class TaskConfig { 6 | public boolean tasksEnabled; 7 | private boolean webhookEnabled; 8 | 9 | public boolean getTasksEnabled() { 10 | return tasksEnabled; 11 | } 12 | 13 | @Config("tasks.enable") 14 | public TaskConfig setTasksEnabled(boolean enabled) { 15 | this.tasksEnabled = enabled; 16 | return this; 17 | } 18 | 19 | public boolean getWebhookEnabled() { 20 | return webhookEnabled; 21 | } 22 | 23 | @Config("webhook.enable") 24 | public TaskConfig setWebhookEnabled(boolean enabled) { 25 | this.webhookEnabled = enabled; 26 | return this; 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/plugin/DummyEventStore.java: -------------------------------------------------------------------------------- 1 | package org.rakam.plugin; 2 | 3 | import com.google.auto.service.AutoService; 4 | import com.google.inject.Binder; 5 | import org.rakam.collection.Event; 6 | import org.rakam.util.ConditionalModule; 7 | import org.rakam.util.RakamException; 8 | 9 | import java.util.List; 10 | 11 | import static io.netty.handler.codec.http.HttpResponseStatus.NOT_IMPLEMENTED; 12 | 13 | public class DummyEventStore 14 | implements SyncEventStore { 15 | @Override 16 | public void store(Event event) { 17 | throw new RakamException(NOT_IMPLEMENTED); 18 | } 19 | 20 | @Override 21 | public int[] storeBatch(List events) { 22 | throw new RakamException(NOT_IMPLEMENTED); 23 | } 24 | 25 | @AutoService(RakamModule.class) 26 | @ConditionalModule(config = "event.store", value = "dummy") 27 | public static class DummyEventStoreModule 28 | extends RakamModule { 29 | 30 | @Override 31 | protected void setup(Binder binder) { 32 | binder.bind(EventStore.class).to(DummyEventStore.class); 33 | } 34 | 35 | @Override 36 | public String name() { 37 | 
return "Dummy Event Store"; 38 | } 39 | 40 | @Override 41 | public String description() { 42 | return "Used if no event store implementation exists"; 43 | } 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/plugin/EventMapper.java: -------------------------------------------------------------------------------- 1 | package org.rakam.plugin; 2 | 3 | import com.google.common.collect.ImmutableList; 4 | import io.netty.handler.codec.http.HttpHeaders; 5 | import io.netty.handler.codec.http.cookie.Cookie; 6 | import org.rakam.collection.Event; 7 | import org.rakam.collection.EventList; 8 | import org.rakam.collection.FieldDependencyBuilder; 9 | 10 | import java.net.InetAddress; 11 | import java.util.ArrayList; 12 | import java.util.Collection; 13 | import java.util.List; 14 | import java.util.concurrent.CompletableFuture; 15 | 16 | public interface EventMapper { 17 | CompletableFuture> COMPLETED_EMPTY_FUTURE = CompletableFuture.completedFuture(null); 18 | 19 | CompletableFuture> mapAsync(Event event, RequestParams requestParams, InetAddress sourceAddress, HttpHeaders responseHeaders); 20 | 21 | default CompletableFuture> mapAsync(EventList events, RequestParams requestParams, InetAddress sourceAddress, HttpHeaders responseHeaders) { 22 | List cookies = new ArrayList<>(); 23 | CompletableFuture[] futures = null; 24 | int futureIndex = 0; 25 | for (int i = 0; i < events.events.size(); i++) { 26 | Event event = events.events.get(i); 27 | CompletableFuture> map = mapAsync(event, requestParams, sourceAddress, responseHeaders); 28 | if (map == null || map.equals(COMPLETED_EMPTY_FUTURE)) { 29 | continue; 30 | } 31 | 32 | CompletableFuture> future = map.thenApply(value -> { 33 | if (value != null) { 34 | cookies.addAll(value); 35 | } 36 | return cookies; 37 | }); 38 | 39 | if (futures == null) { 40 | futures = new CompletableFuture[events.events.size() - i]; 41 | } 42 | 43 | 
futures[futureIndex++] = future; 44 | } 45 | 46 | if (futures == null) { 47 | return COMPLETED_EMPTY_FUTURE; 48 | } else { 49 | return CompletableFuture.allOf(futures).thenApply(val -> cookies); 50 | } 51 | } 52 | 53 | default void addFieldDependency(FieldDependencyBuilder builder) { 54 | } 55 | 56 | default void init() { 57 | } 58 | 59 | interface RequestParams { 60 | RequestParams EMPTY_PARAMS = new RequestParams() { 61 | @Override 62 | public Collection cookies() { 63 | return ImmutableList.of(); 64 | } 65 | 66 | @Override 67 | public HttpHeaders headers() { 68 | return HttpHeaders.EMPTY_HEADERS; 69 | } 70 | }; 71 | 72 | default Collection cookies() { 73 | return ImmutableList.of(); 74 | } 75 | 76 | HttpHeaders headers(); 77 | } 78 | } -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/plugin/EventStore.java: -------------------------------------------------------------------------------- 1 | package org.rakam.plugin; 2 | 3 | import com.fasterxml.jackson.annotation.JsonCreator; 4 | import com.fasterxml.jackson.annotation.JsonProperty; 5 | import org.rakam.collection.Event; 6 | 7 | import java.util.List; 8 | import java.util.concurrent.CompletableFuture; 9 | 10 | public interface EventStore { 11 | int[] SUCCESSFUL_BATCH = new int[0]; 12 | CompletableFuture COMPLETED_FUTURE = CompletableFuture.completedFuture(null); 13 | CompletableFuture COMPLETED_FUTURE_BATCH = CompletableFuture.completedFuture(new int[]{}); 14 | 15 | default void store(Event event) { 16 | storeAsync(event).join(); 17 | } 18 | 19 | default int[] storeBatch(List events) { 20 | return storeBatchAsync(events).join(); 21 | } 22 | 23 | CompletableFuture storeBatchAsync(List events); 24 | 25 | CompletableFuture storeAsync(Event event); 26 | 27 | default void storeBulk(List events) { 28 | if (events.isEmpty()) { 29 | return; 30 | } 31 | storeBatch(events); 32 | } 33 | 34 | enum CopyType { 35 | AVRO, CSV, JSON; 36 | 37 | @JsonCreator 38 | 
public static CopyType get(String name) { 39 | return valueOf(name.toUpperCase()); 40 | } 41 | 42 | @JsonProperty 43 | public String value() { 44 | return name(); 45 | } 46 | } 47 | 48 | enum CompressionType { 49 | GZIP; 50 | 51 | @JsonCreator 52 | public static CompressionType get(String name) { 53 | return valueOf(name.toUpperCase()); 54 | } 55 | 56 | @JsonProperty 57 | public String value() { 58 | return name(); 59 | } 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/plugin/InjectionHook.java: -------------------------------------------------------------------------------- 1 | package org.rakam.plugin; 2 | 3 | @FunctionalInterface 4 | public interface InjectionHook { 5 | void call(); 6 | } 7 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/plugin/Parameter.java: -------------------------------------------------------------------------------- 1 | package org.rakam.plugin; 2 | 3 | import com.fasterxml.jackson.annotation.JsonCreator; 4 | import com.fasterxml.jackson.annotation.JsonInclude; 5 | import org.rakam.collection.FieldType; 6 | import org.rakam.server.http.annotations.ApiParam; 7 | 8 | import java.util.List; 9 | 10 | public class Parameter { 11 | public final FieldType type; 12 | @JsonInclude(JsonInclude.Include.NON_NULL) 13 | public final String placeholder; 14 | @JsonInclude(JsonInclude.Include.NON_NULL) 15 | public final boolean required; 16 | public final String description; 17 | @JsonInclude(JsonInclude.Include.NON_NULL) 18 | public final List choices; 19 | public final boolean hidden; 20 | public Object value; 21 | 22 | @JsonCreator 23 | public Parameter( 24 | @ApiParam("type") FieldType type, 25 | @ApiParam(value = "value", required = false) Object value, 26 | @ApiParam(value = "required", required = false) Boolean required, 27 | @ApiParam(value = "placeholder", required = false) String 
placeholder, 28 | @ApiParam(value = "choices", required = false) List choices, 29 | @ApiParam(value = "description", required = false) String description, 30 | @ApiParam(value = "hidden", required = false) Boolean hidden) { 31 | this.type = type; 32 | this.required = Boolean.TRUE.equals(required); 33 | this.value = value; 34 | this.placeholder = placeholder; 35 | this.choices = choices; 36 | this.description = description; 37 | this.hidden = Boolean.TRUE.equals(hidden); 38 | } 39 | 40 | public static class Choice { 41 | public final String key; 42 | public final String value; 43 | 44 | @JsonCreator 45 | public Choice(@ApiParam("key") String key, @ApiParam("value") String value) { 46 | this.key = key; 47 | this.value = value; 48 | } 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/plugin/ProjectItem.java: -------------------------------------------------------------------------------- 1 | package org.rakam.plugin; 2 | 3 | public interface ProjectItem { 4 | String project(); 5 | } 6 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/plugin/RakamModule.java: -------------------------------------------------------------------------------- 1 | package org.rakam.plugin; 2 | 3 | import com.google.inject.Binder; 4 | import com.google.inject.name.Names; 5 | import io.airlift.configuration.ConfigDefaults; 6 | import io.airlift.configuration.ConfigurationAwareModule; 7 | import io.airlift.configuration.ConfigurationFactory; 8 | import org.rakam.util.ConditionalModule; 9 | 10 | import javax.validation.constraints.NotNull; 11 | import java.lang.reflect.Field; 12 | import java.lang.reflect.InvocationTargetException; 13 | import java.lang.reflect.Method; 14 | import java.util.Objects; 15 | import java.util.Optional; 16 | 17 | import static com.google.common.base.Preconditions.checkNotNull; 18 | import static 
com.google.common.base.Preconditions.checkState; 19 | import static io.airlift.configuration.ConfigBinder.configBinder; 20 | 21 | public abstract class RakamModule 22 | implements ConfigurationAwareModule { 23 | private ConfigurationFactory configurationFactory; 24 | private Binder binder; 25 | 26 | @Override 27 | public synchronized void setConfigurationFactory(ConfigurationFactory configurationFactory) { 28 | this.configurationFactory = checkNotNull(configurationFactory, "configurationFactory is null"); 29 | } 30 | 31 | @Override 32 | public final synchronized void configure(Binder binder) { 33 | checkState(this.binder == null, "re-entry not allowed"); 34 | this.binder = checkNotNull(binder, "binder is null"); 35 | 36 | try { 37 | ConditionalModule annotation = this.getClass().getAnnotation(ConditionalModule.class); 38 | if (annotation != null) { 39 | configurationFactory.consumeProperty(annotation.config()); 40 | String value = Optional.ofNullable(configurationFactory.getProperties().get(annotation.config())) 41 | .map(v -> v.trim()).orElse(null); 42 | if (!Objects.equals(annotation.value(), value)) { 43 | if(!(annotation.value().equals("") && value != null)) { 44 | return; 45 | } 46 | } 47 | } 48 | 49 | setup(binder); 50 | } finally { 51 | this.binder = null; 52 | } 53 | } 54 | 55 | protected synchronized T buildConfigObject(Class configClass) { 56 | configBinder(binder).bindConfig(configClass); 57 | return configurationFactory.build(configClass); 58 | } 59 | 60 | protected synchronized String getConfig(String config) { 61 | String value = configurationFactory.getProperties().get(config); 62 | configurationFactory.consumeProperty(config); 63 | return value; 64 | } 65 | 66 | protected synchronized T buildConfigObject(Class configClass, String prefix) { 67 | configBinder(binder).bindConfig(configClass, 68 | prefix != null ? 
Names.named(prefix) : null, prefix); 69 | try { 70 | Method method = configurationFactory.getClass().getDeclaredMethod("build", Class.class, Optional.class, ConfigDefaults.class); 71 | method.setAccessible(true); 72 | Object invoke = method.invoke(configurationFactory, configClass, Optional.of(prefix), ConfigDefaults.noDefaults()); 73 | Field instance = invoke.getClass().getDeclaredField("instance"); 74 | instance.setAccessible(true); 75 | return (T) instance.get(invoke); 76 | } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException | NoSuchFieldException e) { 77 | throw new IllegalStateException("Internal error related to airlift.configuration library", e); 78 | } 79 | } 80 | 81 | protected synchronized void install(RakamModule module) { 82 | module.setConfigurationFactory(configurationFactory); 83 | binder.install(module); 84 | } 85 | 86 | protected abstract void setup(Binder binder); 87 | 88 | @NotNull 89 | public abstract String name(); 90 | 91 | public abstract String description(); 92 | } -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/plugin/SyncEventMapper.java: -------------------------------------------------------------------------------- 1 | package org.rakam.plugin; 2 | 3 | import io.netty.handler.codec.http.HttpHeaders; 4 | import io.netty.handler.codec.http.cookie.Cookie; 5 | import org.rakam.collection.Event; 6 | import org.rakam.collection.EventList; 7 | 8 | import java.net.InetAddress; 9 | import java.util.ArrayList; 10 | import java.util.List; 11 | import java.util.concurrent.CompletableFuture; 12 | 13 | public interface SyncEventMapper 14 | extends EventMapper { 15 | default List map(EventList events, RequestParams requestParams, InetAddress sourceAddress, HttpHeaders responseHeaders) { 16 | List cookies = null; 17 | for (Event event : events.events) { 18 | List map = map(event, requestParams, sourceAddress, responseHeaders); 19 | if (map != null) { 20 
package org.rakam.plugin;

import org.rakam.collection.Event;
import org.rakam.util.RakamException;

import java.util.List;
import java.util.concurrent.*;

import static io.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR;

/**
 * An {@link EventStore} whose persistence calls block the calling thread.
 * The asynchronous variants are emulated by dispatching the blocking calls
 * onto a shared, bounded worker pool.
 */
public interface SyncEventStore
        extends EventStore {
    // Worker pool shared by all implementations: scales from 0 up to 6 threads
    // per core, reclaims idle threads after 60 seconds, and bounds the backlog
    // at 20 queued tasks per maximum thread so that overload fails fast with
    // RejectedExecutionException instead of queueing without limit.
    ThreadPoolExecutor workerGroup = new ThreadPoolExecutor(
            0,
            Runtime.getRuntime().availableProcessors() * 6,
            60L,
            TimeUnit.SECONDS,
            new ArrayBlockingQueue<>(Runtime.getRuntime().availableProcessors() * 6 * 20));

    /**
     * Runs {@link #store(Event)} on the shared worker pool.
     *
     * @throws RakamException (500) when the pool is saturated and rejects the task
     */
    default CompletableFuture storeAsync(Event event) {
        try {
            return CompletableFuture.supplyAsync(() -> {
                store(event);
                return null;
            }, workerGroup);
        } catch (RejectedExecutionException rejected) {
            throw new RakamException("The server is busy, please try again later", INTERNAL_SERVER_ERROR);
        }
    }

    /**
     * Runs {@link #storeBatch(List)} on the shared worker pool.
     *
     * @return a future completing with the indexes of events that failed to store
     * @throws RakamException (500) when the pool is saturated and rejects the task
     */
    default CompletableFuture<int[]> storeBatchAsync(List<Event> events) {
        try {
            return CompletableFuture.supplyAsync(() -> storeBatch(events), workerGroup);
        } catch (RejectedExecutionException rejected) {
            throw new RakamException("The server is busy, please try again later", INTERNAL_SERVER_ERROR);
        }
    }

    /** Synchronously persists a single event. */
    void store(Event event);

    /**
     * Synchronously persists a batch of events.
     *
     * @return indexes of the events that could not be stored
     */
    int[] storeBatch(List<Event> events);
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.rakam.plugin;

import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.cookie.Cookie;
import org.apache.avro.generic.GenericRecord;
import org.rakam.Mapper;
import org.rakam.collection.Event;
import org.rakam.collection.FieldDependencyBuilder;
import org.rakam.collection.FieldType;
import org.rakam.collection.SchemaField;
import org.rakam.config.ProjectConfig;

import javax.inject.Inject;
import java.net.InetAddress;
import java.time.Instant;
import java.util.List;

import static com.google.common.collect.ImmutableList.of;

/**
 * Ensures every event carries a value in the configured time column:
 * falls back to server time when the client sent none, and corrects
 * client-supplied timestamps for clock skew when an upload time is available.
 */
@Mapper(name = "Timestamp mapper", description = "Attaches or re-configures time attribute of events.")
public class TimestampEventMapper
        implements SyncEventMapper {
    private static final int HASHCODE = TimestampEventMapper.class.getName().hashCode();
    private final ProjectConfig projectConfig;

    @Inject
    public TimestampEventMapper(ProjectConfig projectConfig) {
        this.projectConfig = projectConfig;
    }

    /**
     * Attaches or adjusts the event's time attribute (epoch milliseconds).
     *
     * @return always {@code null}; this mapper sets no cookies
     */
    @Override
    public List<Cookie> map(Event event, RequestParams extraProperties, InetAddress sourceAddress, HttpHeaders responseHeaders) {
        GenericRecord properties = event.properties();
        Object time = properties.get(projectConfig.getTimeColumn());
        if (time == null) {
            // No client-supplied timestamp: use server time in epoch millis.
            // Fix: previously computed as getEpochSecond() * 1000, which
            // silently truncated sub-second precision.
            properties.put(projectConfig.getTimeColumn(), Instant.now().toEpochMilli());
        } else if (time instanceof Number && event.api() != null && event.api().uploadTime != null) {
            // Estimate the client clock skew from the difference between the
            // server's receive time and the client's reported upload time, and
            // shift the client timestamp accordingly.
            long shift = System.currentTimeMillis() - event.api().uploadTime;
            long fixedTime = ((Number) time).longValue() + shift;
            properties.put(projectConfig.getTimeColumn(), fixedTime);
        }
        return null;
    }

    @Override
    public void addFieldDependency(FieldDependencyBuilder builder) {
        // Declares the time column so the collection schema always contains it.
        builder.addFields(of(
                new SchemaField(projectConfig.getTimeColumn(), FieldType.TIMESTAMP)
        ));
    }

    // All instances are interchangeable, so hash/equality are type-based.
    @Override
    public int hashCode() {
        return HASHCODE;
    }

    @Override
    public boolean equals(Object obj) {
        return obj instanceof TimestampEventMapper;
    }
}
4 | * You may obtain a copy of the License at 5 | * 6 | * http://www.apache.org/licenses/LICENSE-2.0 7 | * 8 | * Unless required by applicable law or agreed to in writing, software 9 | * distributed under the License is distributed on an "AS IS" BASIS, 10 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | * See the License for the specific language governing permissions and 12 | * limitations under the License. 13 | */ 14 | package org.rakam.plugin.stream; 15 | 16 | import com.fasterxml.jackson.annotation.JsonCreator; 17 | import com.fasterxml.jackson.annotation.JsonProperty; 18 | 19 | 20 | public class CollectionStreamQuery { 21 | private final String collection; 22 | private final String filter; 23 | 24 | @JsonCreator 25 | public CollectionStreamQuery(@JsonProperty("collection") String collection, 26 | @JsonProperty("filter") String filter) { 27 | this.collection = collection; 28 | this.filter = filter; 29 | } 30 | 31 | @JsonProperty 32 | public String getCollection() { 33 | return collection; 34 | } 35 | 36 | @JsonProperty 37 | public String getFilter() { 38 | return filter; 39 | } 40 | 41 | @Override 42 | public boolean equals(Object o) { 43 | if (this == o) { 44 | return true; 45 | } 46 | if (o == null || getClass() != o.getClass()) { 47 | return false; 48 | } 49 | 50 | CollectionStreamQuery that = (CollectionStreamQuery) o; 51 | 52 | if (!collection.equals(that.collection)) { 53 | return false; 54 | } 55 | return filter != null ? filter.equals(that.filter) : that.filter == null; 56 | } 57 | 58 | @Override 59 | public int hashCode() { 60 | int result = collection.hashCode(); 61 | result = 31 * result + (filter != null ? 
filter.hashCode() : 0); 62 | return result; 63 | } 64 | } -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/plugin/stream/EventStream.java: -------------------------------------------------------------------------------- 1 | package org.rakam.plugin.stream; 2 | 3 | 4 | import java.util.List; 5 | 6 | 7 | public interface EventStream { 8 | EventStreamer subscribe(String project, 9 | List collections, 10 | List columns, 11 | StreamResponse response); 12 | 13 | interface EventStreamer { 14 | void sync(); 15 | 16 | void shutdown(); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/plugin/stream/EventStreamConfig.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed under the Apache License, Version 2.0 (the "License"); 3 | * you may not use this file except in compliance with the License. 4 | * You may obtain a copy of the License at 5 | * 6 | * http://www.apache.org/licenses/LICENSE-2.0 7 | * 8 | * Unless required by applicable law or agreed to in writing, software 9 | * distributed under the License is distributed on an "AS IS" BASIS, 10 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | * See the License for the specific language governing permissions and 12 | * limitations under the License. 
package org.rakam.plugin.stream;

/**
 * Sink for server-pushed event-stream frames delivered to a subscribed client.
 * Implementations wrap the underlying transport (e.g. SSE/WebSocket channel).
 */
public interface StreamResponse {
    // Note: interface members are implicitly public; the redundant `public`
    // modifiers were removed per standard Java style.

    /**
     * Sends a single frame to the client.
     *
     * @param event the frame/event name
     * @param data  the frame payload
     * @return this response, to allow call chaining
     */
    StreamResponse send(String event, String data);

    /** @return {@code true} when the client connection has been closed */
    boolean isClosed();

    /** Terminates the stream and releases the underlying connection. */
    void end();
}
org.rakam.collection.Event; 7 | import org.rakam.server.http.annotations.ApiParam; 8 | 9 | public class User { 10 | public final ObjectNode properties; 11 | public final UserContext api; 12 | public Object id; 13 | 14 | @JsonCreator 15 | public User(@ApiParam(value = "id", description = "The value may be a string or a numeric value.") Object id, 16 | @ApiParam("api") UserContext api, 17 | @ApiParam("properties") ObjectNode properties) { 18 | this.id = id; 19 | this.api = api; 20 | this.properties = properties; 21 | } 22 | 23 | public void setId(Object id) { 24 | this.id = id; 25 | } 26 | 27 | public static class UserContext { 28 | @JsonProperty(value = "library") 29 | public final Event.Library library; 30 | @JsonProperty(value = "api_key") 31 | public final String apiKey; 32 | @JsonProperty(value = "upload_time") 33 | public final Long uploadTime; 34 | @JsonProperty(value = "checksum") 35 | public final String checksum; 36 | 37 | @JsonCreator 38 | public UserContext(@ApiParam("api_key") String apiKey, 39 | @ApiParam(value = "library", required = false) Event.Library library, 40 | @ApiParam(value = "api_library", required = false) String apiLibrary, 41 | @ApiParam(value = "api_version", required = false) String apiVersion, 42 | @ApiParam(value = "upload_time", required = false) Long uploadTime, 43 | @ApiParam(value = "checksum", required = false) String checksum) { 44 | this.apiKey = apiKey; 45 | this.library = library != null ? 
package org.rakam.plugin.user;

import com.fasterxml.jackson.databind.node.ObjectNode;
import org.rakam.analysis.RequestContext;
import org.rakam.collection.SchemaField;

import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;

/**
 * Storage backend for user profiles: creation, lookup, and incremental
 * property mutation (set / set-once / unset / increment).
 */
public interface UserStorage {
    String PRIMARY_KEY = "id";

    /** Creates a user and returns its (possibly generated) identifier. */
    Object create(String project, Object id, ObjectNode properties);

    /** Creates many users and returns their identifiers in order. */
    List batchCreate(RequestContext context, List<User> users);

    /**
     * Applies a batch of per-user operations sequentially. Each operation may
     * carry any combination of set / set-once / unset / increment mutations.
     *
     * @return a completed future once all operations have been applied
     */
    default CompletableFuture batch(String project, List<? extends ISingleUserBatchOperation> operations) {
        for (ISingleUserBatchOperation operation : operations) {
            // Fix: the guard previously checked getSetPropertiesOnce(), so
            // plain set-properties payloads were silently dropped (and a
            // set-once payload triggered a setUserProperties(null) call).
            if (operation.getSetProperties() != null) {
                setUserProperties(project, operation.getUser(), operation.getSetProperties());
            }
            if (operation.getSetPropertiesOnce() != null) {
                setUserPropertiesOnce(project, operation.getUser(), operation.getSetPropertiesOnce());
            }
            if (operation.getUnsetProperties() != null) {
                unsetProperties(project, operation.getUser(), operation.getUnsetProperties());
            }
            if (operation.getIncrementProperties() != null) {
                for (Map.Entry<String, Double> entry : operation.getIncrementProperties().entrySet()) {
                    incrementProperty(project, operation.getUser(), entry.getKey(), entry.getValue());
                }
            }
        }
        return CompletableFuture.completedFuture(null);
    }

    /** @return the schema (fields) of the user table for this project */
    List<SchemaField> getMetadata(RequestContext context);

    CompletableFuture<User> getUser(RequestContext context, Object userId);

    void setUserProperties(String project, Object user, ObjectNode properties);

    void setUserPropertiesOnce(String project, Object user, ObjectNode properties);

    /** Optional hook; default is a no-op for backends without per-project setup. */
    default void createProjectIfNotExists(String project, boolean isNumeric) {

    }

    void incrementProperty(String project, Object user, String property, double value);

    void dropProjectIfExists(String project);

    void unsetProperties(String project, Object user, List<String> properties);

    /**
     * Synchronous variant of {@link #batch}: applies each operation in order.
     */
    default void applyOperations(String project, List<? extends ISingleUserBatchOperation> req) {
        for (ISingleUserBatchOperation data : req) {
            if (data.getSetProperties() != null) {
                // Fix: previously forwarded getSetPropertiesOnce() here,
                // writing the wrong payload (often null) as plain properties.
                setUserProperties(project, data.getUser(), data.getSetProperties());
            }
            // Fix: the guard previously re-checked getSetProperties(), so
            // set-once payloads were applied or skipped based on the wrong field.
            if (data.getSetPropertiesOnce() != null) {
                setUserPropertiesOnce(project, data.getUser(), data.getSetPropertiesOnce());
            }
            if (data.getUnsetProperties() != null) {
                unsetProperties(project, data.getUser(), data.getUnsetProperties());
            }
            if (data.getIncrementProperties() != null) {
                for (Map.Entry<String, Double> entry : data.getIncrementProperties().entrySet()) {
                    incrementProperty(project, data.getUser(), entry.getKey(), entry.getValue());
                }
            }
        }
    }
}
package org.rakam.report; 2 | 3 | 4 | import com.fasterxml.jackson.annotation.JsonCreator; 5 | import org.rakam.server.http.annotations.ApiParam; 6 | 7 | public class QueryError { 8 | public final String message; 9 | public final String sqlState; 10 | public final Integer errorCode; 11 | public final Integer errorLine; 12 | public final Integer charPositionInLine; 13 | 14 | @JsonCreator 15 | public QueryError( 16 | @ApiParam("message") String message, 17 | @ApiParam(value = "sqlState", required = false) String sqlState, 18 | @ApiParam(value = "errorCode", required = false) Integer errorCode, 19 | @ApiParam(value = "errorLine", required = false) Integer errorLine, 20 | @ApiParam(value = "charPositionInLine", required = false) Integer charPositionInLine) { 21 | this.message = message; 22 | this.sqlState = sqlState; 23 | this.errorCode = errorCode; 24 | this.errorLine = errorLine; 25 | this.charPositionInLine = charPositionInLine; 26 | } 27 | 28 | public static QueryError create(String message) { 29 | return new QueryError(message, null, null, null, null); 30 | } 31 | 32 | @Override 33 | public String toString() { 34 | return "QueryError{" + 35 | "message='" + message + '\'' + 36 | ", sqlState='" + sqlState + '\'' + 37 | ", errorCode=" + errorCode + 38 | '}'; 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/util/AllowCookie.java: -------------------------------------------------------------------------------- 1 | package org.rakam.util; 2 | 3 | import java.lang.annotation.ElementType; 4 | import java.lang.annotation.Retention; 5 | import java.lang.annotation.RetentionPolicy; 6 | import java.lang.annotation.Target; 7 | 8 | @Target(ElementType.METHOD) 9 | @Retention(RetentionPolicy.RUNTIME) 10 | public @interface AllowCookie { 11 | } 12 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/util/AlreadyExistsException.java: 
-------------------------------------------------------------------------------- 1 | package org.rakam.util; 2 | 3 | import io.netty.handler.codec.http.HttpResponseStatus; 4 | 5 | public class AlreadyExistsException extends RakamException { 6 | public AlreadyExistsException(String itemName, HttpResponseStatus status) { 7 | super(String.format("%s already exists", itemName), status); 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/util/ConditionalModule.java: -------------------------------------------------------------------------------- 1 | package org.rakam.util; 2 | 3 | import java.lang.annotation.ElementType; 4 | import java.lang.annotation.Retention; 5 | import java.lang.annotation.RetentionPolicy; 6 | import java.lang.annotation.Target; 7 | 8 | 9 | @Retention(RetentionPolicy.RUNTIME) 10 | @Target(ElementType.TYPE) 11 | public @interface ConditionalModule { 12 | String config(); 13 | 14 | String value() default ""; 15 | } 16 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/util/LogUtil.java: -------------------------------------------------------------------------------- 1 | package org.rakam.util; 2 | 3 | import io.netty.handler.codec.http.HttpResponseStatus; 4 | import io.sentry.Sentry; 5 | import io.sentry.event.Event; 6 | import io.sentry.event.EventBuilder; 7 | import io.sentry.event.interfaces.ExceptionInterface; 8 | import io.sentry.event.interfaces.HttpInterface; 9 | import org.rakam.server.http.RakamHttpRequest; 10 | import org.rakam.server.http.RakamServletWrapper; 11 | 12 | public class LogUtil { 13 | 14 | public static void logException(RakamHttpRequest request, RakamException e) { 15 | Sentry.capture(buildEvent(request) 16 | .withMessage(e.getErrors().get(0).title) 17 | .withSentryInterface(new ExceptionInterface(e), false) 18 | .withTag("status", 
e.getStatusCode().reasonPhrase()).withLevel(Event.Level.WARNING) 19 | .build()); 20 | } 21 | 22 | public static void logException(RakamHttpRequest request, Throwable e) { 23 | Sentry.capture(buildEvent(request) 24 | .withSentryInterface(new ExceptionInterface(e), false) 25 | .build()); 26 | } 27 | 28 | private static EventBuilder buildEvent(RakamHttpRequest request) { 29 | return new EventBuilder() 30 | .withSentryInterface(new HttpInterface(new RakamServletWrapper(request)), false) 31 | .withLogger(RakamException.class.getName()) 32 | .withRelease(RakamClient.RELEASE); 33 | } 34 | 35 | public static void logException(RakamHttpRequest request, IllegalArgumentException e) { 36 | logException(request, new RakamException(e.getMessage(), HttpResponseStatus.BAD_REQUEST)); 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/util/MailSender.java: -------------------------------------------------------------------------------- 1 | package org.rakam.util; 2 | 3 | import com.google.common.base.Throwables; 4 | import com.google.common.collect.ImmutableList; 5 | 6 | import javax.mail.*; 7 | import javax.mail.internet.*; 8 | import java.io.UnsupportedEncodingException; 9 | import java.util.List; 10 | import java.util.Optional; 11 | import java.util.stream.Stream; 12 | 13 | public class MailSender { 14 | private final Session session; 15 | private final InternetAddress fromAddress; 16 | 17 | public MailSender(Session session, String fromAddress, String fromName) { 18 | this.session = session; 19 | try { 20 | this.fromAddress = new InternetAddress(fromAddress, fromName); 21 | } catch (UnsupportedEncodingException e) { 22 | throw Throwables.propagate(e); 23 | } 24 | } 25 | 26 | public void sendMail(String toEmail, String title, String textContent, Optional richText, Stream parts) 27 | throws MessagingException { 28 | sendMail(ImmutableList.of(toEmail), title, textContent, richText, parts); 29 | } 30 
package org.rakam.util;

import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;

import javax.mail.*;
import javax.mail.internet.*;
import java.io.UnsupportedEncodingException;
import java.util.List;
import java.util.Optional;
import java.util.stream.Stream;

/**
 * Thin wrapper around a JavaMail {@link Session} for sending plain-text
 * messages with an optional HTML alternative and extra MIME parts.
 */
public class MailSender {
    private final Session session;
    private final InternetAddress fromAddress;

    public MailSender(Session session, String fromAddress, String fromName) {
        this.session = session;
        try {
            this.fromAddress = new InternetAddress(fromAddress, fromName);
        } catch (UnsupportedEncodingException e) {
            throw Throwables.propagate(e);
        }
    }

    /** Sends to a single recipient; see the list overload for details. */
    public void sendMail(String toEmail, String title, String textContent, Optional<String> richText, Stream<MimeBodyPart> parts)
            throws MessagingException {
        sendMail(ImmutableList.of(toEmail), title, textContent, richText, parts);
    }

    /**
     * Sends a message to every recipient in {@code toEmail}.
     *
     * @param textContent plain-text body; skipped when {@code null}
     * @param richText    optional HTML body; when present the message becomes
     *                    multipart and {@code parts} are appended to it
     * @param parts       extra MIME parts (only consumed when richText is present)
     */
    public void sendMail(List<String> toEmail, String title, String textContent, Optional<String> richText, Stream<MimeBodyPart> parts)
            throws MessagingException {
        Message message = new MimeMessage(session);
        message.setFrom(fromAddress);
        InternetAddress[] recipients = toEmail.stream()
                .map(MailSender::parseAddress)
                .toArray(InternetAddress[]::new);
        message.addRecipients(MimeMessage.RecipientType.TO, recipients);
        message.setSubject(title);
        if (textContent != null) {
            message.setText(textContent);
        }

        if (richText.isPresent()) {
            Multipart multipart = new MimeMultipart();
            MimeBodyPart htmlPart = new MimeBodyPart();
            htmlPart.setContent(richText.get(), "text/html");
            multipart.addBodyPart(htmlPart);
            parts.forEach(part -> attach(multipart, part));
            message.setContent(multipart);
        }
        Transport.send(message);
    }

    // Parses a recipient address, converting the checked AddressException into
    // an unchecked failure (addresses come from trusted configuration/callers).
    private static InternetAddress parseAddress(String email) {
        try {
            return new InternetAddress(email);
        } catch (AddressException e) {
            throw Throwables.propagate(e);
        }
    }

    // Adds a body part, converting the checked MessagingException for use
    // inside the stream pipeline.
    private static void attach(Multipart multipart, MimeBodyPart part) {
        try {
            multipart.addBodyPart(part);
        } catch (MessagingException e) {
            throw Throwables.propagate(e);
        }
    }
}
| public void put(String key, Object v) { 34 | if (v instanceof String) { 35 | properties.put(key, (String) v); 36 | } else if (v instanceof Integer) { 37 | properties.put(key, (Integer) v); 38 | } else if (v instanceof Float) { 39 | properties.put(key, (Float) v); 40 | } else if (v instanceof Double) { 41 | properties.put(key, (Double) v); 42 | } else if (v instanceof Long) { 43 | properties.put(key, (Long) v); 44 | } else if (v instanceof byte[]) { 45 | properties.put(key, (byte[]) v); 46 | } else if (v instanceof Boolean) { 47 | properties.put(key, (Boolean) v); 48 | } else { 49 | throw new UnsupportedOperationException(); 50 | } 51 | } 52 | 53 | @Override 54 | public Object get(String key) { 55 | return properties.get(key); 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/util/NotExistsException.java: -------------------------------------------------------------------------------- 1 | package org.rakam.util; 2 | 3 | 4 | import io.netty.handler.codec.http.HttpResponseStatus; 5 | 6 | public class NotExistsException extends RakamException { 7 | public NotExistsException(String itemName) { 8 | super(String.format("%s does not exist", itemName), HttpResponseStatus.NOT_FOUND); 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/util/NotFoundHandler.java: -------------------------------------------------------------------------------- 1 | package org.rakam.util; 2 | 3 | import com.google.inject.BindingAnnotation; 4 | 5 | import java.lang.annotation.ElementType; 6 | import java.lang.annotation.Retention; 7 | import java.lang.annotation.Target; 8 | 9 | import static java.lang.annotation.RetentionPolicy.RUNTIME; 10 | 11 | @Retention(RUNTIME) 12 | @Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD}) 13 | @BindingAnnotation 14 | public @interface NotFoundHandler { 15 | } 16 | 
-------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/util/ProjectCollection.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed under the Apache License, Version 2.0 (the "License"); 3 | * you may not use this file except in compliance with the License. 4 | * You may obtain a copy of the License at 5 | * 6 | * http://www.apache.org/licenses/LICENSE-2.0 7 | * 8 | * Unless required by applicable law or agreed to in writing, software 9 | * distributed under the License is distributed on an "AS IS" BASIS, 10 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | * See the License for the specific language governing permissions and 12 | * limitations under the License. 13 | */ 14 | package org.rakam.util; 15 | 16 | import static org.rakam.util.ValidationUtil.checkNotNull; 17 | 18 | public class ProjectCollection { 19 | public final String project; 20 | public final String collection; 21 | 22 | public ProjectCollection(String project, String collection) { 23 | this.project = checkNotNull(project, "project is null"); 24 | this.collection = checkNotNull(collection, "collection is null"); 25 | } 26 | 27 | @Override 28 | public boolean equals(Object o) { 29 | if (this == o) return true; 30 | if (!(o instanceof ProjectCollection)) return false; 31 | 32 | ProjectCollection that = (ProjectCollection) o; 33 | 34 | if (!project.equals(that.project)) return false; 35 | return collection.equals(that.collection); 36 | 37 | } 38 | 39 | @Override 40 | public int hashCode() { 41 | int result = project.hashCode(); 42 | result = 31 * result + collection.hashCode(); 43 | return result; 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /rakam-spi/src/main/java/org/rakam/util/RakamException.java: -------------------------------------------------------------------------------- 1 | package 
package org.rakam.util;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonInclude;
import org.rakam.server.http.annotations.ApiParam;

import java.util.Objects;

/**
 * Standard API success envelope: {@code success} is always {@code true} and
 * an optional human-readable message may accompany it. The message-less
 * instance is a shared singleton.
 */
public class SuccessMessage {
    private static final SuccessMessage SUCCESS = new SuccessMessage(null);

    public final boolean success = true;
    @JsonInclude(JsonInclude.Include.NON_NULL)
    public final String message;

    @JsonCreator
    private SuccessMessage(@ApiParam("message") String message) {
        this.message = message;
    }

    /** @return the shared message-less success instance */
    public static SuccessMessage success() {
        return SUCCESS;
    }

    /** @return a success response carrying {@code message} */
    public static SuccessMessage success(String message) {
        return new SuccessMessage(message);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof SuccessMessage)) {
            return false;
        }
        return Objects.equals(message, ((SuccessMessage) o).message);
    }

    @Override
    public int hashCode() {
        // Preserves the original convention: null message hashes to 1 (not 0).
        if (message == null) {
            return 1;
        }
        return message.hashCode();
    }
}
/**
 * Abstract contract test for {@link ApiKeyService} implementations; concrete
 * backends (JDBC, etc.) subclass this and supply the service under test.
 */
public abstract class TestApiKeyService {
    // Project name derived from the class name so different test classes
    // never collide on the same project.
    private static final String PROJECT_NAME = TestApiKeyService.class.getName().replace(".", "_").toLowerCase();

    public abstract ApiKeyService getApiKeyService();

    // Revoke every key after each test so test methods stay independent.
    @AfterMethod
    public void tearDownMethod() throws Exception {
        getApiKeyService().revokeAllKeys(PROJECT_NAME);
    }

    @Test
    public void testCreateApiKeys() {
        ApiKeyService.ProjectApiKeys testing = getApiKeyService().createApiKeys(PROJECT_NAME);

        // Both key types must resolve back to the project they were created for.
        assertEquals(getApiKeyService().getProjectOfApiKey(testing.writeKey(), AccessKeyType.WRITE_KEY), PROJECT_NAME);
        assertEquals(getApiKeyService().getProjectOfApiKey(testing.masterKey(), AccessKeyType.MASTER_KEY), PROJECT_NAME);
    }

    @Test
    public void testInvalidApiKeys() {
        getApiKeyService().createApiKeys(PROJECT_NAME);

        // Unknown keys must be rejected with RakamException; try/fail/catch is
        // the expected-exception idiom used throughout this suite.
        try {
            getApiKeyService().getProjectOfApiKey("invalidKey", AccessKeyType.WRITE_KEY);
            fail();
        } catch (RakamException e) {
        }

        try {
            getApiKeyService().getProjectOfApiKey("invalidKey", AccessKeyType.MASTER_KEY);
            fail();
        } catch (RakamException e) {
        }
    }

    @Test
    public void testRevokeApiKeys() {
        ApiKeyService.ProjectApiKeys apiKeys = getApiKeyService().createApiKeys(PROJECT_NAME);

        // Revoking by master key must invalidate the whole key set,
        // not just the master key itself.
        getApiKeyService().revokeApiKeys(PROJECT_NAME, apiKeys.masterKey());

        try {
            getApiKeyService().getProjectOfApiKey(apiKeys.writeKey(), AccessKeyType.WRITE_KEY);
            fail();
        } catch (RakamException e) {
        }

        try {
            getApiKeyService().getProjectOfApiKey(apiKeys.masterKey(), AccessKeyType.MASTER_KEY);
            fail();
        } catch (RakamException e) {
        }
    }
}
getConfigManager().setConfigOnce(PROJECT_NAME + "i", "test", "naber2"); 34 | 35 | assertEquals(getConfigManager().getConfig(PROJECT_NAME, "test", String.class), "naber"); 36 | assertEquals(getConfigManager().getConfig(PROJECT_NAME + "i", "test", String.class), "naber2"); 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /rakam/bin/launcher: -------------------------------------------------------------------------------- 1 | #!/bin/sh -eu 2 | # 3 | # Launcher for Airlift applications 4 | # 5 | # Java must be in PATH. 6 | # 7 | # This launcher script, launcher.py and launcher.properties must be 8 | # located in 'bin'. The properties file must contain a 'main-class' 9 | # entry that specifies the Java class to execute. 10 | # 11 | # The classpath will contain everything in the 'lib' directory. 12 | # 13 | # Config files must be located in 'etc': 14 | # 15 | # jvm.config -- required: Java command line options, one per line 16 | # config.properties -- required: application configuration properties 17 | # node.properties -- optional: application environment properties 18 | # log.properties -- optional: log levels 19 | # 20 | # The 'etc' and 'plugin' directories will be symlinked into the data 21 | # directory before the process is started, allowing the application to 22 | # easily reference these at runtime. 23 | # 24 | # When run as a daemon, the application will log to the server log and 25 | # stdout and stderr are redirected to the launcher log. 26 | # 27 | # The following commands are supported: 28 | # 29 | # run -- run the application in the foreground (for debugging) 30 | # start -- run the application as a daemon 31 | # stop -- request the application to terminate (SIGTERM) 32 | # kill -- forcibly terminate the application (SIGKILL) 33 | # restart -- run the stop command, then run the start command 34 | # status -- check if the application is running (0=true, 3=false) 35 | # 36 | # Run with --help to see options. 
37 | # 38 | 39 | exec "$(dirname "$0")/launcher.py" "$@" 40 | -------------------------------------------------------------------------------- /rakam/bin/launcher.properties: -------------------------------------------------------------------------------- 1 | main-class=org.rakam.ServiceStarter 2 | process-name=rakam-server 3 | -------------------------------------------------------------------------------- /rakam/bin/procname/Linux-x86_64/libprocname.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rakam-io/rakam-api/ef75341e5337a2c78b4ae2b30cd78cd01ac1a362/rakam/bin/procname/Linux-x86_64/libprocname.so -------------------------------------------------------------------------------- /rakam/src/main/java/org/rakam/LogModule.java: -------------------------------------------------------------------------------- 1 | package org.rakam; 2 | 3 | import com.google.auto.service.AutoService; 4 | import com.google.inject.Binder; 5 | import io.airlift.configuration.Config; 6 | import io.sentry.Sentry; 7 | import io.sentry.event.User; 8 | import org.rakam.config.ProjectConfig; 9 | import org.rakam.plugin.RakamModule; 10 | import org.rakam.util.RakamClient; 11 | 12 | import java.net.InetAddress; 13 | import java.net.UnknownHostException; 14 | 15 | @AutoService(RakamModule.class) 16 | public class LogModule 17 | extends RakamModule { 18 | private static String HOST_NAME; 19 | 20 | static { 21 | InetAddress ip; 22 | try { 23 | ip = InetAddress.getLocalHost(); 24 | HOST_NAME = ip.getHostName(); 25 | } catch (UnknownHostException e) { 26 | // 27 | } 28 | } 29 | 30 | @Override 31 | protected void setup(Binder binder) { 32 | LogConfig logConfig = buildConfigObject(LogConfig.class); 33 | ProjectConfig projectConfig = buildConfigObject(ProjectConfig.class); 34 | if (logConfig.getLogActive()) { 35 | if(projectConfig.getCompanyName() != null) { 36 | 
Sentry.getStoredClient().setEnvironment(projectConfig.getCompanyName()); 37 | } 38 | Sentry.getStoredClient().setRelease(RakamClient.RELEASE); 39 | Sentry.getStoredClient().setServerName(HOST_NAME); 40 | Sentry.init(logConfig.getSentryUri() + "?stacktrace.app.packages=org.rakam"); 41 | } 42 | } 43 | 44 | @Override 45 | public String name() { 46 | return null; 47 | } 48 | 49 | @Override 50 | public String description() { 51 | return null; 52 | } 53 | 54 | public static class LogConfig { 55 | private static final String PUBLIC_SENTRY_DSN = "https://76daa36329be422ab9b592ab7239c2aa@sentry.io/1290994"; 56 | 57 | private boolean logActive = true; 58 | private String sentryUri = PUBLIC_SENTRY_DSN; 59 | 60 | public boolean getLogActive() { 61 | return logActive; 62 | } 63 | 64 | @Config("log-active") 65 | public LogConfig setLogActive(boolean logActive) { 66 | this.logActive = logActive; 67 | return this; 68 | } 69 | 70 | @Config("sentry-uri") 71 | public LogConfig setSentryUri(String sentryUri) { 72 | this.sentryUri = sentryUri; 73 | return this; 74 | } 75 | 76 | public String getSentryUri() { 77 | return sentryUri; 78 | } 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /rakam/src/main/java/org/rakam/SystemRegistryGenerator.java: -------------------------------------------------------------------------------- 1 | package org.rakam; 2 | 3 | import com.google.inject.Module; 4 | import org.rakam.bootstrap.SystemRegistry; 5 | import org.rakam.bootstrap.SystemRegistry.ModuleDescriptor.Condition; 6 | import org.rakam.util.JsonHelper; 7 | 8 | import java.io.IOException; 9 | import java.io.PrintWriter; 10 | import java.util.Locale; 11 | import java.util.Set; 12 | 13 | public final class SystemRegistryGenerator { 14 | 15 | private SystemRegistryGenerator() throws InstantiationException { 16 | throw new InstantiationException("The class is not created for instantiation"); 17 | } 18 | 19 | public static void main(String[] args) 
/**
 * Command-line tool that dumps the registry of available modules and their
 * configuration properties, either as JSON or as a commented .properties
 * template. Usage: pass a single argument, "json" or "properties".
 */
public final class SystemRegistryGenerator {

    // Utility class: instantiation is forbidden.
    private SystemRegistryGenerator() throws InstantiationException {
        throw new InstantiationException("The class is not created for instantiation");
    }

    public static void main(String[] args)
            throws IOException {
        try {
            if (args.length != 1 || !args[0].equals("json") && !args[0].equals("properties")) {
                System.err.println("Usage: [json] or [properties]");
                System.exit(1);
            }
            Set allModules = ServiceStarter.getModules();
            // Both "installed" and "all" module sets are the full module list here.
            SystemRegistry systemRegistry = new SystemRegistry(allModules, allModules);
            if (args[0].equals("json")) {
                System.out.print(JsonHelper.encode(systemRegistry));
            } else {

                PrintWriter printWriter = new PrintWriter(System.out);

                for (SystemRegistry.ModuleDescriptor moduleDescriptor : systemRegistry.getModules()) {
                    // Fall back to the class name when the module has no display name.
                    String name;
                    if (moduleDescriptor.name == null) {
                        name = moduleDescriptor.className;
                    } else {
                        name = moduleDescriptor.name;
                    }

                    printWriter.println("#------------------------------------------------------------------------------");
                    printWriter.println("#" + name.toUpperCase(Locale.ENGLISH));
                    if (moduleDescriptor.description != null) {
                        printWriter.println("#" + moduleDescriptor.description);
                    }
                    printWriter.println("#------------------------------------------------------------------------------");

                    // Conditional modules document the property that activates them.
                    if (moduleDescriptor.condition.isPresent()) {
                        Condition condition = moduleDescriptor.condition.get();
                        printWriter.println("#Condition for this plugin to be is_active:");

                        if (condition.expectedValue.isEmpty()) {
                            printWriter.println("#" + condition.property + " property must be set");
                        } else {
                            printWriter.println("#" + condition.property + "=" + condition.expectedValue + "\n");
                        }
                    }

                    for (SystemRegistry.ConfigItem property : moduleDescriptor.properties) {
                        if (property.description != null && !property.description.isEmpty()) {
                            // TODO: support for breaking words to multiple lines
                            printWriter.println("# " + property.description);
                        }
                        // "null" (string) marks an absent default in the registry.
                        String value = property.defaultValue.equals("null") ? "" : property.defaultValue;
                        printWriter.println("#" + property.property + "=" + value);
                    }

                    printWriter.print("\n\n");
                }
                printWriter.flush();
            }
        } catch (Exception e) {
            // NOTE(review): failures are printed but the process still exits 0 —
            // confirm whether callers rely on the exit code.
            e.printStackTrace();
        }
    }
}
"" : property.defaultValue; 65 | printWriter.println("#" + property.property + "=" + value); 66 | } 67 | 68 | printWriter.print("\n\n"); 69 | } 70 | printWriter.flush(); 71 | } 72 | } catch (Exception e) { 73 | e.printStackTrace(); 74 | } 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /rakam/src/main/java/org/rakam/analysis/SchemaConverter.java: -------------------------------------------------------------------------------- 1 | package org.rakam.analysis; 2 | 3 | import com.fasterxml.jackson.annotation.JsonCreator; 4 | import com.fasterxml.jackson.annotation.JsonProperty; 5 | import com.google.common.collect.Lists; 6 | import org.apache.avro.Schema; 7 | import org.rakam.collection.FieldType; 8 | import org.rakam.collection.SchemaField; 9 | import org.rakam.util.AvroUtil; 10 | import org.rakam.util.RakamException; 11 | 12 | import java.util.*; 13 | import java.util.function.Function; 14 | 15 | import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST; 16 | import static org.apache.avro.Schema.Type.NULL; 17 | 18 | public enum SchemaConverter { 19 | AVRO(schemaFields -> { 20 | Schema parse = new Schema.Parser().parse(schemaFields); 21 | if (parse.getType() != Schema.Type.RECORD) { 22 | throw new RakamException("Avro schema must be a RECORD", BAD_REQUEST); 23 | } 24 | 25 | Set rakamFields = new HashSet<>(); 26 | 27 | for (Schema.Field field : parse.getFields()) { 28 | Schema avroSchema = field.schema(); 29 | if (avroSchema.getType() == Schema.Type.UNION) { 30 | List types = field.schema().getTypes(); 31 | if (types.isEmpty()) { 32 | throw new IllegalStateException(); 33 | } 34 | if (types.size() == 1) { 35 | avroSchema = types.get(0); 36 | } 37 | if (types.size() == 2) { 38 | if (types.get(0).getType() == Schema.Type.NULL) { 39 | avroSchema = Schema.createUnion(Lists.newArrayList(Schema.create(NULL), types.get(1))); 40 | } else if (types.get(1).getType() == Schema.Type.NULL) { 41 | avroSchema = 
Schema.createUnion(Lists.newArrayList(Schema.create(NULL), types.get(0))); 42 | } else { 43 | throw new RakamException("UNION type is not supported: " + avroSchema, BAD_REQUEST); 44 | } 45 | } else { 46 | throw new RakamException("UNION type is not supported: " + avroSchema, BAD_REQUEST); 47 | } 48 | } 49 | 50 | final Schema finalAvroSchema = avroSchema; 51 | Optional fieldType = Arrays.stream(FieldType.values()).filter(e -> AvroUtil.generateAvroSchema(e).equals(finalAvroSchema)).findAny(); 52 | if (!fieldType.isPresent()) { 53 | new RakamException("Unsupported Avro type" + avroSchema, BAD_REQUEST); 54 | } 55 | 56 | rakamFields.add(new SchemaField(field.name(), fieldType.get(), avroSchema.getFullName(), avroSchema.getDoc(), null)); 57 | } 58 | return rakamFields; 59 | }); 60 | 61 | private final Function> mapper; 62 | 63 | SchemaConverter(Function> mapper) { 64 | this.mapper = mapper; 65 | } 66 | 67 | @JsonCreator 68 | public static SchemaConverter get(String name) { 69 | return valueOf(name.toUpperCase()); 70 | } 71 | 72 | @JsonProperty 73 | public String value() { 74 | return name(); 75 | } 76 | 77 | public Function> getMapper() { 78 | return mapper; 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /rakam/src/main/java/org/rakam/analysis/webhook/WebhookConfig.java: -------------------------------------------------------------------------------- 1 | package org.rakam.analysis.webhook; 2 | 3 | import io.airlift.configuration.Config; 4 | import org.rakam.util.JsonHelper; 5 | 6 | import java.util.Map; 7 | 8 | public class WebhookConfig { 9 | private String url; 10 | private Map headers; 11 | 12 | public String getUrl() { 13 | return url; 14 | } 15 | 16 | @Config("collection.webhook.url") 17 | public WebhookConfig setUrl(String url) { 18 | this.url = url; 19 | return this; 20 | } 21 | 22 | public Map getHeaders() { 23 | return headers; 24 | } 25 | 26 | @Config("collection.webhook.headers") 27 | public WebhookConfig 
setHeaders(String headers) { 28 | this.headers = JsonHelper.read(headers, Map.class); 29 | return this; 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /rakam/src/main/java/org/rakam/analysis/webhook/WebhookModule.java: -------------------------------------------------------------------------------- 1 | package org.rakam.analysis.webhook; 2 | 3 | 4 | import com.google.auto.service.AutoService; 5 | import com.google.inject.Binder; 6 | import com.google.inject.Scopes; 7 | import com.google.inject.multibindings.Multibinder; 8 | import org.rakam.aws.AWSConfig; 9 | import org.rakam.plugin.EventMapper; 10 | import org.rakam.plugin.RakamModule; 11 | 12 | import static io.airlift.configuration.ConfigBinder.configBinder; 13 | 14 | @AutoService(RakamModule.class) 15 | public class WebhookModule extends RakamModule { 16 | @Override 17 | protected void setup(Binder binder) { 18 | WebhookConfig webhookConfig = buildConfigObject(WebhookConfig.class); 19 | if(webhookConfig.getUrl() != null) { 20 | configBinder(binder).bindConfig(WebhookConfig.class); 21 | configBinder(binder).bindConfig(AWSConfig.class); 22 | Multibinder mappers = Multibinder.newSetBinder(binder, EventMapper.class); 23 | mappers.addBinding().to(WebhookEventMapper.class).in(Scopes.SINGLETON); 24 | } 25 | } 26 | 27 | @Override 28 | public String name() { 29 | return null; 30 | } 31 | 32 | @Override 33 | public String description() { 34 | return null; 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /rakam/src/main/java/org/rakam/bootstrap/ProxyBootstrap.java: -------------------------------------------------------------------------------- 1 | package org.rakam.bootstrap; 2 | 3 | import com.google.common.collect.ImmutableList; 4 | import com.google.common.collect.ImmutableSet; 5 | import com.google.inject.Injector; 6 | import com.google.inject.Module; 7 | import io.airlift.bootstrap.Bootstrap; 8 | import 
/**
 * Bootstrap subclass that (1) publishes the installed-module list as a
 * {@link SystemRegistry} binding and (2) maps prefixed environment variables
 * onto configuration properties before initialization.
 */
public class ProxyBootstrap
        extends Bootstrap {
    private final static Logger LOGGER = Logger.get(ProxyBootstrap.class);

    public ProxyBootstrap(Set modules) {
        super(modules);
    }

    @Override
    public Injector initialize()
            throws Exception {
        // Reach into Bootstrap's private "modules" field via reflection to
        // append an extra module — there is no public hook for this.
        // NOTE(review): this breaks if the airlift Bootstrap field is renamed.
        Field modules = Bootstrap.class.getDeclaredField("modules");
        modules.setAccessible(true);
        List installedModules = (List) modules.get(this);
        SystemRegistry systemRegistry = new SystemRegistry(null, ImmutableSet.copyOf(installedModules));

        modules.set(this, ImmutableList.builder().addAll(installedModules).add((Module) binder -> {
            binder.bind(SystemRegistry.class).toInstance(systemRegistry);
        }).build());

        // When -Denv=PREFIX is set, every env var "PREFIX_FOO__BAR_BAZ" is
        // translated to config property "foo-bar.baz" ("__" -> "-", "_" -> ".").
        String env = System.getProperty("env");
        ArrayList objects = new ArrayList<>();
        if (env != null) {
            LOGGER.info("Reading environment variables starting with `%s`", env);

            System.getenv().entrySet().stream()
                    .filter(entry -> entry.getKey().startsWith(env)).forEach(entry -> {
                String configName = entry.getKey().substring(env.length() + 1)
                        .toLowerCase(Locale.ENGLISH).replaceAll("__", "-").replaceAll("_", ".");
                objects.add(configName);
                this.setOptionalConfigurationProperty(configName, entry.getValue());
            });

            LOGGER.info("Set the configurations using environment variables (%s)", objects.stream().collect(Collectors.joining(", ")));
        }

        Injector initialize = super.initialize();
        return initialize;
    }
}
| package org.rakam.collection; 2 | 3 | import io.airlift.slice.SliceInput; 4 | import org.apache.avro.Schema; 5 | import org.apache.avro.generic.GenericDatumReader; 6 | import org.apache.avro.generic.GenericRecord; 7 | import org.apache.avro.io.BinaryDecoder; 8 | import org.apache.avro.io.DecoderFactory; 9 | import org.rakam.analysis.metadata.Metastore; 10 | import org.rakam.util.AvroUtil; 11 | 12 | import javax.inject.Inject; 13 | import java.io.IOException; 14 | import java.util.ArrayList; 15 | import java.util.List; 16 | 17 | public class AvroEventDeserializer { 18 | 19 | private final Metastore metastore; 20 | 21 | @Inject 22 | public AvroEventDeserializer(Metastore metastore) { 23 | this.metastore = metastore; 24 | } 25 | 26 | public EventList deserialize(String project, String collection, SliceInput slice) throws IOException { 27 | String json = slice.readSlice(slice.readInt()).toStringUtf8(); 28 | Schema schema = new Schema.Parser().parse(json); 29 | int records = slice.readInt(); 30 | 31 | BinaryDecoder binaryDecoder = DecoderFactory.get().directBinaryDecoder(slice, null); 32 | 33 | List fields = metastore.getCollection(project, collection); 34 | Schema avroSchema = AvroUtil.convertAvroSchema(fields); 35 | 36 | GenericDatumReader reader = new GenericDatumReader(schema, avroSchema); 37 | 38 | List list = new ArrayList<>(records); 39 | for (int i = 0; i < records; i++) { 40 | GenericRecord record = reader.read(null, binaryDecoder); 41 | list.add(new Event(project, collection, null, fields, record)); 42 | } 43 | 44 | return new EventList(Event.EventContext.empty(), project, list); 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /rakam/src/main/java/org/rakam/collection/HeaderDefaultFullHttpResponse.java: -------------------------------------------------------------------------------- 1 | package org.rakam.collection; 2 | 3 | import io.netty.buffer.ByteBuf; 4 | import io.netty.handler.codec.http.*; 5 | 6 | 
/**
 * FullHttpResponse implementation that wraps a caller-supplied HttpHeaders
 * instance instead of creating its own, delegating reference counting to the
 * content buffer.
 *
 * NOTE(review): headers() and trailingHeaders() return the SAME HttpHeaders
 * object (the one passed as "headers" in the constructor is stored in the
 * trailingHeaders field) — confirm this aliasing is intentional.
 */
public final class HeaderDefaultFullHttpResponse extends DefaultHttpResponse implements FullHttpResponse {
    private final ByteBuf content;
    private final HttpHeaders trailingHeaders;

    public HeaderDefaultFullHttpResponse(HttpVersion version, HttpResponseStatus status, ByteBuf content, HttpHeaders headers) {
        super(version, status);
        trailingHeaders = headers;
        this.content = content;
    }

    @Override
    public HttpHeaders trailingHeaders() {
        return trailingHeaders;
    }

    @Override
    public HttpHeaders headers() {
        // Intentionally(?) the same instance as trailingHeaders — see class note.
        return trailingHeaders;
    }

    @Override
    public ByteBuf content() {
        return content;
    }

    // Reference counting is delegated entirely to the content buffer.
    @Override
    public int refCnt() {
        return content.refCnt();
    }

    @Override
    public FullHttpResponse retain() {
        content.retain();
        return this;
    }

    @Override
    public FullHttpResponse retain(int increment) {
        content.retain(increment);
        return this;
    }

    @Override
    public boolean release() {
        return content.release();
    }

    @Override
    public boolean release(int decrement) {
        return content.release(decrement);
    }

    @Override
    public FullHttpResponse setProtocolVersion(HttpVersion version) {
        super.setProtocolVersion(version);
        return this;
    }

    @Override
    public FullHttpResponse setStatus(HttpResponseStatus status) {
        super.setStatus(status);
        return this;
    }

    // copy()/duplicate() return plain DefaultFullHttpResponse instances, so
    // the copies no longer share the header-aliasing behavior of this class.
    @Override
    public FullHttpResponse copy() {
        DefaultFullHttpResponse copy = new DefaultFullHttpResponse(
                getProtocolVersion(), getStatus(), content().copy(), true);
        copy.headers().set(headers());
        copy.trailingHeaders().set(trailingHeaders());
        return copy;
    }

    @Override
    public FullHttpResponse duplicate() {
        DefaultFullHttpResponse duplicate = new DefaultFullHttpResponse(getProtocolVersion(), getStatus(),
                content().duplicate(), true);
        duplicate.headers().set(headers());
        duplicate.trailingHeaders().set(trailingHeaders());
        return duplicate;
    }
}
/**
 * Configuration for the embedded HTTP server: bind address, proxy-protocol
 * support, debug mode and the maximum accepted request size.
 */
public class HttpServerConfig {
    private static final int RAKAM_DEFAULT_PORT = 9999;
    private static final String RAKAM_DEFAULT_HOST = "0.0.0.0";

    private HostAndPort address = HostAndPort.fromParts(RAKAM_DEFAULT_HOST, RAKAM_DEFAULT_PORT);
    private boolean disabled;
    private boolean proxyProtocol;
    private boolean debug;
    // Default cap: one tenth of the JVM's maximum heap.
    private long maximumRequestSize = Runtime.getRuntime().maxMemory() / 10;

    public HostAndPort getAddress() {
        return address;
    }

    /**
     * Accepts "host", "host:port" or null (falls back to 0.0.0.0:9999);
     * a missing port defaults to 9999.
     */
    @Config("http.server.address")
    public HttpServerConfig setAddress(String address) {
        if (address == null)
            this.address = HostAndPort.fromParts(RAKAM_DEFAULT_HOST, RAKAM_DEFAULT_PORT);
        else
            this.address = HostAndPort.fromString(address).withDefaultPort(RAKAM_DEFAULT_PORT);
        return this;
    }

    public boolean getDisabled() {
        return disabled;
    }

    @Config("http.server.disabled")
    public HttpServerConfig setDisabled(boolean disabled) {
        this.disabled = disabled;
        return this;
    }

    public boolean getProxyProtocol() {
        return proxyProtocol;
    }

    // Enable when the server sits behind a PROXY-protocol load balancer.
    @Config("http.server.proxy-protocol")
    public HttpServerConfig setProxyProtocol(boolean proxyProtocol) {
        this.proxyProtocol = proxyProtocol;
        return this;
    }

    public boolean getDebug() {
        return debug;
    }

    @Config("http.server.debug")
    public HttpServerConfig setDebug(boolean debug) {
        this.debug = debug;
        return this;
    }

    public long getMaximumRequestSize() {
        return maximumRequestSize;
    }

    @Config("http.server.max-request-size")
    public HttpServerConfig setMaximumRequestSize(long maximumRequestSize) {
        this.maximumRequestSize = maximumRequestSize;
        return this;
    }
}
/**
 * Guice module enabling the JavaScript-based custom event mapper. Activated
 * only when the "js-event-mapper.enabled" configuration property is "true".
 */
@AutoService(RakamModule.class)
@ConditionalModule(config = "js-event-mapper.enabled", value = "true")
public class JSEventMapperModule extends RakamModule {
    @Override
    protected void setup(Binder binder) {
        // Expose the mapper's HTTP endpoints as a singleton HttpService.
        Multibinder httpServices = Multibinder.newSetBinder(binder, HttpService.class);
        httpServices.addBinding().to(CustomEventMapperHttpService.class).in(Scopes.SINGLETON);
    }

    @Override
    public String name() {
        return "Custom event mapper module";
    }

    @Override
    public String description() {
        return "Write Javascript code to enrich and sanitize your event data in real-time";
    }
}
private List plugins; 12 | private File pluginDir = new File(System.getProperty("user.dir"), "plugins"); 13 | 14 | public List getPlugins() { 15 | return plugins; 16 | } 17 | 18 | @Config("plugin.bundles") 19 | public PluginConfig setPlugins(String plugins) { 20 | if (plugins == null) { 21 | this.plugins = null; 22 | } else { 23 | this.plugins = ImmutableList.copyOf(Splitter.on(',').omitEmptyStrings().trimResults().split(plugins)); 24 | } 25 | return this; 26 | } 27 | 28 | public File getPluginsDirectory() { 29 | return pluginDir; 30 | } 31 | 32 | @Config("plugin.directory") 33 | public PluginConfig setPluginsDirectory(String pluginDir) { 34 | this.pluginDir = new File(pluginDir); 35 | return this; 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /rakam/src/main/java/org/rakam/plugin/WebhookModule.java: -------------------------------------------------------------------------------- 1 | package org.rakam.plugin; 2 | 3 | import com.google.auto.service.AutoService; 4 | import com.google.inject.Binder; 5 | import com.google.inject.multibindings.Multibinder; 6 | import org.rakam.collection.WebHookHttpService; 7 | import org.rakam.config.TaskConfig; 8 | import org.rakam.server.http.HttpService; 9 | import org.rakam.util.ConditionalModule; 10 | 11 | import static io.airlift.configuration.ConfigBinder.configBinder; 12 | 13 | @AutoService(RakamModule.class) 14 | @ConditionalModule(config = "webhook.enable", value = "true") 15 | public class WebhookModule 16 | extends RakamModule { 17 | @Override 18 | protected void setup(Binder binder) { 19 | configBinder(binder).bindConfig(TaskConfig.class); 20 | Multibinder httpServices = Multibinder.newSetBinder(binder, HttpService.class); 21 | httpServices.addBinding().to(WebHookHttpService.class); 22 | } 23 | 24 | @Override 25 | public String name() { 26 | return null; 27 | } 28 | 29 | @Override 30 | public String description() { 31 | return null; 32 | } 33 | } 34 | 35 | 
-------------------------------------------------------------------------------- /rakam/src/main/java/org/rakam/plugin/user/UserActionService.java: --------------------------------------------------------------------------------
package org.rakam.plugin.user;

import org.rakam.report.QueryResult;
import org.rakam.server.http.HttpService;

import java.util.concurrent.CompletableFuture;

/**
 * Base class for user actions (e-mail, push, ...) that can be triggered either
 * for a whole query result ({@link #batch}) or for a single user ({@link #send}).
 *
 * @param <T> action-specific configuration type
 */
public abstract class UserActionService<T> extends HttpService {

    // NOTE(review): the CompletableFuture type arguments were stripped by text
    // extraction (likely CompletableFuture<QueryResult> for the parameter);
    // restore from version control before relying on this copy.
    public abstract CompletableFuture batch(String project, CompletableFuture queryResult, T config);

    /** Unique, human-readable name of this action. */
    public abstract String getName();

    /** Performs the action for a single user; returns whether it succeeded. */
    public abstract boolean send(String project, User user, T config);
}
-------------------------------------------------------------------------------- /rakam/src/main/java/org/rakam/util/RakamHealthCheckModule.java: --------------------------------------------------------------------------------
package org.rakam.util;

import com.google.auto.service.AutoService;
import com.google.inject.Binder;
import com.google.inject.Scopes;
import com.google.inject.multibindings.Multibinder;
import org.rakam.plugin.RakamModule;
import org.rakam.server.http.HttpService;
import org.rakam.server.http.RakamHttpRequest;

import javax.ws.rs.GET;
import javax.ws.rs.Path;

/**
 * Serves a plain-text landing page on {@code /} so that a freshly installed
 * instance responds with something useful instead of a 404.
 */
@AutoService(RakamModule.class)
public class RakamHealthCheckModule extends RakamModule {
    @Override
    protected void setup(Binder binder) {
        Multibinder.newSetBinder(binder, HttpService.class).addBinding()
                .to(RootAPIInformationService.class).in(Scopes.SINGLETON);
    }

    @Override
    public String name() {
        return "Fallback for Rakam API BI Module";
    }

    @Override
    public String description() {
        return null;
    }

    @Path("/")
    public static class RootAPIInformationService extends HttpService {
        @GET
        @Path("/")
        public void main(RakamHttpRequest request) {
            request.response("Rakam API is successfully installed! \n---------- \n" +
                    "Visit app.rakam.io to register the API with Rakam Data Platform or api.rakam.io for API documentation.")
                    .end();
        }
    }
}
-------------------------------------------------------------------------------- /rakam/src/main/java/org/rakam/util/StringTemplate.java: --------------------------------------------------------------------------------
package org.rakam.util;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Tiny {@code {{variable}}} string templating helper: list the variables of a
 * template and substitute them from a map or a lookup function.
 */
public class StringTemplate {
    // NOTE(review): the original pattern literal was corrupted by text extraction
    // (everything from "(?" through the format(Map) overload is missing). The
    // reconstruction below matches {{name}} placeholders not preceded by a
    // backslash -- TODO: confirm against version control.
    private static final Pattern VARIABLE_PATTERN = Pattern.compile("(?<!\\\\)\\{\\{([^{}]+)\\}\\}");

    private final String template;

    public StringTemplate(String template) {
        this.template = template;
    }

    // NOTE(review): map value type reconstructed from the Function<String, String>
    // overload it delegates to -- TODO confirm.
    public String format(Map<String, String> parameters) {
        return format(parameters::get);
    }

    /** Returns the placeholder names appearing in the template, in order. */
    public List<String> getVariables() {
        List<String> vars = new ArrayList<>();
        Matcher matcher = VARIABLE_PATTERN.matcher(template);

        while (matcher.find()) {
            vars.add(matcher.group(1));
        }
        return vars;
    }

    /**
     * Replaces every placeholder with {@code replacement.apply(name)}; a null
     * replacement value substitutes the empty string.
     */
    public String format(Function<String, String> replacement) {
        StringBuffer sb = new StringBuffer();
        Matcher matcher = VARIABLE_PATTERN.matcher(template);

        while (matcher.find()) {
            String apply = replacement.apply(matcher.group(1));
            // BUG FIX: the raw value was previously passed to appendReplacement,
            // where '$' and '\' are interpreted as group references -- a value
            // containing them threw IllegalArgumentException or corrupted output.
            matcher.appendReplacement(sb, apply == null ? "" : Matcher.quoteReplacement(apply));
        }
        matcher.appendTail(sb);
        return sb.toString();
    }
}
-------------------------------------------------------------------------------- /rakam/src/main/java/org/rakam/util/javascript/ILogger.java: --------------------------------------------------------------------------------
package org.rakam.util.javascript;

/** Minimal leveled logger surface exposed to sandboxed Javascript code. */
public interface ILogger {
    void debug(String value);

    void warn(String value);

    void info(String value);

    void error(String value);
}
-------------------------------------------------------------------------------- /rakam/src/main/java/org/rakam/util/javascript/JSConfigManager.java: --------------------------------------------------------------------------------
package org.rakam.util.javascript;

import org.rakam.analysis.ConfigManager;

import java.util.Optional;

/**
 * Project-scoped, prefix-namespaced view over {@link ConfigManager} handed to
 * custom Javascript code.
 */
public class JSConfigManager
        implements JSCodeCompiler.IJSConfigManager {
    private final ConfigManager configManager;
    private final String project;
    private final String prefix;

    public JSConfigManager(ConfigManager configManager, String project, String prefix) {
        this.configManager = configManager;
        this.project = project;
        // A null prefix collapses to "", otherwise keys become "<prefix>.<name>".
        this.prefix = Optional.ofNullable(prefix).map(v -> v + ".").orElse("");
    }

    @Override
    public Object get(String configName) {
        return configManager.getConfig(project, prefix + configName, Object.class);
    }

    @Override
    public void set(String configName, Object value) {
        configManager.setConfig(project, prefix + configName, value);
    }

    @Override
    public Object setOnce(String configName, Object value) {
        return configManager.setConfigOnce(project, prefix + configName, value);
    }
}
-------------------------------------------------------------------------------- /rakam/src/main/java/org/rakam/util/javascript/JSLoggerService.java:
-------------------------------------------------------------------------------- 1 | package org.rakam.util.javascript; 2 | 3 | import io.airlift.log.Level; 4 | 5 | import java.time.Instant; 6 | 7 | public interface JSLoggerService { 8 | 9 | ILogger createLogger(String project, String prefix); 10 | 11 | ILogger createLogger(String project, String prefix, String identifier); 12 | 13 | class LogEntry { 14 | public final String id; 15 | public final Level level; 16 | public final String message; 17 | public final Instant timestamp; 18 | 19 | public LogEntry(String id, Level level, String message, Instant timestamp) { 20 | this.id = id; 21 | this.level = level; 22 | this.message = message; 23 | this.timestamp = timestamp; 24 | } 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /rakam/src/main/java/org/rakam/util/javascript/JavascriptConfig.java: -------------------------------------------------------------------------------- 1 | package org.rakam.util.javascript; 2 | 3 | import io.airlift.configuration.Config; 4 | 5 | public class JavascriptConfig { 6 | private boolean customEnabled = true; 7 | 8 | public boolean getCustomEnabled() { 9 | return customEnabled; 10 | } 11 | 12 | @Config("custom-javascript-enabled") 13 | public JavascriptConfig setCustomEnabled(boolean customEnabled) { 14 | this.customEnabled = customEnabled; 15 | return this; 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /rakam/src/main/java/org/rakam/util/javascript/JavascriptModule.java: -------------------------------------------------------------------------------- 1 | package org.rakam.util.javascript; 2 | 3 | import com.google.inject.Binder; 4 | import com.google.inject.Scopes; 5 | import io.airlift.configuration.ConfigBinder; 6 | import org.rakam.plugin.RakamModule; 7 | 8 | //@AutoService(RakamModule.class) 9 | public class JavascriptModule 10 | extends RakamModule { 11 | @Override 12 | 
protected void setup(Binder binder) { 13 | ConfigBinder.configBinder(binder).bindConfig(JavascriptConfig.class); 14 | binder.bind(JSCodeCompiler.class).in(Scopes.SINGLETON); 15 | } 16 | 17 | @Override 18 | public String name() { 19 | return "Javascript executor module"; 20 | } 21 | 22 | @Override 23 | public String description() { 24 | return null; 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /rakam/src/main/java/org/rakam/util/javascript/MonitorThread.java: -------------------------------------------------------------------------------- 1 | package org.rakam.util.javascript; 2 | 3 | 4 | import com.google.common.base.Throwables; 5 | 6 | import java.lang.management.ManagementFactory; 7 | import java.lang.management.ThreadMXBean; 8 | import java.util.concurrent.atomic.AtomicBoolean; 9 | 10 | @SuppressWarnings("all") 11 | public class MonitorThread extends Thread { 12 | private final long maxCPUTime; 13 | 14 | private final AtomicBoolean stop; 15 | 16 | private final AtomicBoolean operationInterrupted; 17 | private final AtomicBoolean cpuLimitExceeded; 18 | private Thread threadToMonitor; 19 | private Runnable onInvalid; 20 | 21 | public MonitorThread(final long maxCPUTimne) { 22 | this.maxCPUTime = maxCPUTimne; 23 | AtomicBoolean _atomicBoolean = new AtomicBoolean(false); 24 | this.stop = _atomicBoolean; 25 | AtomicBoolean _atomicBoolean_1 = new AtomicBoolean(false); 26 | this.operationInterrupted = _atomicBoolean_1; 27 | AtomicBoolean _atomicBoolean_2 = new AtomicBoolean(false); 28 | this.cpuLimitExceeded = _atomicBoolean_2; 29 | } 30 | 31 | @Override 32 | public void run() { 33 | try { 34 | final ThreadMXBean bean = ManagementFactory.getThreadMXBean(); 35 | long _id = this.threadToMonitor.getId(); 36 | final long startCPUTime = bean.getThreadCpuTime(_id); 37 | while ((!this.stop.get())) { 38 | { 39 | long _id_1 = this.threadToMonitor.getId(); 40 | final long threadCPUTime = bean.getThreadCpuTime(_id_1); 41 | 
final long runtime = (threadCPUTime - startCPUTime); 42 | if ((runtime > this.maxCPUTime)) { 43 | this.cpuLimitExceeded.set(true); 44 | this.stop.set(true); 45 | this.onInvalid.run(); 46 | Thread.sleep(50); 47 | boolean _get = this.operationInterrupted.get(); 48 | boolean _equals = (_get == false); 49 | if (_equals) { 50 | String _plus = (this + ": Thread hard shutdown!"); 51 | this.threadToMonitor.stop(); 52 | } 53 | return; 54 | } 55 | Thread.sleep(5); 56 | } 57 | } 58 | } catch (Throwable _e) { 59 | throw Throwables.propagate(_e); 60 | } 61 | } 62 | 63 | public void stopMonitor() { 64 | this.stop.set(true); 65 | } 66 | 67 | public void setThreadToMonitor(final Thread t) { 68 | this.threadToMonitor = t; 69 | } 70 | 71 | public void setOnInvalidHandler(final Runnable r) { 72 | this.onInvalid = r; 73 | } 74 | 75 | public void notifyOperationInterrupted() { 76 | this.operationInterrupted.set(true); 77 | } 78 | 79 | public boolean isCPULimitExceeded() { 80 | return this.cpuLimitExceeded.get(); 81 | } 82 | 83 | public boolean gracefullyInterrputed() { 84 | return this.operationInterrupted.get(); 85 | } 86 | } -------------------------------------------------------------------------------- /rakam/src/main/resources/assembly.xml: -------------------------------------------------------------------------------- 1 | 5 | bundle 6 | 7 | tar.gz 8 | dir 9 | 10 | 11 | 12 | ${air.main.basedir} 13 | / 14 | 15 | README* 16 | LICENSE* 17 | NOTICE* 18 | 19 | 20 | 21 | ${air.main.basedir} 22 | /etc 23 | 24 | log.properties 25 | jvm.config 26 | 27 | 28 | 29 | ${project.build.directory} 30 | /etc 31 | 32 | config.properties 33 | log.properties 34 | 35 | 36 | 37 | ${basedir}/bin 38 | /bin 39 | 40 | **/* 41 | 42 | 43 | 44 | 45 | 46 | lib/ 47 | 48 | *:jar 49 | 50 | 51 | 52 | -------------------------------------------------------------------------------- /rakam/src/main/resources/log4j2.properties: -------------------------------------------------------------------------------- 1 | 2 | 3 
| 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /rakam/src/test/java/Test.java: -------------------------------------------------------------------------------- 1 | import com.fasterxml.jackson.databind.MappingIterator; 2 | import com.fasterxml.jackson.dataformat.csv.CsvMapper; 3 | import com.fasterxml.jackson.dataformat.csv.CsvSchema; 4 | import com.google.common.io.ByteStreams; 5 | import org.rakam.util.javascript.ILogger; 6 | import org.rakam.util.javascript.JSCodeCompiler; 7 | import org.rakam.util.javascript.JSLoggerService; 8 | import org.rakam.util.javascript.JavascriptConfig; 9 | 10 | import javax.script.Invocable; 11 | import javax.script.ScriptException; 12 | import java.io.File; 13 | import java.io.IOException; 14 | import java.util.HashMap; 15 | import java.util.Map; 16 | 17 | public class Test { 18 | public static void main1(String[] args) throws IOException, ScriptException, NoSuchMethodException { 19 | JSCodeCompiler jsCodeCompiler = new JSCodeCompiler(null, null, new JSLoggerService() { 20 | @Override 21 | public ILogger createLogger(String project, String prefix) { 22 | return new JSCodeCompiler.TestLogger(); 23 | } 24 | 25 | @Override 26 | public ILogger createLogger(String project, String prefix, String identifier) { 27 | return new JSCodeCompiler.TestLogger(); 28 | } 29 | }, new JavascriptConfig()); 30 | 31 | 32 | Invocable engine = jsCodeCompiler.createEngine("test", new String(ByteStreams.toByteArray(Test.class.getResourceAsStream("example.js"))), "test"); 33 | HashMap map = new HashMap<>(); 34 | HashMap inline = new HashMap<>(); 35 | inline.put("param", 1); 36 | map.put("test", inline); 37 | Object main = engine.invokeFunction("main", map); 38 | System.out.println(main); 39 | } 40 | 41 | public static void main(String[] args) throws IOException, ScriptException, NoSuchMethodException { 42 | CsvMapper mapper = new CsvMapper(); 43 | 44 | CsvSchema.Builder schema = 
new CsvSchema.Builder(); 45 | schema.addColumn("geonameid", CsvSchema.ColumnType.NUMBER); 46 | schema.addColumn("name", CsvSchema.ColumnType.STRING); 47 | schema.addColumn("asciiname", CsvSchema.ColumnType.STRING); 48 | schema.addColumn("alternatenames", CsvSchema.ColumnType.STRING); 49 | schema.addColumn("latitude", CsvSchema.ColumnType.NUMBER); 50 | schema.addColumn("longitude", CsvSchema.ColumnType.NUMBER); 51 | schema.addColumn("feature class", CsvSchema.ColumnType.STRING); 52 | schema.addColumn("feature code", CsvSchema.ColumnType.STRING); 53 | schema.addColumn("country code", CsvSchema.ColumnType.STRING); 54 | schema.addColumn("cc2", CsvSchema.ColumnType.STRING); 55 | schema.addColumn("admin1 code", CsvSchema.ColumnType.STRING); 56 | schema.addColumn("admin2 code", CsvSchema.ColumnType.STRING); 57 | schema.addColumn("admin3 code", CsvSchema.ColumnType.STRING); 58 | schema.addColumn("admin4 code", CsvSchema.ColumnType.STRING); 59 | schema.addColumn("population", CsvSchema.ColumnType.STRING); 60 | schema.addColumn("elevation", CsvSchema.ColumnType.STRING); 61 | schema.addColumn("dem", CsvSchema.ColumnType.STRING); 62 | schema.addColumn("timezone", CsvSchema.ColumnType.STRING); 63 | schema.addColumn("modification date", CsvSchema.ColumnType.STRING); 64 | schema.setColumnSeparator('\t'); 65 | schema.setEscapeChar('"'); 66 | 67 | File csvFile = new File("/Users/buremba/Downloads/allCountries.txt"); // or from String, URL etc 68 | MappingIterator it = mapper.readerFor(Map.class).with(schema.build()).readValues(csvFile); 69 | while (it.hasNext()) { 70 | Map row = it.next(); 71 | // System.out.println(row.get("latitude")); 72 | } 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /rakam/src/test/java/TestCSVParser.java: -------------------------------------------------------------------------------- 1 | import com.fasterxml.jackson.databind.cfg.ContextAttributes; 2 | import 
com.fasterxml.jackson.databind.module.SimpleModule; 3 | import com.fasterxml.jackson.dataformat.csv.CsvMapper; 4 | import com.google.common.collect.ImmutableList; 5 | import com.google.common.collect.ImmutableSet; 6 | import com.google.common.eventbus.EventBus; 7 | import org.apache.avro.Schema; 8 | import org.apache.avro.generic.GenericData; 9 | import org.rakam.TestingConfigManager; 10 | import org.rakam.analysis.InMemoryApiKeyService; 11 | import org.rakam.analysis.InMemoryMetastore; 12 | import org.rakam.analysis.metadata.Metastore; 13 | import org.rakam.analysis.metadata.SchemaChecker; 14 | import org.rakam.collection.*; 15 | import org.rakam.config.ProjectConfig; 16 | import org.rakam.util.AvroUtil; 17 | import org.testng.annotations.Test; 18 | 19 | import java.util.List; 20 | 21 | import static com.google.common.collect.ImmutableSet.of; 22 | import static org.rakam.collection.FieldType.DOUBLE; 23 | import static org.rakam.collection.FieldType.STRING; 24 | import static org.testng.Assert.assertEquals; 25 | 26 | public class TestCSVParser { 27 | @Test 28 | public void testName() throws Exception { 29 | CsvMapper mapper = new CsvMapper(); 30 | 31 | FieldDependencyBuilder.FieldDependency build = new FieldDependencyBuilder().build(); 32 | Metastore metastore = new InMemoryMetastore(new InMemoryApiKeyService(), new EventBus()); 33 | mapper.registerModule(new SimpleModule().addDeserializer(EventList.class, 34 | new CsvEventDeserializer(metastore, new ProjectConfig(), new TestingConfigManager(), new SchemaChecker(metastore, build), build))); 35 | 36 | metastore.createProject("project"); 37 | metastore.getOrCreateCollectionFields("project", "collection", 38 | of(new SchemaField("price", DOUBLE))); 39 | 40 | String csv = "Transaction_date,Product,Price\n" + 41 | "1/2/09 6:17,Product1,1200\n" + 42 | "1/2/09 4:53,Product2,1500\n"; 43 | 44 | EventList actual = mapper.reader(EventList.class).with(ContextAttributes.getEmpty() 45 | .withSharedAttribute("project", "project") 
46 | .withSharedAttribute("collection", "collection") 47 | .withSharedAttribute("apiKey", "apiKey") 48 | ).readValue(csv); 49 | 50 | List collection = metastore.getCollection("project", "collection"); 51 | 52 | assertEquals(ImmutableSet.copyOf(collection), ImmutableSet.of( 53 | new SchemaField("transaction_date", STRING), 54 | new SchemaField("product", STRING), 55 | new SchemaField("price", DOUBLE))); 56 | 57 | Schema avroSchema = AvroUtil.convertAvroSchema(collection); 58 | GenericData.Record record1 = new GenericData.Record(avroSchema); 59 | record1.put("transaction_date", "1/2/09 6:17"); 60 | record1.put("product", "Product1"); 61 | record1.put("price", 1200.0); 62 | 63 | GenericData.Record record2 = new GenericData.Record(avroSchema); 64 | record2.put("transaction_date", "1/2/09 4:53"); 65 | record2.put("product", "Product2"); 66 | record2.put("price", 1500.0); 67 | 68 | EventList eventList = new EventList(Event.EventContext.apiKey("apiKey"), "project", ImmutableList.of( 69 | new Event("project", "collection", null, ImmutableList.copyOf(collection), record1), 70 | new Event("project", "collection", null, ImmutableList.copyOf(collection), record2))); 71 | assertEquals(actual, eventList); 72 | } 73 | 74 | } 75 | -------------------------------------------------------------------------------- /rakam/src/test/java/TestJSCodeCompiler.java: -------------------------------------------------------------------------------- 1 | import okhttp3.OkHttpClient; 2 | import org.rakam.TestingConfigManager; 3 | import org.rakam.util.RAsyncHttpClient; 4 | import org.rakam.util.javascript.JSCodeCompiler; 5 | import org.testng.annotations.Test; 6 | 7 | import javax.script.ScriptException; 8 | 9 | public class TestJSCodeCompiler { 10 | @Test 11 | public void testName() 12 | throws ScriptException { 13 | 14 | JSCodeCompiler jsCodeCompiler = new JSCodeCompiler(new TestingConfigManager(), 15 | new RAsyncHttpClient(new OkHttpClient()), 16 | (project, prefix) -> new 
JSCodeCompiler.TestLogger(), false, true);
        // jsCodeCompiler.createEngine("test", "new Array(100000000).concat(new Array(100000000));", "");
    }
}
-------------------------------------------------------------------------------- /rakam/src/test/java/bloom/OoaBFilter.java: --------------------------------------------------------------------------------
package bloom;

import com.google.common.hash.HashCode;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import com.google.common.math.IntMath;

import java.math.RoundingMode;

/**
 * OoaBFilter filters duplicate 128-bit (UUID) values from a stream. Each slot
 * remembers the last value hashed to it, so the filter never reports a value as
 * seen when it was not (no false positives) but may miss a duplicate whose slot
 * was overwritten in between (false negatives).
 *
 * <p>NOTE(review): the original doc claimed the check is synchronized per
 * buffer, but no synchronization exists in this code -- guard externally if
 * used from multiple threads, or confirm against the upstream source.
 */
public class OoaBFilter {
    private static final HashFunction HASH_FUNC = Hashing.murmur3_32();
    private final long[] leastSignificantBitsArray;
    private final long[] mostSignificantBitsArray;
    private final int sizeMask;

    public OoaBFilter(int size) {
        // BUG FIX: the arrays must cover every index the mask can produce. They
        // were previously sized `size` while the mask rounds up to the next power
        // of two, so e.g. size = 1_000_000 gave mask 0xFFFFF (max index 1_048_575)
        // and indices past 999_999 threw ArrayIndexOutOfBoundsException.
        int capacity = IntMath.pow(2, IntMath.log2(size, RoundingMode.CEILING));
        this.sizeMask = capacity - 1;
        this.leastSignificantBitsArray = new long[capacity];
        this.mostSignificantBitsArray = new long[capacity];
    }

    /**
     * Returns true iff exactly this (lsb, msb) pair is currently stored in its
     * slot; otherwise records the pair and returns false.
     */
    public boolean containsAndAdd(long leastSignificantBits, long mostSignificantBits) {
        HashCode code = HASH_FUNC.hashLong(leastSignificantBits);
        int index = code.asInt() & sizeMask;

        // BUG FIX: the original read a stale lsb value and reused it as an index
        // ("mostSignificantBitsArray[buffer]"), never wrote
        // leastSignificantBitsArray, and never compared the least-significant
        // half -- allowing false positives and out-of-bounds access. Compare and
        // store BOTH halves at the hashed slot.
        // NOTE: an all-zero value colliding with a never-written slot is reported
        // as seen (slots initialize to 0); same limitation as the original.
        if (leastSignificantBitsArray[index] == leastSignificantBits
                && mostSignificantBitsArray[index] == mostSignificantBits) {
            return true;
        }
        leastSignificantBitsArray[index] = leastSignificantBits;
        mostSignificantBitsArray[index] = mostSignificantBits;
        return false;
    }
}
-------------------------------------------------------------------------------- /rakam/src/test/java/bloom/TestFilter.java: --------------------------------------------------------------------------------
package bloom;

import java.util.UUID;

/** Manual stress driver for OoaBFilter (the @Test annotation is disabled). */
public class TestFilter {

    // @Test
    public void testName()
            throws Exception {
        OoaBFilter byteArrayFilter = new OoaBFilter(1000000);

        // Feed random UUIDs forever; a true return on a fresh UUID would mean a
        // false positive, which the filter is supposed to never produce.
        while (true) {
            UUID uuid = UUID.randomUUID();
            long leastSignificantBits = uuid.getLeastSignificantBits();
            long mostSignificantBits = uuid.getMostSignificantBits();

            if (byteArrayFilter.containsAndAdd(leastSignificantBits, mostSignificantBits)) {
                throw new RuntimeException();
            }
        }
    }
}
-------------------------------------------------------------------------------- /system.properties: --------------------------------------------------------------------------------
java.runtime.version=1.8
--------------------------------------------------------------------------------