├── .dockerignore ├── .gitignore ├── .travis.yml ├── CODE_OF_CONDUCT.md ├── Changelog.md ├── Dockerfile ├── LICENSE.md ├── Makefile ├── README.md ├── TODO.md ├── build.sh ├── cmd ├── actord │ ├── README.md │ ├── bootstrap │ │ ├── bootstrap.go │ │ ├── log.go │ │ └── options.go │ ├── controller │ │ ├── assign.go │ │ ├── assign_test.go │ │ ├── audit.go │ │ ├── controller.go │ │ ├── httpd.go │ │ ├── jobqueues.go │ │ ├── watchdog.go │ │ └── webhooks.go │ ├── executor │ │ ├── exe_job.go │ │ └── exe_webhook.go │ └── main.go ├── ehaproxy │ ├── Makefile │ ├── README.md │ ├── command │ │ ├── config.go │ │ ├── consts.go │ │ ├── deploy.go │ │ ├── haproxy.go │ │ ├── httpd.go │ │ ├── log.go │ │ ├── metrics.go │ │ ├── start.go │ │ ├── templates │ │ │ ├── 500.http │ │ │ ├── 502.http │ │ │ ├── 503.http │ │ │ ├── 504.http │ │ │ ├── haproxy-1.6.3.tar.gz │ │ │ ├── haproxy.debug.tpl │ │ │ ├── haproxy.tpl │ │ │ ├── hatop-0.7.7.tar.gz │ │ │ └── init.ehaproxy │ │ ├── util.go │ │ └── util_test.go │ ├── commands.go │ └── main.go ├── es │ ├── README.md │ ├── command │ │ ├── aliases.go │ │ ├── allocation.go │ │ ├── clusters.go │ │ ├── count.go │ │ ├── health.go │ │ ├── indices.go │ │ ├── merge.go │ │ ├── nodes.go │ │ ├── pending.go │ │ ├── plugins.go │ │ ├── segments.go │ │ ├── shards.go │ │ ├── threads.go │ │ ├── top.go │ │ └── utils.go │ ├── commands.go │ └── main.go ├── gk │ ├── README.md │ ├── audit.go │ ├── command │ │ ├── agent.go │ │ ├── agent │ │ │ ├── Makefile │ │ │ ├── agent.go │ │ │ ├── api.go │ │ │ ├── client.go │ │ │ └── util.go │ │ ├── alias.go │ │ ├── audit.go │ │ ├── auth.go │ │ ├── autocomplete.go │ │ ├── balance.go │ │ ├── brokers.go │ │ ├── capacity.go │ │ ├── chaos.go │ │ ├── checkup.go │ │ ├── clusters.go │ │ ├── comma.go │ │ ├── console.go │ │ ├── consumers.go │ │ ├── controllers.go │ │ ├── cpu.go │ │ ├── deploy.go │ │ ├── deploy_test.go │ │ ├── disable.go │ │ ├── discover.go │ │ ├── ext4fs.go │ │ ├── faq.go │ │ ├── farm.go │ │ ├── gc.go │ │ ├── haproxy.go │ │ ├── 
histogram.go │ │ ├── host.go │ │ ├── influx.go │ │ ├── init.go │ │ ├── jmx.go │ │ ├── job.go │ │ ├── kafka.go │ │ ├── kateway.go │ │ ├── kfs │ │ │ ├── dir.go │ │ │ ├── file.go │ │ │ └── fs.go │ │ ├── kguard.go │ │ ├── lags.go │ │ ├── layers │ │ │ └── kafka.go │ │ ├── logstash.go │ │ ├── lookup.go │ │ ├── lszk.go │ │ ├── members.go │ │ ├── merkle.go │ │ ├── migrate.go │ │ ├── migrate_test.go │ │ ├── mirror.go │ │ ├── mirror │ │ │ ├── config.go │ │ │ ├── config_test.go │ │ │ ├── const.go │ │ │ ├── factory.go │ │ │ ├── mirror.go │ │ │ └── pump.go │ │ ├── mount.go │ │ ├── move.go │ │ ├── normalize.go │ │ ├── offset.go │ │ ├── partition.go │ │ ├── peek.go │ │ ├── perf.go │ │ ├── ping.go │ │ ├── produce.go │ │ ├── protos │ │ │ ├── ascii.go │ │ │ ├── kafka.go │ │ │ ├── protocol.go │ │ │ ├── stream.go │ │ │ └── zk.go │ │ ├── rebalance.go │ │ ├── redis.go │ │ ├── sample.go │ │ ├── scale.go │ │ ├── segment.go │ │ ├── sniff.go │ │ ├── systool.go │ │ ├── template.go │ │ ├── template │ │ │ ├── bash_autocomplete.dbc │ │ │ ├── bash_autocomplete.es │ │ │ ├── bash_autocomplete.gk │ │ │ ├── bash_autocomplete.zk │ │ │ ├── bin │ │ │ │ ├── kafka-preferred-replica-election.sh │ │ │ │ ├── kafka-reassign-partitions.sh │ │ │ │ ├── kafka-run-class.sh │ │ │ │ ├── kafka-server-start.sh │ │ │ │ ├── kafka-topics.sh │ │ │ │ └── setenv.sh │ │ │ ├── config │ │ │ │ ├── log4j.properties │ │ │ │ └── server.properties │ │ │ ├── helix-core-0.6.7 │ │ │ │ ├── bin │ │ │ │ │ ├── JmxDumper.sh │ │ │ │ │ ├── helix-admin.sh │ │ │ │ │ ├── quickstart.sh │ │ │ │ │ ├── run-helix-controller.sh │ │ │ │ │ ├── start-helix-participant.sh │ │ │ │ │ ├── start-standalone-zookeeper.sh │ │ │ │ │ ├── test-util.sh │ │ │ │ │ ├── yaml-cluster-setup.sh │ │ │ │ │ ├── zk-dumper.sh │ │ │ │ │ ├── zk-log-csv-formatter.sh │ │ │ │ │ ├── zk-log-parser.sh │ │ │ │ │ ├── zkcopy.sh │ │ │ │ │ └── zkgrep.sh │ │ │ │ ├── conf │ │ │ │ │ └── log4j.properties │ │ │ │ └── repo │ │ │ │ │ ├── com │ │ │ │ │ ├── 101tec │ │ │ │ │ │ └── zkclient │ │ │ │ 
│ │ │ └── 0.5 │ │ │ │ │ │ │ └── zkclient-0.5.jar │ │ │ │ │ └── google │ │ │ │ │ │ └── guava │ │ │ │ │ │ └── guava │ │ │ │ │ │ └── 15.0 │ │ │ │ │ │ └── guava-15.0.jar │ │ │ │ │ ├── commons-cli │ │ │ │ │ └── commons-cli │ │ │ │ │ │ └── 1.2 │ │ │ │ │ │ └── commons-cli-1.2.jar │ │ │ │ │ ├── commons-codec │ │ │ │ │ └── commons-codec │ │ │ │ │ │ └── 1.6 │ │ │ │ │ │ └── commons-codec-1.6.jar │ │ │ │ │ ├── commons-io │ │ │ │ │ └── commons-io │ │ │ │ │ │ └── 1.4 │ │ │ │ │ │ └── commons-io-1.4.jar │ │ │ │ │ ├── io │ │ │ │ │ └── netty │ │ │ │ │ │ └── netty │ │ │ │ │ │ └── 3.10.5.Final │ │ │ │ │ │ └── netty-3.10.5.Final.jar │ │ │ │ │ ├── jline │ │ │ │ │ └── jline │ │ │ │ │ │ └── 0.9.94 │ │ │ │ │ │ └── jline-0.9.94.jar │ │ │ │ │ ├── log4j │ │ │ │ │ └── log4j │ │ │ │ │ │ └── 1.2.15 │ │ │ │ │ │ └── log4j-1.2.15.jar │ │ │ │ │ └── org │ │ │ │ │ ├── apache │ │ │ │ │ ├── commons │ │ │ │ │ │ └── commons-math │ │ │ │ │ │ │ └── 2.1 │ │ │ │ │ │ │ └── commons-math-2.1.jar │ │ │ │ │ ├── helix │ │ │ │ │ │ └── helix-core │ │ │ │ │ │ │ └── 0.6.7 │ │ │ │ │ │ │ └── helix-core-0.6.7.jar │ │ │ │ │ └── zookeeper │ │ │ │ │ │ └── zookeeper │ │ │ │ │ │ └── 3.4.9 │ │ │ │ │ │ └── zookeeper-3.4.9.jar │ │ │ │ │ ├── codehaus │ │ │ │ │ └── jackson │ │ │ │ │ │ ├── jackson-core-asl │ │ │ │ │ │ └── 1.8.5 │ │ │ │ │ │ │ └── jackson-core-asl-1.8.5.jar │ │ │ │ │ │ └── jackson-mapper-asl │ │ │ │ │ │ └── 1.8.5 │ │ │ │ │ │ └── jackson-mapper-asl-1.8.5.jar │ │ │ │ │ ├── slf4j │ │ │ │ │ ├── slf4j-api │ │ │ │ │ │ └── 1.6.1 │ │ │ │ │ │ │ └── slf4j-api-1.6.1.jar │ │ │ │ │ └── slf4j-log4j12 │ │ │ │ │ │ └── 1.6.1 │ │ │ │ │ │ └── slf4j-log4j12-1.6.1.jar │ │ │ │ │ └── yaml │ │ │ │ │ └── snakeyaml │ │ │ │ │ └── 1.12 │ │ │ │ │ └── snakeyaml-1.12.jar │ │ │ ├── init.d │ │ │ │ ├── gkagent │ │ │ │ ├── kafka │ │ │ │ └── zookeeper │ │ │ ├── kafka_2.10-0.10.0.0 │ │ │ │ ├── bin │ │ │ │ │ ├── connect-distributed.sh │ │ │ │ │ ├── connect-standalone.sh │ │ │ │ │ ├── kafka-acls.sh │ │ │ │ │ ├── kafka-configs.sh │ │ │ │ │ ├── 
kafka-console-consumer.sh │ │ │ │ │ ├── kafka-console-producer.sh │ │ │ │ │ ├── kafka-consumer-groups.sh │ │ │ │ │ ├── kafka-consumer-offset-checker.sh │ │ │ │ │ ├── kafka-consumer-perf-test.sh │ │ │ │ │ ├── kafka-mirror-maker.sh │ │ │ │ │ ├── kafka-preferred-replica-election.sh │ │ │ │ │ ├── kafka-producer-perf-test.sh │ │ │ │ │ ├── kafka-reassign-partitions.sh │ │ │ │ │ ├── kafka-replay-log-producer.sh │ │ │ │ │ ├── kafka-replica-verification.sh │ │ │ │ │ ├── kafka-run-class.sh │ │ │ │ │ ├── kafka-server-start.sh │ │ │ │ │ ├── kafka-server-stop.sh │ │ │ │ │ ├── kafka-simple-consumer-shell.sh │ │ │ │ │ ├── kafka-topics.sh │ │ │ │ │ ├── kafka-verifiable-consumer.sh │ │ │ │ │ ├── kafka-verifiable-producer.sh │ │ │ │ │ ├── zookeeper-security-migration.sh │ │ │ │ │ ├── zookeeper-server-start.sh │ │ │ │ │ ├── zookeeper-server-stop.sh │ │ │ │ │ └── zookeeper-shell.sh │ │ │ │ └── libs │ │ │ │ │ ├── aopalliance-repackaged-2.4.0-b34.jar │ │ │ │ │ ├── argparse4j-0.5.0.jar │ │ │ │ │ ├── connect-api-0.10.0.0.jar │ │ │ │ │ ├── connect-file-0.10.0.0.jar │ │ │ │ │ ├── connect-json-0.10.0.0.jar │ │ │ │ │ ├── connect-runtime-0.10.0.0.jar │ │ │ │ │ ├── guava-18.0.jar │ │ │ │ │ ├── hk2-api-2.4.0-b34.jar │ │ │ │ │ ├── hk2-locator-2.4.0-b34.jar │ │ │ │ │ ├── hk2-utils-2.4.0-b34.jar │ │ │ │ │ ├── jackson-annotations-2.6.0.jar │ │ │ │ │ ├── jackson-core-2.6.3.jar │ │ │ │ │ ├── jackson-databind-2.6.3.jar │ │ │ │ │ ├── jackson-jaxrs-base-2.6.3.jar │ │ │ │ │ ├── jackson-jaxrs-json-provider-2.6.3.jar │ │ │ │ │ ├── jackson-module-jaxb-annotations-2.6.3.jar │ │ │ │ │ ├── javassist-3.18.2-GA.jar │ │ │ │ │ ├── javax.annotation-api-1.2.jar │ │ │ │ │ ├── javax.inject-1.jar │ │ │ │ │ ├── javax.inject-2.4.0-b34.jar │ │ │ │ │ ├── javax.servlet-api-3.1.0.jar │ │ │ │ │ ├── javax.ws.rs-api-2.0.1.jar │ │ │ │ │ ├── jersey-client-2.22.2.jar │ │ │ │ │ ├── jersey-common-2.22.2.jar │ │ │ │ │ ├── jersey-container-servlet-2.22.2.jar │ │ │ │ │ ├── jersey-container-servlet-core-2.22.2.jar │ │ │ │ │ ├── 
jersey-guava-2.22.2.jar │ │ │ │ │ ├── jersey-media-jaxb-2.22.2.jar │ │ │ │ │ ├── jersey-server-2.22.2.jar │ │ │ │ │ ├── jetty-continuation-9.2.15.v20160210.jar │ │ │ │ │ ├── jetty-http-9.2.15.v20160210.jar │ │ │ │ │ ├── jetty-io-9.2.15.v20160210.jar │ │ │ │ │ ├── jetty-security-9.2.15.v20160210.jar │ │ │ │ │ ├── jetty-server-9.2.15.v20160210.jar │ │ │ │ │ ├── jetty-servlet-9.2.15.v20160210.jar │ │ │ │ │ ├── jetty-servlets-9.2.15.v20160210.jar │ │ │ │ │ ├── jetty-util-9.2.15.v20160210.jar │ │ │ │ │ ├── jopt-simple-4.9.jar │ │ │ │ │ ├── kafka-clients-0.10.0.0.jar │ │ │ │ │ ├── kafka-log4j-appender-0.10.0.0.jar │ │ │ │ │ ├── kafka-streams-0.10.0.0.jar │ │ │ │ │ ├── kafka-streams-examples-0.10.0.0.jar │ │ │ │ │ ├── kafka-tools-0.10.0.0.jar │ │ │ │ │ ├── kafka_2.10-0.10.0.0.jar │ │ │ │ │ ├── log4j-1.2.17.jar │ │ │ │ │ ├── lz4-1.3.0.jar │ │ │ │ │ ├── metrics-core-2.2.0.jar │ │ │ │ │ ├── osgi-resource-locator-1.0.1.jar │ │ │ │ │ ├── reflections-0.9.10.jar │ │ │ │ │ ├── rocksdbjni-4.4.1.jar │ │ │ │ │ ├── scala-library-2.10.6.jar │ │ │ │ │ ├── slf4j-api-1.7.21.jar │ │ │ │ │ ├── slf4j-log4j12-1.7.21.jar │ │ │ │ │ ├── snappy-java-1.1.2.4.jar │ │ │ │ │ ├── validation-api-1.1.0.Final.jar │ │ │ │ │ ├── zkclient-0.8.jar │ │ │ │ │ └── zookeeper-3.4.6.jar │ │ │ ├── kafka_2.10-0.8.1.1 │ │ │ │ ├── bin │ │ │ │ │ ├── kafka-preferred-replica-election.sh │ │ │ │ │ ├── kafka-reassign-partitions.sh │ │ │ │ │ ├── kafka-replay-log-producer.sh │ │ │ │ │ ├── kafka-run-class.sh │ │ │ │ │ ├── kafka-server-start.sh │ │ │ │ │ ├── kafka-server-stop.sh │ │ │ │ │ └── kafka-topics.sh │ │ │ │ └── libs │ │ │ │ │ ├── jopt-simple-3.2.jar │ │ │ │ │ ├── kafka_2.10-0.8.1.1.jar │ │ │ │ │ ├── log4j-1.2.15.jar │ │ │ │ │ ├── metrics-core-2.2.0.jar │ │ │ │ │ ├── scala-library-2.10.1.jar │ │ │ │ │ ├── slf4j-api-1.7.2.jar │ │ │ │ │ ├── snappy-java-1.0.5.jar │ │ │ │ │ ├── zkclient-0.3.jar │ │ │ │ │ └── zookeeper-3.3.4.jar │ │ │ ├── kafka_2.10-0.8.2.2 │ │ │ │ ├── bin │ │ │ │ │ ├── kafka-mirror-maker.sh │ │ │ │ │ ├── 
kafka-preferred-replica-election.sh │ │ │ │ │ ├── kafka-reassign-partitions.sh │ │ │ │ │ ├── kafka-replay-log-producer.sh │ │ │ │ │ ├── kafka-replica-verification.sh │ │ │ │ │ ├── kafka-run-class.sh │ │ │ │ │ ├── kafka-server-start.sh │ │ │ │ │ ├── kafka-server-stop.sh │ │ │ │ │ └── kafka-topics.sh │ │ │ │ └── libs │ │ │ │ │ ├── jopt-simple-3.2.jar │ │ │ │ │ ├── kafka-clients-0.8.2.2.jar │ │ │ │ │ ├── kafka_2.10-0.8.2.2.jar │ │ │ │ │ ├── log4j-1.2.16.jar │ │ │ │ │ ├── lz4-1.2.0.jar │ │ │ │ │ ├── metrics-core-2.2.0.jar │ │ │ │ │ ├── scala-library-2.10.4.jar │ │ │ │ │ ├── slf4j-api-1.7.6.jar │ │ │ │ │ ├── slf4j-log4j12-1.6.1.jar │ │ │ │ │ ├── snappy-java-1.1.1.7.jar │ │ │ │ │ ├── zkclient-0.3.jar │ │ │ │ │ ├── zookeeper-3.4.6.jar │ │ │ │ │ └── zzz-kafka-influxdb-reporter-1.0.0-uber.jar │ │ │ ├── tools │ │ │ │ └── itop │ │ │ └── zk │ │ │ │ ├── bin │ │ │ │ ├── zkCleanup.sh │ │ │ │ ├── zkCli.sh │ │ │ │ ├── zkEnv.sh │ │ │ │ └── zkServer.sh │ │ │ │ ├── conf │ │ │ │ ├── log4j.properties │ │ │ │ └── zoo.cfg │ │ │ │ ├── data │ │ │ │ └── myid │ │ │ │ ├── lib │ │ │ │ ├── jline-0.9.94.jar │ │ │ │ ├── log4j-1.2.16.jar │ │ │ │ ├── netty-3.7.0.Final.jar │ │ │ │ ├── slf4j-api-1.6.1.jar │ │ │ │ └── slf4j-log4j12-1.6.1.jar │ │ │ │ └── zookeeper-3.4.6.jar │ │ ├── time.go │ │ ├── top.go │ │ ├── topbroker.go │ │ ├── topics.go │ │ ├── topology.go │ │ ├── trace.go │ │ ├── underreplicated.go │ │ ├── upgrade.go │ │ ├── utils.go │ │ ├── utils_test.go │ │ ├── verify.go │ │ ├── watch.go │ │ ├── webhook.go │ │ ├── whois.go │ │ ├── zkinstall.go │ │ ├── zklog.go │ │ ├── zktop.go │ │ ├── zones.go │ │ └── zookeeper.go │ ├── commands.go │ └── main.go ├── kateway │ ├── .gdbinit │ ├── Makefile │ ├── README.md │ ├── TESTCASE.md │ ├── api │ │ ├── v1 │ │ │ ├── admin.go │ │ │ ├── client.go │ │ │ ├── config.go │ │ │ ├── globals.go │ │ │ ├── job.go │ │ │ ├── pub.go │ │ │ └── sub.go │ │ └── v2 │ │ │ ├── client.go │ │ │ ├── config.go │ │ │ └── service.go │ ├── bench │ │ ├── Makefile │ │ ├── ack.go │ │ ├── 
bench.go │ │ ├── bury.go │ │ ├── fasthttpd.go │ │ ├── httpd.go │ │ ├── job.go │ │ ├── pub.go │ │ ├── pubsub.go │ │ ├── sub.go │ │ ├── system │ │ │ └── wal_test.go │ │ └── webhookd.go │ ├── demo │ │ ├── demo.php │ │ ├── java │ │ │ ├── Makefile │ │ │ └── PubsubDemo │ │ │ │ ├── pom.xml │ │ │ │ └── src │ │ │ │ ├── main │ │ │ │ ├── assembly │ │ │ │ │ └── assembly.xml │ │ │ │ ├── java │ │ │ │ │ └── com │ │ │ │ │ │ └── foo │ │ │ │ │ │ └── App.java │ │ │ │ └── resources │ │ │ │ │ └── log4j.properties │ │ │ │ └── test │ │ │ │ └── java │ │ │ │ └── com │ │ │ │ └── foo │ │ │ │ └── AppTest.java │ │ ├── pub.go │ │ ├── sub.go │ │ └── ws.go │ ├── gateway │ │ ├── Makefile │ │ ├── access_logger.go │ │ ├── base_test.go │ │ ├── bench_test.go │ │ ├── benchfast_test.go │ │ ├── codec.go │ │ ├── codec_test.go │ │ ├── const.go │ │ ├── doc.go │ │ ├── errors.go │ │ ├── gateway.go │ │ ├── guard.go │ │ ├── handler_fastpub.go │ │ ├── handler_job.go │ │ ├── handler_man.go │ │ ├── handler_man_pub.go │ │ ├── handler_man_sub.go │ │ ├── handler_metrics.go │ │ ├── handler_pub.go │ │ ├── handler_pub_raw.go │ │ ├── handler_pub_ws.go │ │ ├── handler_sub.go │ │ ├── handler_sub_ack.go │ │ ├── handler_sub_ack_test.go │ │ ├── handler_sub_bury.go │ │ ├── handler_sub_raw.go │ │ ├── handler_sub_ws.go │ │ ├── handler_token.go │ │ ├── handler_xa.go │ │ ├── healthchk.go │ │ ├── hijack.go │ │ ├── jwt.go │ │ ├── jwt_test.go │ │ ├── listener.go │ │ ├── listener_test.go │ │ ├── log.go │ │ ├── metrics_pub.go │ │ ├── metrics_server.go │ │ ├── metrics_sub.go │ │ ├── metrics_test.go │ │ ├── middleware.go │ │ ├── middleware_fast.go │ │ ├── middleware_test.go │ │ ├── option.go │ │ ├── response.go │ │ ├── routing.go │ │ ├── rpc.go │ │ ├── security.go │ │ ├── server.go │ │ ├── server_fastpub.go │ │ ├── server_man.go │ │ ├── server_pub.go │ │ ├── server_sub.go │ │ ├── server_test.go │ │ ├── server_web.go │ │ ├── tag.go │ │ ├── tag_test.go │ │ ├── ulimit_darwin.go │ │ ├── ulimit_linux.go │ │ ├── util.go │ │ ├── util_test.go │ 
│ ├── writer_wrapper.go │ │ └── ws.go │ ├── hh │ │ ├── disk │ │ │ ├── bench_test.go │ │ │ ├── block.go │ │ │ ├── block_test.go │ │ │ ├── bufio.go │ │ │ ├── config.go │ │ │ ├── cursor.go │ │ │ ├── disk.go │ │ │ ├── disk_test.go │ │ │ ├── doc.go │ │ │ ├── errors.go │ │ │ ├── example │ │ │ │ └── main.go │ │ │ ├── flusher.go │ │ │ ├── globals.go │ │ │ ├── housekeep.go │ │ │ ├── index.go │ │ │ ├── pump.go │ │ │ ├── queue.go │ │ │ ├── queue_test.go │ │ │ ├── replicator.go │ │ │ ├── segment.go │ │ │ ├── segment_test.go │ │ │ ├── types.go │ │ │ ├── types_test.go │ │ │ ├── util.go │ │ │ └── util_test.go │ │ ├── dummy │ │ │ └── dummy.go │ │ ├── kafka │ │ │ ├── doc.go │ │ │ └── service.go │ │ ├── mysql │ │ │ └── service.go │ │ └── service.go │ ├── inflight │ │ ├── doc.go │ │ ├── errors.go │ │ ├── inflight.go │ │ └── mem │ │ │ ├── mem.go │ │ │ └── mem_test.go │ ├── job │ │ ├── dummy │ │ │ └── job.go │ │ ├── errors.go │ │ ├── model.go │ │ ├── mysql │ │ │ ├── db.sql │ │ │ ├── doc.go │ │ │ ├── idgen.go │ │ │ ├── mysql.go │ │ │ ├── util.go │ │ │ └── util_test.go │ │ └── store.go │ ├── logo.go │ ├── main.go │ ├── manager │ │ ├── doc.go │ │ ├── dummy │ │ │ └── dummy.go │ │ ├── errors.go │ │ ├── manager.go │ │ ├── mysql │ │ │ ├── api.go │ │ │ ├── api_test.go │ │ │ ├── bench_test.go │ │ │ ├── config.go │ │ │ ├── db.go │ │ │ ├── db.sql │ │ │ └── schema.go │ │ └── open │ │ │ ├── api.go │ │ │ ├── api_test.go │ │ │ ├── bench_test.go │ │ │ ├── config.go │ │ │ ├── db.go │ │ │ └── schema.go │ ├── meta │ │ ├── errors.go │ │ ├── meta.go │ │ └── zkmeta │ │ │ ├── config.go │ │ │ ├── director.go │ │ │ ├── errors.go │ │ │ ├── zk.go │ │ │ └── zk_test.go │ ├── play.sh │ ├── store │ │ ├── dummy │ │ │ ├── dumb_test.go │ │ │ ├── fetcher.go │ │ │ ├── pub.go │ │ │ └── sub.go │ │ ├── errors.go │ │ ├── kafka │ │ │ ├── bench_test.go │ │ │ ├── compress_test.go │ │ │ ├── init.go │ │ │ ├── msgpool_go10.go │ │ │ ├── msgpool_go13.go │ │ │ ├── partitioner.go │ │ │ ├── partitioner_test.go │ │ │ ├── pubapi.go │ │ │ 
├── pubclient.go │ │ │ ├── pubfactory.go │ │ │ ├── pubpool.go │ │ │ ├── pubstore.go │ │ │ ├── subfetcher.go │ │ │ ├── submanager.go │ │ │ ├── submux.go │ │ │ ├── submux_test.go │ │ │ └── substore.go │ │ ├── pub.go │ │ └── sub.go │ └── structs │ │ └── types.go ├── kguard │ ├── README.md │ ├── bootstrap.go │ ├── main.go │ ├── monitor │ │ ├── alert.go │ │ ├── api.go │ │ ├── context.go │ │ ├── lags.go │ │ ├── monitor.go │ │ └── watcher.go │ ├── sos │ │ ├── doc.go │ │ └── sos.go │ └── watchers │ │ ├── actord │ │ └── actord.go │ │ ├── anomaly │ │ ├── qps.go │ │ └── qps_test.go │ │ ├── dbus │ │ └── dbus.go │ │ ├── external │ │ └── exec.go │ │ ├── gc │ │ └── kafka.go │ │ ├── haproxy │ │ └── haproxy.go │ │ ├── influxdb │ │ ├── influxalive.go │ │ └── influxd.go │ │ ├── influxquery │ │ ├── kateway.go │ │ ├── nginx.go │ │ ├── query.go │ │ └── redis.go │ │ ├── kafka │ │ ├── broker.go │ │ ├── cluster.go │ │ ├── consumer.go │ │ ├── controller.go │ │ ├── replica.go │ │ ├── topic.go │ │ └── zombie.go │ │ ├── kateway │ │ ├── apperr.go │ │ ├── kateway.go │ │ ├── pubsub.go │ │ ├── sub.go │ │ └── topics.go │ │ ├── redis │ │ ├── info.go │ │ ├── info_test.go │ │ └── slowlog.go │ │ ├── zk │ │ └── zk.go │ │ └── zone │ │ └── loadavg.go └── zk │ ├── README.md │ ├── command │ ├── acl.go │ ├── console.go │ ├── create.go │ ├── dump.go │ ├── get.go │ ├── ls.go │ ├── replace.go │ ├── rm.go │ ├── set.go │ ├── stat.go │ ├── top.go │ ├── util.go │ └── zones.go │ ├── commands.go │ └── main.go ├── ctx ├── api.go ├── config.go ├── config_test.go ├── const.go ├── doc.go ├── gafka.cf ├── load.go ├── util.go ├── util_test.go └── zone.go ├── diagnostics └── agent │ └── agent.go ├── manifest.json ├── mpool ├── bench_test.go ├── const.go ├── doc.go ├── intern.go ├── intern_test.go ├── message.go ├── message_test.go ├── mpool_go10.go └── mpool_go13.go ├── registry ├── doc.go ├── dummy │ └── reg.go ├── eureka │ └── eureka.go ├── registry.go └── zk │ ├── zk.go │ └── zk_test.go ├── sla ├── api.go ├── doc.go ├── 
err.go ├── sla.go └── sla_test.go ├── telemetry ├── Makefile ├── console │ └── reporter.go ├── influxdb │ ├── config.go │ ├── dump.go │ ├── dump_test.go │ ├── runner.go │ └── tag.go ├── reporter.go ├── sos.go ├── tag.go └── tag_test.go ├── ver.go └── zk ├── config.go ├── dbus.go ├── dbus_test.go ├── doc.go ├── error.go ├── es.go ├── flw.go ├── kafka.go ├── meta.go ├── meta_test.go ├── orchestrator.go ├── orchestrator_test.go ├── path.go ├── path_test.go ├── redis.go ├── sos.go ├── util.go ├── util_test.go ├── zkcluster.go ├── zkstat.go ├── zkstat_test.go └── zkzone.go /.dockerignore: -------------------------------------------------------------------------------- 1 | .git/FETCH_HEAD 2 | .git/modules 3 | cmd/kateway/kateway 4 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: go 2 | 3 | go: 4 | - 1.7 5 | - 1.8 6 | 7 | before_install: 8 | - sudo apt-get install -qq libpcap-dev 9 | - go get github.com/funkygao/go-metrics 10 | - go get github.com/jteeuwen/go-bindata 11 | - go install github.com/jteeuwen/go-bindata/go-bindata 12 | 13 | install: 14 | - go get -d -t -v ./... 15 | - ./build.sh -a 16 | 17 | test: 18 | - go test github.com/funkygao/gafka/cmd/kateway/gateway/... 19 | - go test github.com/funkygao/gafka/cmd/kguard/... 20 | - go test github.com/funkygao/gafka/sla 21 | - go test github.com/funkygao/gafka/telemetry 22 | 23 | # this should give us faster builds according to 24 | # http://docs.travis-ci.com/user/migrating-from-legacy/ 25 | sudo: false 26 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | ############################################################ 2 | ## Dockerfile to build kateway container images 3 | # 4 | # to run it: 5 | # 0. 
curl -sSL https://get.docker.com/ | sh; service docker start 6 | # 1. docker build -t "kateway:latest" . 7 | # 2. docker run -d --name kateway -p 10191:9191 -p 10192:9192 -p 10193:9193 kateway:latest /go/bin/kateway -z prod -id 1 -log kateway.log -crashlog panic -influxdbaddr http://10.213.1.223:8086 8 | ############################################################# 9 | FROM golang:1.5.2 10 | 11 | MAINTAINER Funky Gao "funky.gao@gmail.com" 12 | 13 | ENV PATH $PATH:$GOPATH/bin 14 | 15 | VOLUME /opt 16 | 17 | WORKDIR /go/src/github.com/funkygao/gafka/ 18 | ADD . /go/src/github.com/funkygao/gafka/ 19 | 20 | RUN go get ./cmd/kateway/... 21 | RUN ./build.sh -it kateway 22 | 23 | EXPOSE 9191 9192 9193 24 | -------------------------------------------------------------------------------- /TODO.md: -------------------------------------------------------------------------------- 1 | # TODO 2 | 3 | ### gk 4 | 5 | - [ ] sniff 6 | - zookpeer/kafka protocol parser 7 | - text parser 8 | 9 | - [X] balance 10 | 11 | ### kateway 12 | 13 | - [X] hh 14 | - return offset -1 instead of 0 15 | - return HTTP 202 instead of 201 16 | 17 | -------------------------------------------------------------------------------- /cmd/actord/bootstrap/options.go: -------------------------------------------------------------------------------- 1 | package bootstrap 2 | 3 | var Options struct { 4 | Zone string 5 | ShowVersion bool 6 | LogFile string 7 | LogLevel string 8 | LogRotateSize int 9 | InfluxAddr string 10 | InfluxDbname string 11 | ListenAddr string 12 | ManagerType string 13 | HintedHandoffDir string 14 | } 15 | -------------------------------------------------------------------------------- /cmd/actord/controller/assign_test.go: -------------------------------------------------------------------------------- 1 | package controller 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/funkygao/assert" 7 | "github.com/funkygao/gafka/zk" 8 | ) 9 | 10 | func TestAssignResourcesToActors_Normal(t 
*testing.T) { 11 | jobs := zk.ResourceList([]string{"a", "b", "c", "d", "e"}) 12 | actors := zk.ActorList([]string{"1", "2"}) 13 | 14 | decision := assignResourcesToActors(actors, jobs) 15 | t.Logf("%+v", decision) 16 | assert.Equal(t, 3, len(decision["1"])) 17 | assert.Equal(t, 2, len(decision["2"])) 18 | } 19 | 20 | func TestAssignResourcesToActors_EmptyJobQueues(t *testing.T) { 21 | jobs := zk.ResourceList([]string{}) 22 | actors := zk.ActorList([]string{"1", "2"}) 23 | 24 | decision := assignResourcesToActors(actors, jobs) 25 | t.Logf("%+v", decision) 26 | } 27 | 28 | func TestAssignResourcesToActors_ActorMoreThanJobQueues(t *testing.T) { 29 | jobs := zk.ResourceList([]string{"job1"}) 30 | actors := zk.ActorList([]string{"1", "2"}) 31 | 32 | decision := assignResourcesToActors(actors, jobs) 33 | t.Logf("%+v", decision) 34 | assert.Equal(t, 0, len(decision["2"])) 35 | assert.Equal(t, 1, len(decision["1"])) 36 | } 37 | -------------------------------------------------------------------------------- /cmd/actord/controller/audit.go: -------------------------------------------------------------------------------- 1 | package controller 2 | 3 | import ( 4 | "os" 5 | 6 | log "github.com/funkygao/log4go" 7 | ) 8 | 9 | func (this *controller) setupAuditor() { 10 | this.auditor = log.NewDefaultLogger(log.TRACE) 11 | this.auditor.DeleteFilter("stdout") 12 | 13 | _ = os.Mkdir("audit", os.ModePerm) 14 | rotateEnabled, discardWhenDiskFull := true, false 15 | filer := log.NewFileLogWriter("audit/actord.log", rotateEnabled, discardWhenDiskFull, 0644) 16 | if filer == nil { 17 | panic("failed to open audit log") 18 | } 19 | filer.SetFormat("[%d %T] [%L] (%S) %M") 20 | filer.SetRotateLines(0) 21 | filer.SetRotateDaily(true) 22 | this.auditor.AddFilter("file", log.TRACE, filer) 23 | } 24 | -------------------------------------------------------------------------------- /cmd/actord/controller/httpd.go: 
-------------------------------------------------------------------------------- 1 | package controller 2 | 3 | import ( 4 | "net/http" 5 | 6 | log "github.com/funkygao/log4go" 7 | ) 8 | 9 | func (this *controller) runWebServer() { 10 | http.HandleFunc("/v1/status", this.statusHandler) 11 | log.Info("web server on %s ready", this.ListenAddr) 12 | err := http.ListenAndServe(this.ListenAddr, nil) 13 | if err != nil { 14 | log.Error("ListenAndServe: %s", err) 15 | } 16 | } 17 | 18 | func (this *controller) statusHandler(w http.ResponseWriter, r *http.Request) { 19 | w.Header().Set("Content-Type", "application/json; charset=utf8") 20 | w.Header().Set("Server", "actord") 21 | 22 | w.Write(this.Bytes()) 23 | } 24 | -------------------------------------------------------------------------------- /cmd/actord/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "os" 6 | "runtime/debug" 7 | "time" 8 | 9 | "github.com/funkygao/gafka" 10 | "github.com/funkygao/gafka/cmd/actord/bootstrap" 11 | "github.com/funkygao/gafka/cmd/kateway/gateway" 12 | log "github.com/funkygao/log4go" 13 | ) 14 | 15 | func init() { 16 | gateway.EnsureServerUlimit() 17 | debug.SetGCPercent(800) // same as env GOGC 18 | } 19 | 20 | func main() { 21 | for _, arg := range os.Args[1:] { 22 | if arg == "-v" || arg == "-version" { 23 | fmt.Fprintf(os.Stderr, "%s-%s\n", gafka.Version, gafka.BuildId) 24 | return 25 | } 26 | } 27 | 28 | t0 := time.Now() 29 | bootstrap.Main() 30 | log.Info("actord[%s@%s] %s, bye!", gafka.BuildId, gafka.BuiltAt, time.Since(t0)) 31 | log.Close() 32 | } 33 | -------------------------------------------------------------------------------- /cmd/ehaproxy/Makefile: -------------------------------------------------------------------------------- 1 | all:build 2 | 3 | build: 4 | go generate ./... 
5 | go install 6 | 7 | validate: 8 | @echo =============Pub============= 9 | curl -i http://localhost:9191/alive 10 | @echo 11 | @echo =============Sub============= 12 | curl -i http://localhost:9192/alive 13 | @echo 14 | @echo =============Man============= 15 | curl -i http://localhost:9193/alive 16 | @echo 17 | @echo =============dashboard============= 18 | curl -i http://localhost:10894/v1/status 19 | @echo 20 | @echo =============diagnostics=========== 21 | curl -i http://localhost:10120/debug/pprof/ 22 | 23 | -------------------------------------------------------------------------------- /cmd/ehaproxy/README.md: -------------------------------------------------------------------------------- 1 | # ehaproxy 2 | 3 | Elastic haproxy that sits in front of kateway. 4 | 5 | -------------------------------------------------------------------------------- /cmd/ehaproxy/command/config.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | "flag" 5 | "fmt" 6 | "io/ioutil" 7 | "strings" 8 | 9 | "github.com/funkygao/gocli" 10 | ) 11 | 12 | type Config struct { 13 | Ui cli.Ui 14 | Cmd string 15 | 16 | root string 17 | } 18 | 19 | func (this *Config) Run(args []string) (exitCode int) { 20 | cmdFlags := flag.NewFlagSet("config", flag.ContinueOnError) 21 | cmdFlags.Usage = func() { this.Ui.Output(this.Help()) } 22 | cmdFlags.StringVar(&this.root, "p", defaultPrefix, "") 23 | if err := cmdFlags.Parse(args); err != nil { 24 | return 1 25 | } 26 | 27 | b, e := ioutil.ReadFile(fmt.Sprintf("%s/%s", this.root, configFile)) 28 | swalllow(e) 29 | 30 | this.Ui.Output(string(b)) 31 | 32 | return 33 | } 34 | 35 | func (this *Config) Synopsis() string { 36 | return fmt.Sprintf("Display %s active configuration", this.Cmd) 37 | } 38 | 39 | func (this *Config) Help() string { 40 | help := fmt.Sprintf(` 41 | Usage: %s config [options] 42 | 43 | Display %s active configuration 44 | 45 | Options: 46 | 47 | -p prefix 48 | 
Default %s 49 | 50 | `, this.Cmd, this.Cmd, defaultPrefix) 51 | return strings.TrimSpace(help) 52 | } 53 | -------------------------------------------------------------------------------- /cmd/ehaproxy/command/consts.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | const ( 4 | defaultPrefix = "/var/wd/ehaproxy" 5 | defaultLogfile = "ehaproxy.log" 6 | configFile = ".haproxy.cf" 7 | haproxyPidFile = "haproxy.pid" 8 | 9 | dashboardPortHead = 10910 10 | ) 11 | -------------------------------------------------------------------------------- /cmd/ehaproxy/command/templates/500.http: -------------------------------------------------------------------------------- 1 | HTTP/1.0 500 Internal Server Error 2 | Cache-Control: no-cache 3 | Connection: close 4 | Content-Type: application/json; charset=utf8 5 | 6 | {"errmsg":"ehaproxy - Load balancer internal error"} 7 | -------------------------------------------------------------------------------- /cmd/ehaproxy/command/templates/502.http: -------------------------------------------------------------------------------- 1 | HTTP/1.0 502 Bad Gateway 2 | Cache-Control: no-cache 3 | Connection: close 4 | Content-Type: application/json; charset=utf8 5 | 6 | {"errmsg":"ehaproxy - Load balancer encountered bad response from backend"} 7 | -------------------------------------------------------------------------------- /cmd/ehaproxy/command/templates/503.http: -------------------------------------------------------------------------------- 1 | HTTP/1.0 503 Service Unavailable 2 | Cache-Control: no-cache 3 | Connection: close 4 | Content-Type: application/json; charset=utf8 5 | 6 | {"errmsg":"ehaproxy - No server is available to handle this request"} 7 | -------------------------------------------------------------------------------- /cmd/ehaproxy/command/templates/504.http: -------------------------------------------------------------------------------- 1 | HTTP/1.0 504 
Gateway Timeout 2 | Cache-Control: no-cache 3 | Connection: close 4 | Content-Type: application/json; charset=utf8 5 | 6 | {"errmsg":"ehaproxy - backend timeout"} 7 | -------------------------------------------------------------------------------- /cmd/ehaproxy/command/templates/haproxy-1.6.3.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/ehaproxy/command/templates/haproxy-1.6.3.tar.gz -------------------------------------------------------------------------------- /cmd/ehaproxy/command/templates/hatop-0.7.7.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/ehaproxy/command/templates/hatop-0.7.7.tar.gz -------------------------------------------------------------------------------- /cmd/ehaproxy/command/util.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | "sort" 5 | ) 6 | 7 | func swalllow(err error) { 8 | if err != nil { 9 | panic(err) 10 | } 11 | } 12 | 13 | func sortBackendByName(all []Backend) []Backend { 14 | m := make(map[string]Backend, len(all)) 15 | sortedNames := make([]string, 0, len(all)) 16 | for _, b := range all { 17 | m[b.Name] = b 18 | sortedNames = append(sortedNames, b.Name) 19 | } 20 | sort.Strings(sortedNames) 21 | 22 | r := make([]Backend, 0, len(all)) 23 | for _, name := range sortedNames { 24 | r = append(r, m[name]) 25 | } 26 | 27 | return r 28 | } 29 | -------------------------------------------------------------------------------- /cmd/ehaproxy/command/util_test.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/funkygao/assert" 7 | ) 8 | 9 | func TestSortBackendByName(t *testing.T) { 10 | all 
:= []Backend{ 11 | {Name: "p2"}, 12 | {Name: "p1"}, 13 | {Name: "p3"}, 14 | } 15 | 16 | r := sortBackendByName(all) 17 | t.Logf("%+v", r) 18 | assert.Equal(t, "p1", r[0].Name) 19 | assert.Equal(t, "p2", r[1].Name) 20 | assert.Equal(t, "p3", r[2].Name) 21 | } 22 | -------------------------------------------------------------------------------- /cmd/ehaproxy/commands.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "os" 5 | 6 | "github.com/funkygao/gafka/cmd/ehaproxy/command" 7 | "github.com/funkygao/gocli" 8 | ) 9 | 10 | var commands map[string]cli.CommandFactory 11 | 12 | func init() { 13 | ui := &cli.ColoredUi{ 14 | Ui: &cli.BasicUi{ 15 | Writer: os.Stdout, 16 | Reader: os.Stdin, 17 | ErrorWriter: os.Stderr, 18 | }, 19 | InfoColor: cli.UiColorGreen, 20 | ErrorColor: cli.UiColorRed, 21 | WarnColor: cli.UiColorYellow, 22 | } 23 | cmd := os.Args[0] 24 | 25 | commands = map[string]cli.CommandFactory{ 26 | "config": func() (cli.Command, error) { 27 | return &command.Config{ 28 | Ui: ui, 29 | Cmd: cmd, 30 | }, nil 31 | }, 32 | 33 | "deploy": func() (cli.Command, error) { 34 | return &command.Deploy{ 35 | Ui: ui, 36 | Cmd: cmd, 37 | }, nil 38 | }, 39 | 40 | "start": func() (cli.Command, error) { 41 | return &command.Start{ 42 | Ui: ui, 43 | Cmd: cmd, 44 | }, nil 45 | }, 46 | } 47 | 48 | } 49 | -------------------------------------------------------------------------------- /cmd/es/README.md: -------------------------------------------------------------------------------- 1 | # es 2 | 3 | ElasticSearch console 4 | 5 | ### Usage 6 | 7 | $es 8 | ElasticSearch Console 9 | 10 | usage: es [--version] [--help] [] 11 | 12 | Available commands are: 13 | aliases Currently configured aliases to indices 14 | allocation Display #shards and disk space used by data node 15 | clusters ElasticSearch clusters 16 | count Document count of the entire cluster 17 | health Health of cluster 18 | indices List indices 19 | 
merge Visualize segments merge process 20 | nodes Display nodes of cluster 21 | pending Pending tasks 22 | plugins Provides a view per node of running plugins 23 | segments Display low level segments in shards 24 | shards Detailed view of what nodes contain which shards 25 | threads Show cluster wide thread pool per node 26 | top Unix “top” like utility for ElasticSearch 27 | -------------------------------------------------------------------------------- /cmd/es/command/aliases.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | "flag" 5 | "fmt" 6 | "strings" 7 | 8 | "github.com/funkygao/gafka/ctx" 9 | "github.com/funkygao/gafka/zk" 10 | "github.com/funkygao/gocli" 11 | ) 12 | 13 | type Aliases struct { 14 | Ui cli.Ui 15 | Cmd string 16 | } 17 | 18 | func (this *Aliases) Run(args []string) (exitCode int) { 19 | var ( 20 | zone string 21 | cluster string 22 | ) 23 | cmdFlags := flag.NewFlagSet("aliases", flag.ContinueOnError) 24 | cmdFlags.Usage = func() { this.Ui.Output(this.Help()) } 25 | cmdFlags.StringVar(&zone, "z", ctx.EsDefaultZone(), "") 26 | cmdFlags.StringVar(&cluster, "c", "", "") 27 | if err := cmdFlags.Parse(args); err != nil { 28 | return 1 29 | } 30 | 31 | zkzone := zk.NewZkZone(zk.DefaultConfig(zone, ctx.ZoneZkAddrs(zone))) 32 | handleCatCommand(this.Ui, zkzone, cluster, "aliases") 33 | 34 | return 35 | } 36 | 37 | func (*Aliases) Synopsis() string { 38 | return "Currently configured aliases to indices" 39 | } 40 | 41 | func (this *Aliases) Help() string { 42 | help := fmt.Sprintf(` 43 | Usage: %s aliases [options] 44 | 45 | %s 46 | 47 | Options: 48 | 49 | -z zone 50 | 51 | -c cluster 52 | 53 | `, this.Cmd, this.Synopsis()) 54 | return strings.TrimSpace(help) 55 | } 56 | -------------------------------------------------------------------------------- /cmd/es/command/count.go: -------------------------------------------------------------------------------- 1 | package command 2 
| 3 | import ( 4 | "flag" 5 | "fmt" 6 | "strings" 7 | 8 | "github.com/funkygao/gafka/ctx" 9 | "github.com/funkygao/gafka/zk" 10 | "github.com/funkygao/gocli" 11 | ) 12 | 13 | type Count struct { 14 | Ui cli.Ui 15 | Cmd string 16 | } 17 | 18 | func (this *Count) Run(args []string) (exitCode int) { 19 | var ( 20 | zone string 21 | cluster string 22 | ) 23 | cmdFlags := flag.NewFlagSet("count", flag.ContinueOnError) 24 | cmdFlags.Usage = func() { this.Ui.Output(this.Help()) } 25 | cmdFlags.StringVar(&zone, "z", ctx.EsDefaultZone(), "") 26 | cmdFlags.StringVar(&cluster, "c", "", "") 27 | if err := cmdFlags.Parse(args); err != nil { 28 | return 1 29 | } 30 | 31 | zkzone := zk.NewZkZone(zk.DefaultConfig(zone, ctx.ZoneZkAddrs(zone))) 32 | handleCatCommand(this.Ui, zkzone, cluster, "count") 33 | 34 | return 35 | } 36 | 37 | func (*Count) Synopsis() string { 38 | return "Document count of the entire cluster" 39 | } 40 | 41 | func (this *Count) Help() string { 42 | help := fmt.Sprintf(` 43 | Usage: %s count [options] 44 | 45 | %s 46 | 47 | Options: 48 | 49 | -z zone 50 | 51 | -c cluster 52 | 53 | `, this.Cmd, this.Synopsis()) 54 | return strings.TrimSpace(help) 55 | } 56 | -------------------------------------------------------------------------------- /cmd/es/command/pending.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | "flag" 5 | "fmt" 6 | "strings" 7 | 8 | "github.com/funkygao/gafka/ctx" 9 | "github.com/funkygao/gafka/zk" 10 | "github.com/funkygao/gocli" 11 | ) 12 | 13 | type Pending struct { 14 | Ui cli.Ui 15 | Cmd string 16 | } 17 | 18 | func (this *Pending) Run(args []string) (exitCode int) { 19 | var ( 20 | zone string 21 | cluster string 22 | ) 23 | cmdFlags := flag.NewFlagSet("pending", flag.ContinueOnError) 24 | cmdFlags.Usage = func() { this.Ui.Output(this.Help()) } 25 | cmdFlags.StringVar(&zone, "z", ctx.EsDefaultZone(), "") 26 | cmdFlags.StringVar(&cluster, "c", "", "") 27 | if err 
:= cmdFlags.Parse(args); err != nil { 28 | return 1 29 | } 30 | 31 | zkzone := zk.NewZkZone(zk.DefaultConfig(zone, ctx.ZoneZkAddrs(zone))) 32 | handleCatCommand(this.Ui, zkzone, cluster, "pending_tasks") 33 | 34 | return 35 | } 36 | 37 | func (*Pending) Synopsis() string { 38 | return "Pending tasks" 39 | } 40 | 41 | func (this *Pending) Help() string { 42 | help := fmt.Sprintf(` 43 | Usage: %s pending [options] 44 | 45 | %s 46 | 47 | Options: 48 | 49 | -z zone 50 | 51 | -c cluster 52 | 53 | `, this.Cmd, this.Synopsis()) 54 | return strings.TrimSpace(help) 55 | } 56 | -------------------------------------------------------------------------------- /cmd/es/command/plugins.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | "flag" 5 | "fmt" 6 | "strings" 7 | 8 | "github.com/funkygao/gafka/ctx" 9 | "github.com/funkygao/gafka/zk" 10 | "github.com/funkygao/gocli" 11 | ) 12 | 13 | type Plugins struct { 14 | Ui cli.Ui 15 | Cmd string 16 | } 17 | 18 | func (this *Plugins) Run(args []string) (exitCode int) { 19 | var ( 20 | zone string 21 | cluster string 22 | ) 23 | cmdFlags := flag.NewFlagSet("plugins", flag.ContinueOnError) 24 | cmdFlags.Usage = func() { this.Ui.Output(this.Help()) } 25 | cmdFlags.StringVar(&zone, "z", ctx.EsDefaultZone(), "") 26 | cmdFlags.StringVar(&cluster, "c", "", "") 27 | if err := cmdFlags.Parse(args); err != nil { 28 | return 1 29 | } 30 | 31 | zkzone := zk.NewZkZone(zk.DefaultConfig(zone, ctx.ZoneZkAddrs(zone))) 32 | handleCatCommand(this.Ui, zkzone, cluster, "plugins") 33 | 34 | return 35 | } 36 | 37 | func (*Plugins) Synopsis() string { 38 | return "Provides a view per node of running plugins" 39 | } 40 | 41 | func (this *Plugins) Help() string { 42 | help := fmt.Sprintf(` 43 | Usage: %s plugins [options] 44 | 45 | %s 46 | 47 | Options: 48 | 49 | -z zone 50 | 51 | -c cluster 52 | 53 | `, this.Cmd, this.Synopsis()) 54 | return strings.TrimSpace(help) 55 | } 56 | 
-------------------------------------------------------------------------------- /cmd/es/command/segments.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | "flag" 5 | "fmt" 6 | "strings" 7 | 8 | "github.com/funkygao/gafka/ctx" 9 | "github.com/funkygao/gafka/zk" 10 | "github.com/funkygao/gocli" 11 | ) 12 | 13 | type Segments struct { 14 | Ui cli.Ui 15 | Cmd string 16 | } 17 | 18 | func (this *Segments) Run(args []string) (exitCode int) { 19 | var ( 20 | zone string 21 | cluster string 22 | ) 23 | cmdFlags := flag.NewFlagSet("segments", flag.ContinueOnError) 24 | cmdFlags.Usage = func() { this.Ui.Output(this.Help()) } 25 | cmdFlags.StringVar(&zone, "z", ctx.EsDefaultZone(), "") 26 | cmdFlags.StringVar(&cluster, "c", "", "") 27 | if err := cmdFlags.Parse(args); err != nil { 28 | return 1 29 | } 30 | 31 | zkzone := zk.NewZkZone(zk.DefaultConfig(zone, ctx.ZoneZkAddrs(zone))) 32 | handleCatCommand(this.Ui, zkzone, cluster, "segments") 33 | return 34 | } 35 | 36 | func (*Segments) Synopsis() string { 37 | return "Display low level segments in shards" 38 | } 39 | 40 | func (this *Segments) Help() string { 41 | help := fmt.Sprintf(` 42 | Usage: %s segments [options] 43 | 44 | %s 45 | 46 | Options: 47 | 48 | -z zone 49 | 50 | -c cluster 51 | 52 | `, this.Cmd, this.Synopsis()) 53 | return strings.TrimSpace(help) 54 | } 55 | -------------------------------------------------------------------------------- /cmd/es/command/top.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | "flag" 5 | "fmt" 6 | "strings" 7 | 8 | "github.com/funkygao/gafka/ctx" 9 | "github.com/funkygao/gocli" 10 | ) 11 | 12 | type Top struct { 13 | Ui cli.Ui 14 | Cmd string 15 | } 16 | 17 | func (this *Top) Run(args []string) (exitCode int) { 18 | var ( 19 | zone string 20 | cluster string 21 | ) 22 | cmdFlags := flag.NewFlagSet("top", flag.ContinueOnError) 23 | 
cmdFlags.Usage = func() { this.Ui.Output(this.Help()) } 24 | cmdFlags.StringVar(&zone, "z", ctx.EsDefaultZone(), "") 25 | cmdFlags.StringVar(&cluster, "c", "", "") 26 | if err := cmdFlags.Parse(args); err != nil { 27 | return 1 28 | } 29 | return 30 | } 31 | 32 | func (*Top) Synopsis() string { 33 | return "Unix “top” like utility for ElasticSearch" 34 | } 35 | 36 | func (this *Top) Help() string { 37 | help := fmt.Sprintf(` 38 | Usage: %s top 39 | 40 | %s 41 | 42 | `, this.Cmd, this.Synopsis()) 43 | return strings.TrimSpace(help) 44 | } 45 | -------------------------------------------------------------------------------- /cmd/gk/command/agent/Makefile: -------------------------------------------------------------------------------- 1 | a1: 2 | gk agent -start -port 9001 3 | 4 | a2: 5 | gk agent -start -port 9101 -join localhost:9001 6 | -------------------------------------------------------------------------------- /cmd/gk/command/agent/api.go: -------------------------------------------------------------------------------- 1 | package agent 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | "net/http" 7 | 8 | log "github.com/funkygao/log4go" 9 | ) 10 | 11 | func (a *Agent) startAPIServer(port int) { 12 | http.HandleFunc("/v1/state", a.stateHandler) 13 | http.HandleFunc("/v1/members", a.membersHandler) 14 | 15 | // FIXME security 16 | addr := fmt.Sprintf(":%d", port) 17 | log.Info("api server ready on %s", addr) 18 | http.ListenAndServe(addr, nil) 19 | } 20 | 21 | func (a *Agent) stateHandler(w http.ResponseWriter, r *http.Request) { 22 | b, _ := json.Marshal(a.State()) 23 | w.Write(b) 24 | } 25 | 26 | func (a *Agent) membersHandler(w http.ResponseWriter, r *http.Request) { 27 | b, _ := json.Marshal(a.State()["delegate"]) 28 | w.Write(b) 29 | } 30 | 31 | func (a *Agent) membersUri(port int) string { 32 | return fmt.Sprintf("http://localhost:%d/v1/members", apiPort(port)) 33 | } 34 | -------------------------------------------------------------------------------- 
// apiPort derives the HTTP API port from the agent's gossip port:
// the API always listens exactly one port above it.
func apiPort(port int) int {
	const apiPortOffset = 1
	return apiPortOffset + port
}
14 | // default auth component, caller can override this 15 | Authenticator = func(user, pass string) bool { 16 | if pass == adminPasswd { 17 | return true 18 | } 19 | 20 | return false 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /cmd/gk/command/capacity.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | "flag" 5 | "fmt" 6 | "strings" 7 | 8 | "github.com/funkygao/gocli" 9 | ) 10 | 11 | // chain of dependencies, performance metrics, prioritization 12 | type Capacity struct { 13 | Ui cli.Ui 14 | Cmd string 15 | } 16 | 17 | func (this *Capacity) Run(args []string) (exitCode int) { 18 | cmdFlags := flag.NewFlagSet("capacity", flag.ContinueOnError) 19 | cmdFlags.Usage = func() { this.Ui.Output(this.Help()) } 20 | if err := cmdFlags.Parse(args); err != nil { 21 | return 1 22 | } 23 | 24 | return 25 | } 26 | 27 | func (this *Capacity) Synopsis() string { 28 | return "Intent-based capacity planning generate resource allocation plan" 29 | } 30 | 31 | func (this *Capacity) Help() string { 32 | help := fmt.Sprintf(` 33 | Usage: %s capacity [options] 34 | 35 | %s 36 | 37 | Options: 38 | 39 | 40 | `, this.Cmd, this.Synopsis()) 41 | return strings.TrimSpace(help) 42 | } 43 | -------------------------------------------------------------------------------- /cmd/gk/command/comma.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | "flag" 5 | "fmt" 6 | "strconv" 7 | "strings" 8 | 9 | "github.com/funkygao/gocli" 10 | "github.com/funkygao/golib/gofmt" 11 | ) 12 | 13 | type Comma struct { 14 | Ui cli.Ui 15 | Cmd string 16 | } 17 | 18 | func (this *Comma) Run(args []string) (exitCode int) { 19 | cmdFlags := flag.NewFlagSet("comma", flag.ContinueOnError) 20 | cmdFlags.Usage = func() { this.Ui.Output(this.Help()) } 21 | if err := cmdFlags.Parse(args); err != nil { 22 | return 2 23 | } 24 
| 25 | if len(args) == 0 { 26 | this.Ui.Error("missing ") 27 | return 2 28 | } 29 | 30 | arg := args[len(args)-1] 31 | for _, n := range strings.Split(arg, ",") { 32 | i, err := strconv.ParseInt(n, 10, 64) 33 | swallow(err) 34 | this.Ui.Outputf("%s -> %s", n, gofmt.Comma(i)) 35 | } 36 | 37 | return 38 | } 39 | 40 | func (*Comma) Synopsis() string { 41 | return "Place commas after every three orders of magnitude" 42 | } 43 | 44 | func (this *Comma) Help() string { 45 | help := fmt.Sprintf(` 46 | Usage: %s comma 47 | 48 | %s 49 | 50 | `, this.Cmd, this.Synopsis()) 51 | return strings.TrimSpace(help) 52 | } 53 | -------------------------------------------------------------------------------- /cmd/gk/command/cpu.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | "fmt" 5 | "strings" 6 | 7 | "github.com/funkygao/gocli" 8 | "github.com/funkygao/golib/gofmt" 9 | "github.com/klauspost/cpuid" 10 | ) 11 | 12 | type Cpu struct { 13 | Ui cli.Ui 14 | Cmd string 15 | } 16 | 17 | func (this *Cpu) Run(args []string) (exitCode int) { 18 | // might not work under VM 19 | fmt.Println(cpuid.CPU.BrandName, "Family", cpuid.CPU.Family, "Model:", cpuid.CPU.Model) 20 | fmt.Println("PhysicalCores:", cpuid.CPU.PhysicalCores, "ThreadsPerCore:", cpuid.CPU.ThreadsPerCore, "LogicalCores:", cpuid.CPU.LogicalCores) 21 | fmt.Println("Cacheline bytes:", cpuid.CPU.CacheLine) 22 | fmt.Println("L1 Data Cache:", gofmt.ByteSize(cpuid.CPU.Cache.L1D), "Instruction Cache:", gofmt.ByteSize(cpuid.CPU.Cache.L1D)) 23 | fmt.Println("L2 Cache:", gofmt.ByteSize(cpuid.CPU.Cache.L2)) 24 | fmt.Println("L3 Cache:", gofmt.ByteSize(cpuid.CPU.Cache.L3)) 25 | 26 | return 27 | } 28 | 29 | func (*Cpu) Synopsis() string { 30 | return "Detect information about the CPU" 31 | } 32 | 33 | func (this *Cpu) Help() string { 34 | help := fmt.Sprintf(` 35 | Usage: %s cpu 36 | 37 | %s 38 | 39 | `, this.Cmd, this.Synopsis()) 40 | return strings.TrimSpace(help) 41 
| } 42 | -------------------------------------------------------------------------------- /cmd/gk/command/deploy_test.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/funkygao/assert" 7 | "github.com/funkygao/gocli" 8 | ) 9 | 10 | func TestValidateLogDirs(t *testing.T) { 11 | d := Deploy{Ui: &cli.BasicUi{}} 12 | type fixture struct { 13 | dirs string 14 | expected string 15 | } 16 | fixtures := []fixture{ 17 | {"/non-exist/kfk_demo", "/non-exist/kfk_demo"}, 18 | {"/non-exist/kfk_demo/", "/non-exist/kfk_demo/"}, 19 | {"/tmp/kfk_demo", ""}, 20 | {"/tmp/kfk_demo/", ""}, 21 | {"/kfk_demo1", ""}, 22 | {"/kfk_demo1/", ""}, 23 | } 24 | for _, f := range fixtures { 25 | assert.Equal(t, f.expected, d.validateLogDirs(f.dirs)) 26 | } 27 | 28 | } 29 | -------------------------------------------------------------------------------- /cmd/gk/command/faq.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | "fmt" 5 | "strings" 6 | 7 | "github.com/funkygao/gocli" 8 | ) 9 | 10 | type Faq struct { 11 | Ui cli.Ui 12 | Cmd string 13 | } 14 | 15 | func (this *Faq) Run(args []string) (exitCode int) { 16 | content := fmt.Sprintf(` 17 | What is zone? 18 | zone is named after AWS zone with the same sematics. 19 | It is a group of kafka clusters that share the same Zookeeper ensemble. 20 | They can be located in different IDC or in the same IDC with different environments. 21 | %s will automatically install zones in your $HOME/.gafka.cf 22 | 23 | What is cluster? 24 | cluster is a kafka cluster: a group of kafka brokers that share the 25 | same zookeeper.connect and different broker.id 26 | 27 | How can I get help? 
28 | %s -h 29 | 30 | `, this.Cmd, this.Cmd) 31 | 32 | this.Ui.Output(strings.TrimSpace(content)) 33 | return 34 | } 35 | 36 | func (*Faq) Synopsis() string { 37 | return "FAQ" 38 | } 39 | 40 | func (this *Faq) Help() string { 41 | help := fmt.Sprintf(` 42 | Usage: %s ? 43 | 44 | %s 45 | 46 | `, this.Cmd, this.Synopsis()) 47 | return strings.TrimSpace(help) 48 | } 49 | -------------------------------------------------------------------------------- /cmd/gk/command/init.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | //_ "github.com/go-sql-driver/mysql" 5 | ) 6 | -------------------------------------------------------------------------------- /cmd/gk/command/jmx.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | "flag" 5 | "fmt" 6 | "strings" 7 | 8 | "github.com/funkygao/gafka/ctx" 9 | "github.com/funkygao/gocli" 10 | ) 11 | 12 | type Jmx struct { 13 | Ui cli.Ui 14 | Cmd string 15 | 16 | zone string 17 | cluster string 18 | } 19 | 20 | func (this *Jmx) Run(args []string) (exitCode int) { 21 | cmdFlags := flag.NewFlagSet("jmx", flag.ContinueOnError) 22 | cmdFlags.Usage = func() { this.Ui.Output(this.Help()) } 23 | cmdFlags.StringVar(&this.zone, "z", ctx.DefaultZone(), "") 24 | cmdFlags.StringVar(&this.cluster, "c", "", "") 25 | if err := cmdFlags.Parse(args); err != nil { 26 | return 1 27 | } 28 | 29 | return 30 | } 31 | 32 | func (*Jmx) Synopsis() string { 33 | return "Generate config for jmxtrans to monitor kafka with JMX" 34 | } 35 | 36 | func (this *Jmx) Help() string { 37 | help := fmt.Sprintf(` 38 | Usage: %s jmx [options] 39 | 40 | %s 41 | 42 | Options: 43 | 44 | -z zone 45 | 46 | -c cluster 47 | 48 | `, this.Cmd, this.Synopsis()) 49 | return strings.TrimSpace(help) 50 | } 51 | -------------------------------------------------------------------------------- /cmd/gk/command/lookup.go: 
-------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | "flag" 5 | "fmt" 6 | "net" 7 | "strings" 8 | 9 | "github.com/funkygao/gafka/ctx" 10 | "github.com/funkygao/gocli" 11 | ) 12 | 13 | type Lookup struct { 14 | Ui cli.Ui 15 | Cmd string 16 | } 17 | 18 | func (this *Lookup) Run(args []string) (exitCode int) { 19 | cmdFlags := flag.NewFlagSet("lookup", flag.ContinueOnError) 20 | cmdFlags.Usage = func() { this.Ui.Output(this.Help()) } 21 | if err := cmdFlags.Parse(args); err != nil { 22 | return 1 23 | } 24 | 25 | if len(args) == 0 { 26 | this.Ui.Error("missing ip:port") 27 | return 2 28 | } 29 | 30 | ipPort := args[len(args)-1] 31 | ip, port, err := net.SplitHostPort(ipPort) 32 | swallow(err) 33 | 34 | for _, host := range ctx.LookupIpPort(ip, port) { 35 | this.Ui.Output(host) 36 | } 37 | 38 | return 39 | } 40 | 41 | func (this *Lookup) Synopsis() string { 42 | return "Internal reverse DNS lookup utility" 43 | } 44 | 45 | func (this *Lookup) Help() string { 46 | help := fmt.Sprintf(` 47 | Usage: %s lookup [ip]:[port] 48 | 49 | %s 50 | 51 | e,g 52 | gk lookup 1.2.3.3: 53 | gk lookup :10008 54 | 55 | `, this.Cmd, this.Synopsis()) 56 | return strings.TrimSpace(help) 57 | } 58 | -------------------------------------------------------------------------------- /cmd/gk/command/migrate_test.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | "testing" 5 | ) 6 | 7 | func TestMigratePartitionNormalize(t *testing.T) { 8 | m := Migrate{ 9 | partition: "1-5", 10 | } 11 | m.normalizePartitions() 12 | t.Logf("%s", m.partition) 13 | } 14 | -------------------------------------------------------------------------------- /cmd/gk/command/mirror/config_test.go: -------------------------------------------------------------------------------- 1 | package mirror 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/funkygao/assert" 7 | ) 8 | 9 | func 
// internalTopics enumerates kafka-internal topics that mirroring must
// never copy between clusters.
var internalTopics = map[string]struct{}{
	"__consumer_offsets": {},
}
} 24 | 25 | switch mode { 26 | case "io": 27 | this.Ui.Output(fmt.Sprintf("perf record -e block:block_rq_complete -a sleep 10")) 28 | this.Ui.Output("perf script") 29 | } 30 | 31 | return 32 | } 33 | 34 | func (*Perf) Synopsis() string { 35 | return "Probe system low level performance problems with perf" 36 | } 37 | 38 | func (this *Perf) Help() string { 39 | help := fmt.Sprintf(` 40 | Usage: %s perf [options] 41 | 42 | %s 43 | 44 | -mode io|cpu 45 | 46 | 47 | `, this.Cmd, this.Synopsis()) 48 | return strings.TrimSpace(help) 49 | } 50 | -------------------------------------------------------------------------------- /cmd/gk/command/protos/ascii.go: -------------------------------------------------------------------------------- 1 | package protos 2 | 3 | type ascii struct{} 4 | 5 | func (a *ascii) Unmarshal(srcPort, dstPort uint16, payload []byte) string { 6 | return string(payload) 7 | } 8 | -------------------------------------------------------------------------------- /cmd/gk/command/protos/protocol.go: -------------------------------------------------------------------------------- 1 | package protos 2 | 3 | type Protocol interface { 4 | Unmarshal(srcPort, dstPort uint16, payload []byte) string 5 | } 6 | 7 | func New(prot string, serverPort int) Protocol { 8 | switch prot { 9 | case "ascii": 10 | return &ascii{} 11 | 12 | case "zk": 13 | return &zk{serverPort: serverPort} 14 | 15 | case "kafka": 16 | return &kafka{serverPort: serverPort} 17 | 18 | default: 19 | return nil 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /cmd/gk/command/template.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | "bytes" 5 | "io/ioutil" 6 | "os" 7 | "os/user" 8 | "path" 9 | "strconv" 10 | "text/template" 11 | ) 12 | 13 | func writeFileFromTemplate(tplSrc, dst string, perm os.FileMode, 14 | data interface{}, chownTo *user.User) { 15 | b, err := 
Asset(tplSrc) 16 | swallow(err) 17 | if data != nil { 18 | wr := &bytes.Buffer{} 19 | t := template.Must(template.New(tplSrc).Parse(string(b))) 20 | err = t.Execute(wr, data) 21 | swallow(err) 22 | 23 | if err = ioutil.WriteFile(dst, wr.Bytes(), perm); err != nil { 24 | os.MkdirAll(path.Dir(dst), 0755) 25 | } 26 | 27 | err = ioutil.WriteFile(dst, wr.Bytes(), perm) 28 | swallow(err) 29 | 30 | return 31 | } 32 | 33 | // no template, just file copy 34 | if err = ioutil.WriteFile(dst, b, perm); err != nil { 35 | os.MkdirAll(path.Dir(dst), 0755) 36 | } 37 | 38 | err = ioutil.WriteFile(dst, b, perm) 39 | swallow(err) 40 | 41 | if chownTo != nil { 42 | chown(dst, chownTo) 43 | } 44 | } 45 | 46 | func chown(fp string, chownTo *user.User) { 47 | uid, _ := strconv.Atoi(chownTo.Uid) 48 | gid, _ := strconv.Atoi(chownTo.Gid) 49 | swallow(os.Chown(fp, uid, gid)) 50 | } 51 | -------------------------------------------------------------------------------- /cmd/gk/command/template/bash_autocomplete.dbc: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | : ${PROG:=$(basename ${BASH_SOURCE})} 4 | 5 | _cli_bash_autocomplete() { 6 | local cur opts base 7 | COMPREPLY=() 8 | cur="${COMP_WORDS[COMP_CWORD]}" 9 | opts=$( ${COMP_WORDS[@]:0:$COMP_CWORD} --generate-bash-completion ) 10 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) 11 | return 0 12 | } 13 | 14 | complete -F _cli_bash_autocomplete dbc 15 | -------------------------------------------------------------------------------- /cmd/gk/command/template/bash_autocomplete.es: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash 2 | 3 | : ${PROG:=$(basename ${BASH_SOURCE})} 4 | 5 | _cli_bash_autocomplete() { 6 | local cur opts base 7 | COMPREPLY=() 8 | cur="${COMP_WORDS[COMP_CWORD]}" 9 | opts=$( ${COMP_WORDS[@]:0:$COMP_CWORD} --generate-bash-completion ) 10 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) 11 | return 0 12 | } 13 | 14 | complete -F _cli_bash_autocomplete es 15 | -------------------------------------------------------------------------------- /cmd/gk/command/template/bash_autocomplete.gk: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | : ${PROG:=$(basename ${BASH_SOURCE})} 4 | 5 | _cli_bash_autocomplete() { 6 | local cur opts base 7 | COMPREPLY=() 8 | cur="${COMP_WORDS[COMP_CWORD]}" 9 | opts=$( ${COMP_WORDS[@]:0:$COMP_CWORD} --generate-bash-completion ) 10 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) 11 | return 0 12 | } 13 | 14 | complete -F _cli_bash_autocomplete gk 15 | -------------------------------------------------------------------------------- /cmd/gk/command/template/bash_autocomplete.zk: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | : ${PROG:=$(basename ${BASH_SOURCE})} 4 | 5 | _cli_bash_autocomplete() { 6 | local cur opts base 7 | COMPREPLY=() 8 | cur="${COMP_WORDS[COMP_CWORD]}" 9 | opts=$( ${COMP_WORDS[@]:0:$COMP_CWORD} --generate-bash-completion ) 10 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) 11 | return 0 12 | } 13 | 14 | complete -F _cli_bash_autocomplete zk 15 | -------------------------------------------------------------------------------- /cmd/gk/command/template/bin/kafka-preferred-replica-election.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 
5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.admin.PreferredReplicaLeaderElectionCommand $@ 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/bin/kafka-reassign-partitions.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.admin.ReassignPartitionsCommand $@ 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/bin/kafka-topics.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.admin.TopicCommand $@ 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/bin/setenv.sh: -------------------------------------------------------------------------------- 1 | export KAFKA_HEAP_OPTS="-Xmx4G -Xms4G" 2 | -------------------------------------------------------------------------------- /cmd/gk/command/template/helix-core-0.6.7/conf/log4j.properties: -------------------------------------------------------------------------------- 1 | # Set root logger level to DEBUG and its only appender to A1. 
2 | log4j.rootLogger=ERROR,ROLLINGFILE 3 | 4 | # A1 5 | log4j.appender.A1=org.apache.log4j.ConsoleAppender 6 | log4j.appender.A1.layout=org.apache.log4j.PatternLayout 7 | log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n 8 | 9 | # ROLLINGFILE 10 | log4j.appender.ROLLINGFILE=org.apache.log4j.RollingFileAppender 11 | log4j.appender.ROLLINGFILE.Threshold=ERROR 12 | log4j.appender.ROLLINGFILE.File=helix.log 13 | log4j.appender.ROLLINGFILE.MaxFileSize=500MB 14 | log4j.appender.ROLLINGFILE.MaxBackupIndex=10 15 | log4j.appender.ROLLINGFILE.layout=org.apache.log4j.PatternLayout 16 | log4j.appender.ROLLINGFILE.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n 17 | 18 | log4j.logger.org.I0Itec=ERROR 19 | log4j.logger.org.apache=ERROR 20 | -------------------------------------------------------------------------------- /cmd/gk/command/template/helix-core-0.6.7/repo/com/101tec/zkclient/0.5/zkclient-0.5.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/helix-core-0.6.7/repo/com/101tec/zkclient/0.5/zkclient-0.5.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/helix-core-0.6.7/repo/com/google/guava/guava/15.0/guava-15.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/helix-core-0.6.7/repo/com/google/guava/guava/15.0/guava-15.0.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/helix-core-0.6.7/repo/commons-cli/commons-cli/1.2/commons-cli-1.2.jar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/helix-core-0.6.7/repo/commons-cli/commons-cli/1.2/commons-cli-1.2.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/helix-core-0.6.7/repo/commons-codec/commons-codec/1.6/commons-codec-1.6.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/helix-core-0.6.7/repo/commons-codec/commons-codec/1.6/commons-codec-1.6.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/helix-core-0.6.7/repo/commons-io/commons-io/1.4/commons-io-1.4.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/helix-core-0.6.7/repo/commons-io/commons-io/1.4/commons-io-1.4.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/helix-core-0.6.7/repo/io/netty/netty/3.10.5.Final/netty-3.10.5.Final.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/helix-core-0.6.7/repo/io/netty/netty/3.10.5.Final/netty-3.10.5.Final.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/helix-core-0.6.7/repo/jline/jline/0.9.94/jline-0.9.94.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/helix-core-0.6.7/repo/jline/jline/0.9.94/jline-0.9.94.jar 
-------------------------------------------------------------------------------- /cmd/gk/command/template/helix-core-0.6.7/repo/log4j/log4j/1.2.15/log4j-1.2.15.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/helix-core-0.6.7/repo/log4j/log4j/1.2.15/log4j-1.2.15.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/helix-core-0.6.7/repo/org/apache/commons/commons-math/2.1/commons-math-2.1.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/helix-core-0.6.7/repo/org/apache/commons/commons-math/2.1/commons-math-2.1.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/helix-core-0.6.7/repo/org/apache/helix/helix-core/0.6.7/helix-core-0.6.7.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/helix-core-0.6.7/repo/org/apache/helix/helix-core/0.6.7/helix-core-0.6.7.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/helix-core-0.6.7/repo/org/apache/zookeeper/zookeeper/3.4.9/zookeeper-3.4.9.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/helix-core-0.6.7/repo/org/apache/zookeeper/zookeeper/3.4.9/zookeeper-3.4.9.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/helix-core-0.6.7/repo/org/codehaus/jackson/jackson-core-asl/1.8.5/jackson-core-asl-1.8.5.jar: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/helix-core-0.6.7/repo/org/codehaus/jackson/jackson-core-asl/1.8.5/jackson-core-asl-1.8.5.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/helix-core-0.6.7/repo/org/codehaus/jackson/jackson-mapper-asl/1.8.5/jackson-mapper-asl-1.8.5.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/helix-core-0.6.7/repo/org/codehaus/jackson/jackson-mapper-asl/1.8.5/jackson-mapper-asl-1.8.5.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/helix-core-0.6.7/repo/org/slf4j/slf4j-api/1.6.1/slf4j-api-1.6.1.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/helix-core-0.6.7/repo/org/slf4j/slf4j-api/1.6.1/slf4j-api-1.6.1.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/helix-core-0.6.7/repo/org/slf4j/slf4j-log4j12/1.6.1/slf4j-log4j12-1.6.1.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/helix-core-0.6.7/repo/org/slf4j/slf4j-log4j12/1.6.1/slf4j-log4j12-1.6.1.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/helix-core-0.6.7/repo/org/yaml/snakeyaml/1.12/snakeyaml-1.12.jar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/helix-core-0.6.7/repo/org/yaml/snakeyaml/1.12/snakeyaml-1.12.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/init.d/zookeeper: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | #chkconfig:2345 20 90 3 | #description:zookeeper 4 | #processname:zookeeper 5 | case $1 in 6 | start) su root {{.RootPath}}/bin/zkServer.sh start;; 7 | stop) su root {{.RootPath}}/bin/zkServer.sh stop;; 8 | status) su root {{.RootPath}}/bin/zkServer.sh status;; 9 | restart) su root {{.RootPath}}/bin/zkServer.sh restart;; 10 | *) echo "require start|stop|status|restart" ;; 11 | esac 12 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/connect-distributed.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | 17 | base_dir=$(dirname $0) 18 | 19 | if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then 20 | export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" 21 | fi 22 | 23 | exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.connect.cli.ConnectDistributed "$@" 24 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/connect-standalone.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | 17 | base_dir=$(dirname $0) 18 | 19 | if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then 20 | export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" 21 | fi 22 | 23 | exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.connect.cli.ConnectStandalone "$@" 24 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/kafka-acls.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.admin.AclCommand "$@" 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/kafka-configs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 
5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.admin.ConfigCommand "$@" 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/kafka-console-consumer.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | 17 | if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then 18 | export KAFKA_HEAP_OPTS="-Xmx512M" 19 | fi 20 | 21 | exec $(dirname $0)/kafka-run-class.sh kafka.tools.ConsoleConsumer "$@" 22 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/kafka-console-producer.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then 18 | export KAFKA_HEAP_OPTS="-Xmx512M" 19 | fi 20 | exec $(dirname $0)/kafka-run-class.sh kafka.tools.ConsoleProducer "$@" 21 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/kafka-consumer-groups.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 
5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.admin.ConsumerGroupCommand "$@" 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/kafka-consumer-offset-checker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.tools.ConsumerOffsetChecker "$@" 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/kafka-consumer-perf-test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then 18 | export KAFKA_HEAP_OPTS="-Xmx512M" 19 | fi 20 | exec $(dirname $0)/kafka-run-class.sh kafka.tools.ConsumerPerformance "$@" 21 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/kafka-mirror-maker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 
5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.tools.MirrorMaker "$@" 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/kafka-preferred-replica-election.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.admin.PreferredReplicaLeaderElectionCommand "$@" 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/kafka-producer-perf-test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then 18 | export KAFKA_HEAP_OPTS="-Xmx512M" 19 | fi 20 | exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.tools.ProducerPerformance "$@" 21 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/kafka-reassign-partitions.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 
5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.admin.ReassignPartitionsCommand "$@" 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/kafka-replay-log-producer.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.tools.ReplayLogProducer "$@" 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/kafka-replica-verification.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.tools.ReplicaVerificationTool "$@" 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/kafka-server-stop.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. 
You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | PIDS=$(ps ax | grep -i 'kafka\.Kafka' | grep java | grep -v grep | awk '{print $1}') 17 | 18 | if [ -z "$PIDS" ]; then 19 | echo "No kafka server to stop" 20 | exit 1 21 | else 22 | kill -s TERM $PIDS 23 | fi 24 | 25 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/kafka-simple-consumer-shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.tools.SimpleConsumerShell "$@" 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/kafka-topics.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.admin.TopicCommand "$@" 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/kafka-verifiable-consumer.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. 
You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then 18 | export KAFKA_HEAP_OPTS="-Xmx512M" 19 | fi 20 | exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.tools.VerifiableConsumer "$@" 21 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/kafka-verifiable-producer.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | 17 | if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then 18 | export KAFKA_HEAP_OPTS="-Xmx512M" 19 | fi 20 | exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.tools.VerifiableProducer "$@" 21 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/zookeeper-security-migration.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.admin.ZkSecurityMigrator "$@" 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/zookeeper-server-stop.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 
5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | PIDS=$(ps ax | grep java | grep -i QuorumPeerMain | grep -v grep | awk '{print $1}') 17 | 18 | if [ -z "$PIDS" ]; then 19 | echo "No zookeeper server to stop" 20 | exit 1 21 | else 22 | kill -s TERM $PIDS 23 | fi 24 | 25 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/bin/zookeeper-shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | 17 | if [ $# -lt 1 ]; 18 | then 19 | echo "USAGE: $0 zookeeper_host:port[/path] [args...]" 20 | exit 1 21 | fi 22 | 23 | exec $(dirname $0)/kafka-run-class.sh org.apache.zookeeper.ZooKeeperMain -server "$@" 24 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/aopalliance-repackaged-2.4.0-b34.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/aopalliance-repackaged-2.4.0-b34.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/argparse4j-0.5.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/argparse4j-0.5.0.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/connect-api-0.10.0.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/connect-api-0.10.0.0.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/connect-file-0.10.0.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/connect-file-0.10.0.0.jar -------------------------------------------------------------------------------- 
/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/connect-json-0.10.0.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/connect-json-0.10.0.0.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/connect-runtime-0.10.0.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/connect-runtime-0.10.0.0.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/guava-18.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/guava-18.0.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/hk2-api-2.4.0-b34.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/hk2-api-2.4.0-b34.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/hk2-locator-2.4.0-b34.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/hk2-locator-2.4.0-b34.jar -------------------------------------------------------------------------------- 
/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/hk2-utils-2.4.0-b34.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/hk2-utils-2.4.0-b34.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jackson-annotations-2.6.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jackson-annotations-2.6.0.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jackson-core-2.6.3.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jackson-core-2.6.3.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jackson-databind-2.6.3.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jackson-databind-2.6.3.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jackson-jaxrs-base-2.6.3.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jackson-jaxrs-base-2.6.3.jar 
-------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jackson-jaxrs-json-provider-2.6.3.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jackson-jaxrs-json-provider-2.6.3.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jackson-module-jaxb-annotations-2.6.3.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jackson-module-jaxb-annotations-2.6.3.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/javassist-3.18.2-GA.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/javassist-3.18.2-GA.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/javax.annotation-api-1.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/javax.annotation-api-1.2.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/javax.inject-1.jar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/javax.inject-1.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/javax.inject-2.4.0-b34.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/javax.inject-2.4.0-b34.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/javax.servlet-api-3.1.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/javax.servlet-api-3.1.0.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/javax.ws.rs-api-2.0.1.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/javax.ws.rs-api-2.0.1.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jersey-client-2.22.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jersey-client-2.22.2.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jersey-common-2.22.2.jar: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jersey-common-2.22.2.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jersey-container-servlet-2.22.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jersey-container-servlet-2.22.2.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jersey-container-servlet-core-2.22.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jersey-container-servlet-core-2.22.2.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jersey-guava-2.22.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jersey-guava-2.22.2.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jersey-media-jaxb-2.22.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jersey-media-jaxb-2.22.2.jar -------------------------------------------------------------------------------- 
/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jersey-server-2.22.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jersey-server-2.22.2.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jetty-continuation-9.2.15.v20160210.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jetty-continuation-9.2.15.v20160210.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jetty-http-9.2.15.v20160210.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jetty-http-9.2.15.v20160210.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jetty-io-9.2.15.v20160210.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jetty-io-9.2.15.v20160210.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jetty-security-9.2.15.v20160210.jar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jetty-security-9.2.15.v20160210.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jetty-server-9.2.15.v20160210.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jetty-server-9.2.15.v20160210.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jetty-servlet-9.2.15.v20160210.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jetty-servlet-9.2.15.v20160210.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jetty-servlets-9.2.15.v20160210.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jetty-servlets-9.2.15.v20160210.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jetty-util-9.2.15.v20160210.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jetty-util-9.2.15.v20160210.jar -------------------------------------------------------------------------------- 
/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jopt-simple-4.9.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/jopt-simple-4.9.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/kafka-clients-0.10.0.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/kafka-clients-0.10.0.0.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/kafka-log4j-appender-0.10.0.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/kafka-log4j-appender-0.10.0.0.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/kafka-streams-0.10.0.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/kafka-streams-0.10.0.0.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/kafka-streams-examples-0.10.0.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/kafka-streams-examples-0.10.0.0.jar 
-------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/kafka-tools-0.10.0.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/kafka-tools-0.10.0.0.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/kafka_2.10-0.10.0.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/kafka_2.10-0.10.0.0.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/log4j-1.2.17.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/log4j-1.2.17.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/lz4-1.3.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/lz4-1.3.0.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/metrics-core-2.2.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/metrics-core-2.2.0.jar 
-------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/osgi-resource-locator-1.0.1.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/osgi-resource-locator-1.0.1.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/reflections-0.9.10.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/reflections-0.9.10.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/rocksdbjni-4.4.1.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/rocksdbjni-4.4.1.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/scala-library-2.10.6.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/scala-library-2.10.6.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/slf4j-api-1.7.21.jar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/slf4j-api-1.7.21.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/slf4j-log4j12-1.7.21.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/slf4j-log4j12-1.7.21.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/snappy-java-1.1.2.4.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/snappy-java-1.1.2.4.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/validation-api-1.1.0.Final.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/validation-api-1.1.0.Final.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/zkclient-0.8.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/zkclient-0.8.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/zookeeper-3.4.6.jar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.10.0.0/libs/zookeeper-3.4.6.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.1.1/bin/kafka-preferred-replica-election.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.admin.PreferredReplicaLeaderElectionCommand $@ 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.1.1/bin/kafka-reassign-partitions.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 
5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.admin.ReassignPartitionsCommand $@ 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.1.1/bin/kafka-replay-log-producer.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.tools.ReplayLogProducer $@ 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.1.1/bin/kafka-server-stop.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | ps ax | grep -i 'kafka\.Kafka' | grep java | grep -v grep | awk '{print $1}' | xargs kill -SIGINT 17 | 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.1.1/bin/kafka-topics.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. 
You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.admin.TopicCommand $@ 18 | 19 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.1.1/libs/jopt-simple-3.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.1.1/libs/jopt-simple-3.2.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.1.1/libs/kafka_2.10-0.8.1.1.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.1.1/libs/kafka_2.10-0.8.1.1.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.1.1/libs/log4j-1.2.15.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.1.1/libs/log4j-1.2.15.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.1.1/libs/metrics-core-2.2.0.jar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.1.1/libs/metrics-core-2.2.0.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.1.1/libs/scala-library-2.10.1.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.1.1/libs/scala-library-2.10.1.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.1.1/libs/slf4j-api-1.7.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.1.1/libs/slf4j-api-1.7.2.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.1.1/libs/snappy-java-1.0.5.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.1.1/libs/snappy-java-1.0.5.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.1.1/libs/zkclient-0.3.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.1.1/libs/zkclient-0.3.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.1.1/libs/zookeeper-3.3.4.jar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.1.1/libs/zookeeper-3.3.4.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/bin/kafka-mirror-maker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.tools.MirrorMaker $@ 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/bin/kafka-preferred-replica-election.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. 
You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.admin.PreferredReplicaLeaderElectionCommand $@ 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/bin/kafka-reassign-partitions.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.admin.ReassignPartitionsCommand $@ 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/bin/kafka-replay-log-producer.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.tools.ReplayLogProducer $@ 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/bin/kafka-replica-verification.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. 
You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.tools.ReplicaVerificationTool $@ 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/bin/kafka-server-stop.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | ps ax | grep -i 'kafka\.Kafka' | grep java | grep -v grep | awk '{print $1}' | xargs kill -SIGTERM 17 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/bin/kafka-topics.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | 17 | exec $(dirname $0)/kafka-run-class.sh kafka.admin.TopicCommand $@ 18 | -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/jopt-simple-3.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/jopt-simple-3.2.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/kafka-clients-0.8.2.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/kafka-clients-0.8.2.2.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/kafka_2.10-0.8.2.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/kafka_2.10-0.8.2.2.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/log4j-1.2.16.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/log4j-1.2.16.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/lz4-1.2.0.jar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/lz4-1.2.0.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/metrics-core-2.2.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/metrics-core-2.2.0.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/scala-library-2.10.4.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/scala-library-2.10.4.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/slf4j-api-1.7.6.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/slf4j-api-1.7.6.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/slf4j-log4j12-1.6.1.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/slf4j-log4j12-1.6.1.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/snappy-java-1.1.1.7.jar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/snappy-java-1.1.1.7.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/zkclient-0.3.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/zkclient-0.3.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/zookeeper-3.4.6.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/zookeeper-3.4.6.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/zzz-kafka-influxdb-reporter-1.0.0-uber.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/kafka_2.10-0.8.2.2/libs/zzz-kafka-influxdb-reporter-1.0.0-uber.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/zk/conf/zoo.cfg: -------------------------------------------------------------------------------- 1 | tickTime=2000 2 | initLimit=10 3 | syncLimit=5 4 | 5 | # dataLogDir(txn log) and dataDir(snapshot) should be placed in 2 disk devices 6 | dataDir={{.RootPath}}/data 7 | dataLogDir={{.RootPath}}/log 8 | 9 | clientPort=2181 10 | maxClientCnxns=0 11 | 12 | // server.$id=$ip:leader_port:quorum_port 13 | {{.Servers}} 14 | 15 | # The number of snapshots to retain in dataDir 16 | autopurge.snapRetainCount=3 17 | # Purge 
task interval in hours 18 | autopurge.purgeInterval=1 19 | -------------------------------------------------------------------------------- /cmd/gk/command/template/zk/data/myid: -------------------------------------------------------------------------------- 1 | {{.MyId}} 2 | -------------------------------------------------------------------------------- /cmd/gk/command/template/zk/lib/jline-0.9.94.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/zk/lib/jline-0.9.94.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/zk/lib/log4j-1.2.16.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/zk/lib/log4j-1.2.16.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/zk/lib/netty-3.7.0.Final.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/zk/lib/netty-3.7.0.Final.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/zk/lib/slf4j-api-1.6.1.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/zk/lib/slf4j-api-1.6.1.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/zk/lib/slf4j-log4j12-1.6.1.jar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/zk/lib/slf4j-log4j12-1.6.1.jar -------------------------------------------------------------------------------- /cmd/gk/command/template/zk/zookeeper-3.4.6.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/funkygao/gafka/077b43fb2687eb43715b32c59896cb40bc68a4e3/cmd/gk/command/template/zk/zookeeper-3.4.6.jar -------------------------------------------------------------------------------- /cmd/gk/command/utils_test.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | "strconv" 5 | "testing" 6 | 7 | "github.com/funkygao/assert" 8 | ) 9 | 10 | func TestSortMap(t *testing.T) { 11 | m := make(map[string]int) 12 | for i := 0; i < 10; i++ { 13 | m[strconv.Itoa(i+10)] = i * 10 14 | } 15 | 16 | r := sortStrMap(m) 17 | t.Logf("%+v", r) 18 | } 19 | 20 | func TestPatternMatched(t *testing.T) { 21 | assert.Equal(t, true, patternMatched("orderstatus", "order")) 22 | assert.Equal(t, false, patternMatched("orderstatus", "~order")) 23 | assert.Equal(t, true, patternMatched("flashtrade_web", "~svc_hippo")) 24 | assert.Equal(t, false, patternMatched("34.StrollSearchRuleMsg.v1", "laxinRiskControl")) 25 | assert.Equal(t, true, patternMatched("34.laxinRiskControl.v1", "laxinRiskControl")) 26 | } 27 | 28 | func TestShortIp(t *testing.T) { 29 | assert.Equal(t, "44.212", shortIp("12.21.44.212")) 30 | } 31 | -------------------------------------------------------------------------------- /cmd/gk/command/webhook.go: -------------------------------------------------------------------------------- 1 | package command 2 | 3 | import ( 4 | "fmt" 5 | "strings" 6 | 7 | "github.com/funkygao/gocli" 8 | ) 9 | 10 | type Webhook struct { 11 | Ui cli.Ui 12 | Cmd string 13 | } 14 | 15 | func (this *Webhook) Run(args []string) (exitCode int) { 16 | 
return 17 | } 18 | 19 | func (*Webhook) Synopsis() string { 20 | return "Display kateway webhooks TODO" 21 | } 22 | 23 | func (this *Webhook) Help() string { 24 | help := fmt.Sprintf(` 25 | Usage: %s webhook [options] 26 | 27 | %s 28 | 29 | `, this.Cmd, this.Synopsis()) 30 | return strings.TrimSpace(help) 31 | } 32 | -------------------------------------------------------------------------------- /cmd/kateway/.gdbinit: -------------------------------------------------------------------------------- 1 | add-auto-load-safe-path /usr/local/go/src/runtime/runtime-gdb.py 2 | set confirm off 3 | set print pretty on 4 | source /usr/local/go/src/runtime/runtime-gdb.py 5 | -------------------------------------------------------------------------------- /cmd/kateway/api/v1/config.go: -------------------------------------------------------------------------------- 1 | package api 2 | 3 | import ( 4 | "time" 5 | ) 6 | 7 | type Config struct { 8 | AppId string 9 | Secret string 10 | 11 | Timeout time.Duration 12 | KeepAlive time.Duration 13 | 14 | Sub struct { 15 | Scheme string // https or http 16 | Endpoint string // host:port 17 | } 18 | 19 | Pub struct { 20 | Scheme string // http or https 21 | Endpoint string // host:port 22 | } 23 | 24 | Admin struct { 25 | Scheme string 26 | Endpoint string 27 | } 28 | 29 | Debug bool 30 | } 31 | 32 | func DefaultConfig(appid, secret string) *Config { 33 | cf := &Config{ 34 | AppId: appid, 35 | Secret: secret, 36 | Timeout: time.Minute, 37 | KeepAlive: time.Minute, 38 | Debug: false, 39 | } 40 | cf.Sub.Scheme = "http" 41 | cf.Pub.Scheme = "http" 42 | cf.Admin.Scheme = "http" 43 | return cf 44 | } 45 | -------------------------------------------------------------------------------- /cmd/kateway/api/v1/globals.go: -------------------------------------------------------------------------------- 1 | package api 2 | 3 | import ( 4 | "errors" 5 | ) 6 | 7 | var ( 8 | ErrSubStop = errors.New("sub stopped") 9 | ErrInvalidBury = 
errors.New("invalid bury name") 10 | ) 11 | 12 | const ( 13 | ShadowRetry = "retry" 14 | ShadowDead = "dead" 15 | 16 | UserAgent = "pubsub-go v0.1" 17 | ) 18 | -------------------------------------------------------------------------------- /cmd/kateway/api/v2/client.go: -------------------------------------------------------------------------------- 1 | package pubsub 2 | 3 | type Client struct { 4 | } 5 | 6 | func New(options ...func(c *Client) error) (*Client, error) { 7 | c := &Client{} 8 | for _, option := range options { 9 | if err := option(c); err != nil { 10 | return nil, err 11 | } 12 | } 13 | 14 | return c, nil 15 | } 16 | -------------------------------------------------------------------------------- /cmd/kateway/api/v2/config.go: -------------------------------------------------------------------------------- 1 | package pubsub 2 | 3 | import ( 4 | "time" 5 | ) 6 | 7 | type Config struct { 8 | Timeout time.Duration 9 | MaxRetries int 10 | } 11 | 12 | func NewConfig() *Config { 13 | return &Config{} 14 | } 15 | 16 | func (c *Config) WithTimeout(timeout time.Duration) *Config { 17 | c.Timeout = timeout 18 | return c 19 | } 20 | 21 | func (c *Config) WithMaxRetries(max int) *Config { 22 | c.MaxRetries = max 23 | return c 24 | } 25 | 26 | func (c *Config) MergeIn(cfgs ...*Config) { 27 | for _, other := range cfgs { 28 | mergeInConfig(c, other) 29 | } 30 | } 31 | 32 | func (c *Config) Copy(cfgs ...*Config) *Config { 33 | dst := NewConfig() 34 | dst.MergeIn(c) 35 | dst.MergeIn(cfgs...) 
36 | return dst 37 | } 38 | 39 | func mergeInConfig(dst *Config, src *Config) { 40 | } 41 | -------------------------------------------------------------------------------- /cmd/kateway/api/v2/service.go: -------------------------------------------------------------------------------- 1 | package pubsub 2 | 3 | import ( 4 | "golang.org/x/net/context" 5 | ) 6 | 7 | // 8 | type service interface { 9 | publishMessage(ctx context.Context, topic string, key, value []byte) 10 | fetchMessages(ctx context.Context) 11 | addJob(ctx context.Context) 12 | acknowledge(ctx context.Context) 13 | } 14 | -------------------------------------------------------------------------------- /cmd/kateway/bench/fasthttpd.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "flag" 5 | "fmt" 6 | "syscall" 7 | 8 | "github.com/valyala/fasthttp" 9 | ) 10 | 11 | var ( 12 | port int 13 | ) 14 | 15 | func init() { 16 | flag.IntVar(&port, "p", 9090, "http port to bind") 17 | flag.Parse() 18 | } 19 | 20 | func hello(ctx *fasthttp.RequestCtx) { 21 | ctx.SetContentType("text/plain; charset=utf8") 22 | ctx.Write([]byte("hello world")) 23 | 24 | } 25 | 26 | func main() { 27 | syscall.Dup2(1, 2) 28 | 29 | listen := fmt.Sprintf(":%d", port) 30 | fmt.Printf("listening on %s\n", listen) 31 | 32 | if err := fasthttp.ListenAndServe(listen, hello); err != nil { 33 | fmt.Printf("Error in ListenAndServe: %s\n", err) 34 | } 35 | 36 | } 37 | -------------------------------------------------------------------------------- /cmd/kateway/bench/webhookd.go: -------------------------------------------------------------------------------- 1 | // webhookd is a webhook endpoint that is used to demo kateway webhook feature. 
2 | package main 3 | 4 | import ( 5 | "io/ioutil" 6 | "log" 7 | "net/http" 8 | ) 9 | 10 | func main() { 11 | http.HandleFunc("/", handle) 12 | log.Println("listening on :9876") 13 | log.Fatal(http.ListenAndServe(":9876", nil)) 14 | } 15 | 16 | func handle(w http.ResponseWriter, r *http.Request) { 17 | defer r.Body.Close() 18 | 19 | body, err := ioutil.ReadAll(r.Body) 20 | if err != nil { 21 | log.Println(err) 22 | return 23 | } 24 | 25 | log.Printf("%s %s %+v %s", r.Method, r.RequestURI, r.Header, string(body)) 26 | } 27 | -------------------------------------------------------------------------------- /cmd/kateway/demo/demo.php: -------------------------------------------------------------------------------- 1 | 0) { 18 | // e,g timeout 19 | $ret = array(); 20 | $ret['_err'] = $errno; 21 | } 22 | 23 | //curl_close($handle); 24 | return $ret; 25 | } 26 | 27 | // sub 28 | for ($i=0; $i<5; $i++) { 29 | $ret = rest_call("http://localhost:9192/topics/v1/foobar/mygroup1?limit=5000000"); 30 | echo $i+1, ". 
", $ret, "\n"; 31 | } 32 | -------------------------------------------------------------------------------- /cmd/kateway/demo/java/Makefile: -------------------------------------------------------------------------------- 1 | all:run 2 | 3 | generate: 4 | mvn archetype:generate -DgroupId=com.foo -DartifactId=PubsubDemo -DarchetypeArtifactId=maven-archetype-quickstart -DinteractiveMode=false 5 | 6 | run: 7 | cd PubsubDemo; mvn package; mvn exec:java -Dexec.mainClass=com.foo.App 8 | 9 | idea: 10 | cd PubsubDemo; mvn idea:idea 11 | 12 | dist: 13 | cd PubsubDemo; mvn package assembly:single 14 | 15 | clean: 16 | cd PubsubDemo; mvn clean 17 | -------------------------------------------------------------------------------- /cmd/kateway/demo/java/PubsubDemo/src/main/assembly/assembly.xml: -------------------------------------------------------------------------------- 1 | 2 | package 3 | 4 | dir 5 | 6 | false 7 | 8 | 9 | true 10 | lib 11 | 12 | 13 | 14 | 15 | src/main/webapp 16 | /config 17 | 18 | 19 | true 20 | src/main/resources 21 | /config 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /cmd/kateway/demo/java/PubsubDemo/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | # Set root logger level to DEBUG and its only appender to A1. 2 | log4j.rootLogger=INFO, A1 3 | 4 | # A1 is set to be a ConsoleAppender. 5 | log4j.appender.A1=org.apache.log4j.ConsoleAppender 6 | 7 | # A1 uses PatternLayout. 
8 | log4j.appender.A1.layout=org.apache.log4j.PatternLayout 9 | log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n 10 | -------------------------------------------------------------------------------- /cmd/kateway/demo/java/PubsubDemo/src/test/java/com/foo/AppTest.java: -------------------------------------------------------------------------------- 1 | package com.foo; 2 | 3 | import junit.framework.Test; 4 | import junit.framework.TestCase; 5 | import junit.framework.TestSuite; 6 | 7 | /** 8 | * Unit test for simple App. 9 | */ 10 | public class AppTest 11 | extends TestCase 12 | { 13 | /** 14 | * Create the test case 15 | * 16 | * @param testName name of the test case 17 | */ 18 | public AppTest( String testName ) 19 | { 20 | super( testName ); 21 | } 22 | 23 | /** 24 | * @return the suite of tests being tested 25 | */ 26 | public static Test suite() 27 | { 28 | return new TestSuite( AppTest.class ); 29 | } 30 | 31 | /** 32 | * Rigourous Test :-) 33 | */ 34 | public void testApp() 35 | { 36 | assertTrue( true ); 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /cmd/kateway/gateway/const.go: -------------------------------------------------------------------------------- 1 | package gateway 2 | 3 | const ( 4 | HttpHeaderXForwardedFor = "X-Forwarded-For" 5 | HttpHeaderPartition = "X-Partition" 6 | HttpHeaderOffset = "X-Offset" 7 | HttpHeaderMsgBury = "X-Bury" 8 | HttpHeaderMsgKey = "X-Key" 9 | HttpHeaderMsgTag = "X-Tag" 10 | HttpHeaderJobId = "X-Job-Id" 11 | HttpHeaderAcceptEncoding = "Accept-Encoding" 12 | HttpHeaderContentEncoding = "Content-Encoding" 13 | HttpEncodingGzip = "gzip" 14 | 15 | UrlParamTopic = "topic" 16 | UrlParamVersion = "ver" 17 | UrlParamAppid = "appid" 18 | UrlParamGroup = "group" 19 | 20 | MaxPartitionKeyLen = 256 21 | ) 22 | 23 | var ( 24 | ResponseOk = []byte(`{"ok":1}`) 25 | 26 | HttpHeaderAppid = "Appid" 27 | HttpHeaderPubkey = "Pubkey" 28 | HttpHeaderSubkey = 
"Subkey" 29 | ) 30 | -------------------------------------------------------------------------------- /cmd/kateway/gateway/doc.go: -------------------------------------------------------------------------------- 1 | // Package gateway implements the full PubSub gateway features. 2 | // It's the core of PubSub system. 3 | package gateway 4 | -------------------------------------------------------------------------------- /cmd/kateway/gateway/errors.go: -------------------------------------------------------------------------------- 1 | package gateway 2 | 3 | import ( 4 | "errors" 5 | ) 6 | 7 | var ( 8 | ErrClientGone = errors.New("remote client gone") 9 | ErrTooBigMessage = errors.New("too big message") 10 | ErrTooSmallMessage = errors.New("too small message") 11 | ErrIllegalTaggedMessage = errors.New("illegal tagged message") 12 | ErrClientKilled = errors.New("client killed") 13 | ErrBadResponseWriter = errors.New("ResponseWriter Close not supported") 14 | ErrPartitionOutOfRange = errors.New("partition out of range") 15 | ErrOffsetOutOfRange = errors.New("offset out of range") 16 | ) 17 | -------------------------------------------------------------------------------- /cmd/kateway/gateway/handler_metrics.go: -------------------------------------------------------------------------------- 1 | package gateway 2 | 3 | import ( 4 | "net/http" 5 | "net/url" 6 | 7 | "github.com/funkygao/httprouter" 8 | "github.com/influxdata/influxdb/client" 9 | ) 10 | 11 | //go:generate goannotation $GOFILE 12 | // @rest TODO 13 | func (this *Gateway) appMetricsHandler(w http.ResponseWriter, r *http.Request, params httprouter.Params) { 14 | myAppid := r.Header.Get(HttpHeaderAppid) // TODO auth 15 | if myAppid == "" { 16 | 17 | } 18 | 19 | u, _ := url.Parse(Options.InfluxServer) 20 | conn, err := client.NewClient(client.Config{ 21 | URL: *u, 22 | }) 23 | if err != nil { 24 | return 25 | } 26 | conn.Query(client.Query{ 27 | Command: "", 28 | Database: Options.InfluxDbName, 29 | }) 30 | 
31 | } 32 | -------------------------------------------------------------------------------- /cmd/kateway/gateway/handler_pub_ws.go: -------------------------------------------------------------------------------- 1 | // +build !fasthttp 2 | 3 | package gateway 4 | 5 | import ( 6 | "net/http" 7 | 8 | "github.com/funkygao/httprouter" 9 | log "github.com/funkygao/log4go" 10 | ) 11 | 12 | //go:generate goannotation $GOFILE 13 | // @rest POST /v1/ws/msgs/:topic/:ver 14 | // TODO not implemented yet 15 | func (this *pubServer) pubWsHandler(w http.ResponseWriter, r *http.Request, params httprouter.Params) { 16 | ws, err := upgrader.Upgrade(w, r, nil) 17 | if err != nil { 18 | log.Error("%s: %v", r.RemoteAddr, err) 19 | return 20 | } 21 | 22 | defer ws.Close() 23 | } 24 | -------------------------------------------------------------------------------- /cmd/kateway/gateway/handler_sub_ack_test.go: -------------------------------------------------------------------------------- 1 | package gateway 2 | 3 | import ( 4 | "encoding/json" 5 | "testing" 6 | 7 | "github.com/funkygao/assert" 8 | ) 9 | 10 | func TestJsonUnmarshalAckOffsets(t *testing.T) { 11 | var acks ackOffsets 12 | s := ` 13 | [{"partition":5,"offset":124},{"partition":2,"offset":893}] 14 | ` 15 | err := json.Unmarshal([]byte(s), &acks) 16 | assert.Equal(t, nil, err) 17 | assert.Equal(t, 2, len(acks)) 18 | t.Logf("%#v", acks) 19 | assert.Equal(t, 2, acks[1].Partition) 20 | assert.Equal(t, int64(124), acks[0].Offset) 21 | } 22 | -------------------------------------------------------------------------------- /cmd/kateway/gateway/handler_token.go: -------------------------------------------------------------------------------- 1 | // +build !fasthttp 2 | 3 | package gateway 4 | 5 | import ( 6 | "net/http" 7 | 8 | //"github.com/funkygao/gafka/cmd/kateway/manager" 9 | "github.com/funkygao/httprouter" 10 | ) 11 | 12 | //go:generate goannotation $GOFILE 13 | // @rest GET /v1/auth 14 | func (this *pubServer) 
authHandler(w http.ResponseWriter, r *http.Request, params httprouter.Params) { 15 | var ( 16 | appid = r.Header.Get("X-App-Id") 17 | secret = r.Header.Get("X-App-Secret") 18 | ) 19 | 20 | // TODO manager auth first 21 | 22 | tokenString, err := jwtToken(appid, secret) 23 | if err != nil { 24 | // TODO 25 | return 26 | } 27 | 28 | w.Write([]byte(tokenString)) 29 | } 30 | -------------------------------------------------------------------------------- /cmd/kateway/gateway/hijack.go: -------------------------------------------------------------------------------- 1 | package gateway 2 | 3 | import ( 4 | "io" 5 | "net/http" 6 | ) 7 | 8 | func hijackServer(w http.ResponseWriter) (io.ReadCloser, io.Writer, error) { 9 | conn, _, err := w.(http.Hijacker).Hijack() 10 | if err != nil { 11 | return nil, nil, err 12 | } 13 | 14 | // Flush the options to make sure the client sets the raw mode 15 | conn.Write([]byte{}) 16 | return conn, conn, nil 17 | } 18 | 19 | func closeStreams(streams ...interface{}) { 20 | for _, stream := range streams { 21 | if tcpc, ok := stream.(interface { 22 | CloseWrite() error 23 | }); ok { 24 | tcpc.CloseWrite() 25 | } else if closer, ok := stream.(io.Closer); ok { 26 | closer.Close() 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /cmd/kateway/gateway/jwt.go: -------------------------------------------------------------------------------- 1 | package gateway 2 | 3 | import ( 4 | "errors" 5 | 6 | "github.com/dgrijalva/jwt-go" 7 | "github.com/funkygao/golib/hack" 8 | ) 9 | 10 | var errInvalidToken = errors.New("Invalid token") 11 | 12 | func jwtToken(appid, secret string) (string, error) { 13 | token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{ 14 | "appid": appid, 15 | }) 16 | 17 | // Sign and get the complete encoded token as a string using the secret 18 | tokenString, err := token.SignedString(hack.Byte(secret)) 19 | if err != nil { 20 | return "", err 21 | } 22 | 23 | 
return tokenString, nil 24 | } 25 | 26 | func tokenDecode(tokenString string) (appid string, err error) { 27 | token, err := jwt.Parse(tokenString, func(token *jwt.Token) (interface{}, error) { 28 | return "", nil 29 | }) 30 | 31 | if claims, ok := token.Claims.(jwt.MapClaims); ok && token.Valid { 32 | return claims["appid"].(string), nil 33 | } 34 | 35 | return "", errInvalidToken 36 | } 37 | -------------------------------------------------------------------------------- /cmd/kateway/gateway/jwt_test.go: -------------------------------------------------------------------------------- 1 | package gateway 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/funkygao/assert" 7 | ) 8 | 9 | func TestJwtToken(t *testing.T) { 10 | t.SkipNow() 11 | 12 | token, err := jwtToken("appid", "secret") 13 | assert.Equal(t, nil, err) 14 | assert.Equal(t, "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhcHBpZCI6ImFwcGlkIn0.YaURi84SXE2SYFgVnJzN8MW5PdN2xgHRqdNzPF_-usY", token) 15 | 16 | appid, err := tokenDecode(token) 17 | assert.Equal(t, nil, err) 18 | assert.Equal(t, "exp", appid) 19 | } 20 | -------------------------------------------------------------------------------- /cmd/kateway/gateway/metrics_test.go: -------------------------------------------------------------------------------- 1 | package gateway 2 | 3 | import ( 4 | "testing" 5 | "time" 6 | 7 | "github.com/funkygao/gafka/ctx" 8 | log "github.com/funkygao/log4go" 9 | ) 10 | 11 | func init() { 12 | ctx.LoadFromHome() 13 | log.Disable() 14 | } 15 | 16 | func BenchmarkMetricsCounterWithoutLock(b *testing.B) { 17 | s := NewServerMetrics(time.Hour, nil) 18 | for i := 0; i < b.N; i++ { 19 | s.TotalConns.Inc(1) 20 | } 21 | } 22 | 23 | func BenchmarkMetricsPubTryQps(b *testing.B) { 24 | p := NewPubMetrics(nil) 25 | for i := 0; i < b.N; i++ { 26 | p.PubTryQps.Mark(1) 27 | } 28 | } 29 | 30 | func BenchmarkMetricsPubOkCounter(b *testing.B) { 31 | p := NewPubMetrics(nil) 32 | for i := 0; i < b.N; i++ { 33 | p.PubOk("appid", "topic", 
"ver") 34 | } 35 | } 36 | 37 | func BenchmarkMetricsQpsMeter(b *testing.B) { 38 | p := NewPubMetrics(nil) 39 | for i := 0; i < b.N; i++ { 40 | p.PubQps.Mark(1) 41 | } 42 | } 43 | 44 | func BenchmarkMetricsLatencyHistogram(b *testing.B) { 45 | p := NewPubMetrics(nil) 46 | for i := 0; i < b.N; i++ { 47 | p.PubLatency.Update(5) 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /cmd/kateway/gateway/middleware_fast.go: -------------------------------------------------------------------------------- 1 | // +build fasthttp 2 | 3 | package gateway 4 | -------------------------------------------------------------------------------- /cmd/kateway/gateway/middleware_test.go: -------------------------------------------------------------------------------- 1 | package gateway 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/funkygao/gafka/mpool" 7 | ) 8 | 9 | // 764 ns/op 96 B/op 4 allocs/op 10 | func BenchmarkBuildCommonLogLine(b *testing.B) { 11 | gw := &Gateway{} 12 | r, err := mockHttpRequest() 13 | if err != nil { 14 | b.Fatal(err) 15 | } 16 | 17 | b.ReportAllocs() 18 | for i := 0; i < b.N; i++ { 19 | buf := mpool.AccessLogLineBufferGet()[0:] 20 | gw.buildCommonLogLine(buf, r, 200, 100) 21 | mpool.AccessLogLineBufferPut(buf) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /cmd/kateway/gateway/server.go: -------------------------------------------------------------------------------- 1 | package gateway 2 | 3 | import ( 4 | "crypto/tls" 5 | "net" 6 | "net/http" 7 | ) 8 | 9 | type waitExitFunc func(exit <-chan struct{}) 10 | type connStateFunc func(c net.Conn, cs http.ConnState) 11 | type onConnNewFunc func(net.Conn) 12 | type onConnCloseFunc func(net.Conn) 13 | 14 | func setupHttpsListener(listener net.Listener, certFile, keyFile string) (net.Listener, *tls.Config, error) { 15 | cer, err := tls.LoadX509KeyPair(certFile, keyFile) 16 | if err != nil { 17 | return nil, nil, 
err 18 | } 19 | 20 | config := &tls.Config{ 21 | NextProtos: []string{"http/1.1", "h2"}, 22 | Certificates: []tls.Certificate{cer}, 23 | } 24 | 25 | tlsListener := tls.NewListener(listener, config) 26 | return tlsListener, config, nil 27 | } 28 | -------------------------------------------------------------------------------- /cmd/kateway/gateway/server_man.go: -------------------------------------------------------------------------------- 1 | package gateway 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/funkygao/golib/ratelimiter" 7 | ) 8 | 9 | // management server 10 | type manServer struct { 11 | *webServer 12 | 13 | throttleAddTopic *ratelimiter.LeakyBuckets 14 | throttleSubStatus *ratelimiter.LeakyBuckets 15 | } 16 | 17 | func newManServer(httpAddr, httpsAddr string, maxClients int, gw *Gateway) *manServer { 18 | this := &manServer{ 19 | webServer: newWebServer("man_server", httpAddr, httpsAddr, maxClients, time.Minute, gw), 20 | throttleAddTopic: ratelimiter.NewLeakyBuckets(60, time.Minute), 21 | throttleSubStatus: ratelimiter.NewLeakyBuckets(60, time.Minute), 22 | } 23 | 24 | return this 25 | } 26 | -------------------------------------------------------------------------------- /cmd/kateway/gateway/server_test.go: -------------------------------------------------------------------------------- 1 | package gateway 2 | 3 | import ( 4 | "sync" 5 | "testing" 6 | ) 7 | 8 | func BenchmarkConcurrentChannel(b *testing.B) { 9 | ch := make(chan struct{}, 500) 10 | go func() { 11 | for { 12 | <-ch 13 | } 14 | }() 15 | 16 | b.RunParallel(func(pb *testing.PB) { 17 | for pb.Next() { 18 | ch <- struct{}{} 19 | } 20 | }) 21 | } 22 | 23 | func BenchmarkConcurrentMutex(b *testing.B) { 24 | var mu sync.Mutex 25 | b.RunParallel(func(pb *testing.PB) { 26 | for pb.Next() { 27 | mu.Lock() 28 | mu.Unlock() 29 | } 30 | }) 31 | } 32 | 33 | func BenchmarkConcurrentRWMutex(b *testing.B) { 34 | var mu sync.RWMutex 35 | b.RunParallel(func(pb *testing.PB) { 36 | for pb.Next() { 37 | 
mu.RLock() 38 | mu.RUnlock() 39 | } 40 | }) 41 | } 42 | -------------------------------------------------------------------------------- /cmd/kateway/gateway/ulimit_darwin.go: -------------------------------------------------------------------------------- 1 | // +build darwin 2 | 3 | package gateway 4 | 5 | func EnsureServerUlimit() { 6 | checkUlimit(10000) 7 | } 8 | -------------------------------------------------------------------------------- /cmd/kateway/gateway/ulimit_linux.go: -------------------------------------------------------------------------------- 1 | // +build linux 2 | 3 | package gateway 4 | 5 | func EnsureServerUlimit() { 6 | checkUlimit(65535) 7 | } 8 | -------------------------------------------------------------------------------- /cmd/kateway/gateway/ws.go: -------------------------------------------------------------------------------- 1 | package gateway 2 | 3 | import ( 4 | "github.com/gorilla/websocket" 5 | ) 6 | 7 | var ( 8 | upgrader = websocket.Upgrader{ 9 | ReadBufferSize: 1024, 10 | WriteBufferSize: 1024, 11 | } 12 | ) 13 | -------------------------------------------------------------------------------- /cmd/kateway/hh/disk/block_test.go: -------------------------------------------------------------------------------- 1 | package disk 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/funkygao/assert" 7 | ) 8 | 9 | func TestBlockBasic(t *testing.T) { 10 | b := block{ 11 | magic: currentMagic, 12 | key: []byte("abc"), 13 | value: []byte("12345678"), 14 | } 15 | 16 | assert.Equal(t, uint32(3), b.keyLen()) 17 | assert.Equal(t, uint32(8), b.valueLen()) 18 | } 19 | 20 | func TestBlockReadWrite(t *testing.T) { 21 | t.SkipNow() 22 | } 23 | -------------------------------------------------------------------------------- /cmd/kateway/hh/disk/config.go: -------------------------------------------------------------------------------- 1 | package disk 2 | 3 | import ( 4 | "errors" 5 | "time" 6 | ) 7 | 8 | type Config struct { 9 | Dirs []string 
10 | PurgeInterval time.Duration 11 | MaxAge time.Duration 12 | } 13 | 14 | func DefaultConfig() *Config { 15 | return &Config{ 16 | PurgeInterval: defaultPurgeInterval, 17 | MaxAge: defaultMaxAge, 18 | } 19 | } 20 | 21 | func (this *Config) Validate() error { 22 | if len(this.Dirs) == 0 { 23 | return errors.New("hh Dirs must be specified") 24 | } 25 | 26 | return nil 27 | } 28 | -------------------------------------------------------------------------------- /cmd/kateway/hh/disk/disk_test.go: -------------------------------------------------------------------------------- 1 | package disk 2 | 3 | import ( 4 | "fmt" 5 | "os" 6 | "testing" 7 | 8 | "github.com/funkygao/assert" 9 | ) 10 | 11 | func TestConfigValidate(t *testing.T) { 12 | cfg := DefaultConfig() 13 | assert.NotEqual(t, nil, cfg.Validate()) 14 | } 15 | 16 | func TestServiceNextBaseDir(t *testing.T) { 17 | cfg := DefaultConfig() 18 | cfg.Dirs = []string{"a", "b", "c"} 19 | defer func() { 20 | for _, dir := range cfg.Dirs { 21 | os.RemoveAll(dir) 22 | } 23 | }() 24 | 25 | s := New(cfg) 26 | assert.Equal(t, nil, s.Start()) 27 | s.Append("c1", "t1", []byte("key"), []byte("value")) 28 | for i := 0; i < 10; i++ { 29 | s.Append(fmt.Sprintf("c%d", i), "t1", []byte("key"), []byte("value")) 30 | 31 | t.Logf("next dir: %s", s.(*Service).nextBaseDir()) 32 | } 33 | 34 | s.Stop() 35 | } 36 | -------------------------------------------------------------------------------- /cmd/kateway/hh/disk/doc.go: -------------------------------------------------------------------------------- 1 | // Package disk implements a disk-backend hinted handoff which 2 | // uses raft for replication. 
3 | package disk 4 | -------------------------------------------------------------------------------- /cmd/kateway/hh/disk/errors.go: -------------------------------------------------------------------------------- 1 | package disk 2 | 3 | import ( 4 | "fmt" 5 | ) 6 | 7 | var ( 8 | ErrNotOpen = fmt.Errorf("service not open") 9 | ErrQueueNotOpen = fmt.Errorf("queue not open") 10 | ErrQueueOpen = fmt.Errorf("queue is open") 11 | ErrQueueFull = fmt.Errorf("queue is full") 12 | ErrSegmentNotOpen = fmt.Errorf("segment not open") 13 | ErrSegmentCorrupt = fmt.Errorf("segment file corrupted") 14 | ErrSegmentFull = fmt.Errorf("segment is full") 15 | ErrEOQ = fmt.Errorf("end of queue") 16 | ErrCursorNotFound = fmt.Errorf("cursor not found") 17 | ErrCursorOutOfRange = fmt.Errorf("cursor out of range") 18 | ErrHeadIsTail = fmt.Errorf("head is tail") 19 | ) 20 | -------------------------------------------------------------------------------- /cmd/kateway/hh/disk/globals.go: -------------------------------------------------------------------------------- 1 | package disk 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/funkygao/golib/timewheel" 7 | log "github.com/funkygao/log4go" 8 | ) 9 | 10 | const ( 11 | cursorFile = "cursor.dmp" 12 | 13 | defaultSegmentSize = 100 << 20 // if each block=1k, can hold up to 100k blocks 14 | maxBlockSize = 1 << 20 15 | 16 | defaultPurgeInterval = time.Minute * 10 17 | defaultMaxAge = time.Hour * 24 * 7 18 | initialBackoff = time.Second 19 | maxBackoff = time.Second * 31 20 | defaultMaxQueueSize = -1 // unlimited 21 | defaultMaxRetries = 5 22 | flusherMaxRetries = 3 23 | pollSleep = time.Second 24 | dumpPerBlocks = 100 25 | ) 26 | 27 | var ( 28 | DisableBufio = true 29 | Auditor *log.Logger 30 | 31 | currentMagic = [2]byte{0, 0} 32 | 33 | timer *timewheel.TimeWheel 34 | 35 | // group commit 36 | flushEveryBlocks = 100 37 | flushInterval = time.Second 38 | ) 39 | -------------------------------------------------------------------------------- 
/cmd/kateway/hh/disk/housekeep.go: -------------------------------------------------------------------------------- 1 | package disk 2 | 3 | import ( 4 | "time" 5 | 6 | log "github.com/funkygao/log4go" 7 | ) 8 | 9 | func (q *queue) housekeeping() { 10 | defer func() { 11 | log.Trace("queue[%s] housekeeping done", q.ident()) 12 | q.wg.Done() 13 | }() 14 | 15 | log.Trace("queue[%s] start housekeeping...", q.ident()) 16 | 17 | purgeTick := time.NewTicker(q.purgeInterval) 18 | defer purgeTick.Stop() 19 | 20 | cursorChkpnt := time.NewTicker(time.Second) 21 | defer cursorChkpnt.Stop() 22 | 23 | for { 24 | select { 25 | case <-purgeTick.C: 26 | if err := q.Purge(); err != nil { 27 | log.Error("queue[%s] purge: %s", q.ident(), err) 28 | } 29 | 30 | case <-cursorChkpnt.C: 31 | if err := q.cursor.dump(); err != nil { 32 | log.Error("queue[%s] cursor checkpoint: %s", q.ident(), err) 33 | } 34 | 35 | case <-q.quit: 36 | return 37 | } 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /cmd/kateway/hh/disk/index.go: -------------------------------------------------------------------------------- 1 | package disk 2 | 3 | // index is a memory only heap struct which is rebuilt on boot. 
4 | type index struct { 5 | ctx *queue 6 | } 7 | 8 | func newIndex(ctx *queue) *index { 9 | return &index{ctx: ctx} 10 | } 11 | -------------------------------------------------------------------------------- /cmd/kateway/hh/disk/replicator.go: -------------------------------------------------------------------------------- 1 | package disk 2 | 3 | type replicator struct { 4 | } 5 | -------------------------------------------------------------------------------- /cmd/kateway/hh/disk/segment_test.go: -------------------------------------------------------------------------------- 1 | package disk 2 | 3 | import ( 4 | "os" 5 | "testing" 6 | 7 | "github.com/funkygao/assert" 8 | ) 9 | 10 | func TestSegment(t *testing.T) { 11 | path := "/Users/funky/gopkg/src/github.com/funkygao/gafka/cmd/kateway/hh/disk/segment.001" 12 | defer os.Remove(path) 13 | 14 | s, err := newSegment(1, path, 2<<20) 15 | assert.Equal(t, nil, err) 16 | b := &block{ 17 | key: []byte("hello"), 18 | value: []byte("world"), 19 | } 20 | s.Append(b) 21 | s.flush() 22 | 23 | t.Logf("%s", s.wfile.Name()) 24 | 25 | s.Seek(0) 26 | b1 := new(block) 27 | s.ReadOne(b1) 28 | assert.Equal(t, "hello", string(b1.key)) 29 | assert.Equal(t, "world", string(b1.value)) 30 | 31 | } 32 | -------------------------------------------------------------------------------- /cmd/kateway/hh/disk/types.go: -------------------------------------------------------------------------------- 1 | package disk 2 | 3 | import ( 4 | "path/filepath" 5 | ) 6 | 7 | type clusterTopic struct { 8 | cluster, topic string 9 | } 10 | 11 | func (ct clusterTopic) ClusterDir(base string) string { 12 | return filepath.Join(base, ct.cluster) 13 | } 14 | 15 | func (ct clusterTopic) TopicDir(base string) string { 16 | return filepath.Join(base, ct.cluster, ct.topic) 17 | } 18 | -------------------------------------------------------------------------------- /cmd/kateway/hh/disk/types_test.go: 
-------------------------------------------------------------------------------- 1 | package disk 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/funkygao/assert" 7 | ) 8 | 9 | func TestClusterTopicDir(t *testing.T) { 10 | ct := clusterTopic{"cluster", "topic"} 11 | assert.Equal(t, "/var/cluster", ct.ClusterDir("/var")) 12 | assert.Equal(t, "/cluster/topic", ct.TopicDir("/")) 13 | } 14 | -------------------------------------------------------------------------------- /cmd/kateway/hh/disk/util.go: -------------------------------------------------------------------------------- 1 | package disk 2 | 3 | import ( 4 | "os" 5 | 6 | gio "github.com/funkygao/golib/io" 7 | ) 8 | 9 | func mkdirIfNotExist(dir string) (err error) { 10 | if gio.DirExists(dir) { 11 | return 12 | } 13 | 14 | err = os.MkdirAll(dir, 0700) 15 | return 16 | } 17 | -------------------------------------------------------------------------------- /cmd/kateway/hh/disk/util_test.go: -------------------------------------------------------------------------------- 1 | package disk 2 | 3 | import ( 4 | "os" 5 | "testing" 6 | 7 | "github.com/funkygao/assert" 8 | ) 9 | 10 | func TestMkdirIfNotExist(t *testing.T) { 11 | dir := "xxx" 12 | defer os.Remove(dir) 13 | 14 | assert.Equal(t, nil, mkdirIfNotExist(dir)) 15 | assert.Equal(t, nil, mkdirIfNotExist(dir)) 16 | } 17 | -------------------------------------------------------------------------------- /cmd/kateway/hh/dummy/dummy.go: -------------------------------------------------------------------------------- 1 | package dummy 2 | 3 | import ( 4 | "github.com/funkygao/gafka/cmd/kateway/hh" 5 | ) 6 | 7 | var _ hh.Service = &dummyStore{} 8 | 9 | type dummyStore struct { 10 | } 11 | 12 | func New() hh.Service { 13 | return &dummyStore{} 14 | } 15 | 16 | func (this *dummyStore) Start() (err error) { 17 | return 18 | } 19 | 20 | func (this *dummyStore) Stop() {} 21 | 22 | func (this *dummyStore) Name() string { 23 | return "dummy" 24 | } 25 | 26 | func (this 
*dummyStore) Append(cluster, topic string, key, value []byte) error { 27 | return nil 28 | } 29 | 30 | func (this *dummyStore) Empty(cluster, topic string) bool { 31 | return true 32 | } 33 | 34 | func (this *dummyStore) FlushInflights() {} 35 | 36 | func (this *dummyStore) Inflights() int64 { 37 | return 0 38 | } 39 | 40 | func (this *dummyStore) AppendN() int64 { 41 | return 0 42 | } 43 | 44 | func (this *dummyStore) DeliverN() int64 { 45 | return 0 46 | } 47 | 48 | func (this *dummyStore) ResetCounters() {} 49 | -------------------------------------------------------------------------------- /cmd/kateway/hh/kafka/doc.go: -------------------------------------------------------------------------------- 1 | // Package kafka implements a kafka-backend hinted handoff. 2 | // 3 | // When pub fails, kafka hinted handoff will publish to another 4 | // cluster, and it continuously consumes the handoff cluster and 5 | // pub to the original cluster. 6 | package kafka 7 | -------------------------------------------------------------------------------- /cmd/kateway/hh/kafka/service.go: -------------------------------------------------------------------------------- 1 | package kafka 2 | 3 | type Service struct { 4 | } 5 | 6 | func New() *Service { 7 | return &Service{} 8 | } 9 | -------------------------------------------------------------------------------- /cmd/kateway/hh/mysql/service.go: -------------------------------------------------------------------------------- 1 | package mysql 2 | 3 | type Service struct { 4 | } 5 | -------------------------------------------------------------------------------- /cmd/kateway/hh/service.go: -------------------------------------------------------------------------------- 1 | // Package hh provides a hinted handoff service for Pub. 2 | // 3 | // Hinted handoff is helpful for quick recovery from short term outages like 4 | // server restarts or rebalancing. 
5 | package hh 6 | 7 | type Service interface { 8 | 9 | // Start the hinted handoff service. 10 | Start() error 11 | 12 | // Stop the hinted handoff service. 13 | Stop() 14 | 15 | // Name returns the underlying implementation name. 16 | Name() string 17 | 18 | // Append add key/value byte slice to end of the buffer. 19 | Append(cluster, topic string, key, value []byte) error 20 | 21 | // Empty returns whether the buffer has no inflight entries. 22 | Empty(cluster, topic string) bool 23 | 24 | // FlushInflights flush all inflight entries inside buffer to final message storage. 25 | FlushInflights() 26 | 27 | // Inflights() returns all queues inflights messages count total. 28 | Inflights() int64 29 | 30 | // AppendN returns all queues successfully appended messages count total. 31 | AppendN() int64 32 | 33 | // DeliverN returns all queues successfully delivered messages count total. 34 | DeliverN() int64 35 | 36 | // ResetCounters reset AppendN and DeliverN to 0. 37 | ResetCounters() 38 | } 39 | 40 | var Default Service 41 | -------------------------------------------------------------------------------- /cmd/kateway/inflight/doc.go: -------------------------------------------------------------------------------- 1 | // Deprecated pkg 2 | // Package inflight provides storage for manipulating inflight 3 | // message offsets. 
4 | // 5 | // server client 6 | // | | 7 | // | Sub | 8 | // |<-------------------| 9 | // | | 10 | // | Ok/TakeOff(1) | 11 | // |------------------->| 12 | // | | 13 | // | Sub/Land(1) | 14 | // |<-------------------| 15 | // | | 16 | // | Ok/TakeOff(2) | 17 | // |------------------->| 18 | // | | 19 | // | Sub/Land(2) | 20 | // |<-------------------| 21 | // | | 22 | package inflight 23 | -------------------------------------------------------------------------------- /cmd/kateway/inflight/errors.go: -------------------------------------------------------------------------------- 1 | package inflight 2 | 3 | import ( 4 | "errors" 5 | ) 6 | 7 | var ( 8 | ErrOutOfOrder = errors.New("out of order inflight offset") 9 | ) 10 | -------------------------------------------------------------------------------- /cmd/kateway/inflight/inflight.go: -------------------------------------------------------------------------------- 1 | package inflight 2 | 3 | type Inflight interface { 4 | Land(cluster, topic, group, partition string, offset int64) error 5 | 6 | LandX(cluster, topic, group, partition string, offset int64) ([]byte, error) 7 | 8 | TakeOff(cluster, topic, group, partition string, offset int64, msg []byte) error 9 | 10 | Init() error 11 | Stop() error 12 | } 13 | 14 | var Default Inflight 15 | -------------------------------------------------------------------------------- /cmd/kateway/job/dummy/job.go: -------------------------------------------------------------------------------- 1 | package dummy 2 | 3 | import ( 4 | "github.com/funkygao/gafka/cmd/kateway/job" 5 | ) 6 | 7 | type dummy struct{} 8 | 9 | func New() job.JobStore { 10 | return &dummy{} 11 | } 12 | 13 | func (this *dummy) Add(appid, topic string, payload []byte, due int64) (jobId string, err error) { 14 | return 15 | } 16 | 17 | func (this *dummy) Delete(appid, topic, jobId string) (err error) { 18 | return 19 | } 20 | 21 | func (this *dummy) CreateJobQueue(shardId int, appid, topic string) (err error) 
{ 22 | return 23 | } 24 | 25 | func (this *dummy) Name() string { 26 | return "dummy" 27 | } 28 | 29 | func (this *dummy) Start() error { 30 | return nil 31 | } 32 | 33 | func (this *dummy) Stop() {} 34 | -------------------------------------------------------------------------------- /cmd/kateway/job/errors.go: -------------------------------------------------------------------------------- 1 | package job 2 | 3 | import "errors" 4 | 5 | var ( 6 | ErrNothingDeleted = errors.New("nothing deleted") 7 | ) 8 | -------------------------------------------------------------------------------- /cmd/kateway/job/model.go: -------------------------------------------------------------------------------- 1 | // Package job implements the schedulable message(job) underlying storage. 2 | package job 3 | 4 | import ( 5 | "fmt" 6 | ) 7 | 8 | type JobItem struct { 9 | JobId int64 10 | Payload []byte 11 | Ctime int64 12 | DueTime int64 13 | } 14 | 15 | func (this JobItem) String() string { 16 | return fmt.Sprintf("{%d:%d %s}", this.JobId, this.DueTime, string(this.Payload)) 17 | } 18 | 19 | func (this JobItem) PayloadString(limit int) string { 20 | if limit > 0 && len(this.Payload) > limit { 21 | return string(this.Payload[:limit+1]) 22 | } 23 | 24 | return string(this.Payload) 25 | } 26 | -------------------------------------------------------------------------------- /cmd/kateway/job/mysql/db.sql: -------------------------------------------------------------------------------- 1 | 2 | CREATE TABLE IF NOT EXISTS AppLookup ( 3 | entityId bigint unsigned NOT NULL DEFAULT 0, 4 | shardId mediumint unsigned NOT NULL DEFAULT 0, 5 | name varchar(64) NOT NULL DEFAULT "", 6 | shardLock tinyint unsigned NOT NULL DEFAULT 0, 7 | ctime timestamp NOT NULL DEFAULT 0, 8 | mtime timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, 9 | PRIMARY KEY (entityId) 10 | ) ENGINE = INNODB DEFAULT CHARSET=utf8; 11 | 12 | INSERT IGNORE INTO AppLookup(entityId, shardId, name, shardLock, 
ctime) VALUES(65601907, 1, "app1", 0, now()); 13 | 14 | -------------------------------------------------------------------------------- /cmd/kateway/job/mysql/doc.go: -------------------------------------------------------------------------------- 1 | // Package mysql implements a job store with mysql as backend. 2 | package mysql 3 | -------------------------------------------------------------------------------- /cmd/kateway/job/mysql/idgen.go: -------------------------------------------------------------------------------- 1 | package mysql 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/funkygao/golib/idgen" 7 | log "github.com/funkygao/log4go" 8 | ) 9 | 10 | func (this *mysqlStore) nextId() int64 { 11 | for { 12 | id, err := this.idgen.Next() 13 | if err != nil { 14 | if err == idgen.ErrorClockBackwards { 15 | log.Warn("%s, sleep 50ms", err) 16 | 17 | time.Sleep(time.Millisecond * 50) 18 | continue 19 | } else { 20 | // should never happen 21 | panic(err) 22 | } 23 | } 24 | 25 | return id 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /cmd/kateway/job/mysql/util.go: -------------------------------------------------------------------------------- 1 | package mysql 2 | 3 | import ( 4 | "hash/adler32" 5 | "strings" 6 | ) 7 | 8 | const jobTablePrefix = "job_" 9 | 10 | // JobTable converts a topic name to a mysql table name. 11 | func JobTable(topic string) string { 12 | return jobTablePrefix + strings.Replace(topic, ".", "_", -1) 13 | } 14 | 15 | // HistoryTable converts a topic name to a mysql history table name. 16 | func HistoryTable(topic string) string { 17 | return JobTable(topic) + "_archive" 18 | } 19 | 20 | // App_id convert a string appid to hash int which is used to locate shard. 
21 | func App_id(appid string) int { 22 | return int(adler32.Checksum([]byte(appid))) 23 | } 24 | -------------------------------------------------------------------------------- /cmd/kateway/job/mysql/util_test.go: -------------------------------------------------------------------------------- 1 | package mysql 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/funkygao/assert" 7 | ) 8 | 9 | func TestAppId(t *testing.T) { 10 | assert.Equal(t, 65601907, App_id("app1")) 11 | } 12 | 13 | func TestJobTable(t *testing.T) { 14 | assert.Equal(t, "job_app1_foobar_v1", JobTable("app1.foobar.v1")) 15 | assert.Equal(t, "job_app1_foobar_v1_34", JobTable("app1.foobar.v1.34")) 16 | assert.Equal(t, "job_app1_foobar_v1_34_archive", HistoryTable("app1.foobar.v1.34")) 17 | } 18 | -------------------------------------------------------------------------------- /cmd/kateway/job/store.go: -------------------------------------------------------------------------------- 1 | // Package job implements the schedulable message(job) underlying storage. 2 | package job 3 | 4 | // JobStore is the backend storage layer for jobs(schedulable message). 5 | type JobStore interface { 6 | 7 | // Name returns the underlying storage name. 8 | Name() string 9 | 10 | Start() error 11 | Stop() 12 | 13 | // CreateJobQueue creates a storage container where jobs will persist. 14 | CreateJobQueue(shardId int, appid, topic string) (err error) 15 | 16 | // Add pubs a schedulable message(job) synchronously. 17 | Add(appid, topic string, payload []byte, due int64) (jobId string, err error) 18 | 19 | // Delete removes a job by jobId. 
20 | Delete(appid, topic, jobId string) (err error) 21 | } 22 | 23 | var Default JobStore 24 | -------------------------------------------------------------------------------- /cmd/kateway/logo.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | const ( 4 | logo = ` 5 | _/ _/ _/ 6 | _/ _/ _/_/_/ _/_/_/_/ _/_/ _/ _/ _/ _/_/_/ _/ _/ 7 | _/_/ _/ _/ _/ _/_/_/_/ _/ _/ _/ _/ _/ _/ _/ 8 | _/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/ 9 | _/ _/ _/_/_/ _/_/ _/_/_/ _/ _/ _/_/_/ _/_/_/ 10 | _/ 11 | _/_/ 12 | ` 13 | ) 14 | -------------------------------------------------------------------------------- /cmd/kateway/manager/doc.go: -------------------------------------------------------------------------------- 1 | // Package manager bridges data with pubsub manager. 2 | package manager 3 | -------------------------------------------------------------------------------- /cmd/kateway/manager/errors.go: -------------------------------------------------------------------------------- 1 | package manager 2 | 3 | import ( 4 | "errors" 5 | ) 6 | 7 | var ( 8 | ErrDisabledTopic = errors.New("pub to a disabled topic not allowed") 9 | ErrEmptyIdentity = errors.New("auth with empty identity or key") 10 | ErrAuthenticationFail = errors.New("authentication fails") 11 | ErrAuthorizationFail = errors.New("authorization fails") 12 | ErrInvalidGroup = errors.New("group must be registered before usage") 13 | ErrSchemaNotFound = errors.New("schema not found") 14 | ) 15 | -------------------------------------------------------------------------------- /cmd/kateway/manager/mysql/config.go: -------------------------------------------------------------------------------- 1 | package mysql 2 | 3 | import ( 4 | "time" 5 | ) 6 | 7 | type config struct { 8 | Zone string 9 | Refresh time.Duration 10 | } 11 | 12 | func DefaultConfig(zone string) *config { 13 | return &config{ 14 | Zone: zone, 15 | Refresh: time.Minute * 5, 16 | } 17 | } 18 | 
-------------------------------------------------------------------------------- /cmd/kateway/manager/mysql/schema.go: -------------------------------------------------------------------------------- 1 | package mysql 2 | 3 | type applicationRecord struct { 4 | AppId, Cluster, AppSecret string 5 | } 6 | 7 | type appTopicRecord struct { 8 | AppId, TopicName, Status string 9 | } 10 | 11 | type appSubscribeRecord struct { 12 | AppId, TopicName string 13 | } 14 | 15 | type appConsumerGroupRecord struct { 16 | AppId, GroupName string 17 | } 18 | 19 | type shadowQueueRecord struct { 20 | HisAppId, TopicName, Ver string 21 | MyAppid, Group string 22 | } 23 | 24 | type deadPartitionRecord struct { 25 | KafkaTopic string 26 | PartitionId int32 27 | } 28 | 29 | type topicSchemaRecord struct { 30 | AppId, TopicName, Ver string 31 | Schema string 32 | } 33 | -------------------------------------------------------------------------------- /cmd/kateway/manager/open/config.go: -------------------------------------------------------------------------------- 1 | package open 2 | 3 | import ( 4 | "time" 5 | ) 6 | 7 | type config struct { 8 | Zone string 9 | Refresh time.Duration 10 | } 11 | 12 | func DefaultConfig(zone string) *config { 13 | return &config{ 14 | Zone: zone, 15 | Refresh: time.Minute * 5, 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /cmd/kateway/manager/open/schema.go: -------------------------------------------------------------------------------- 1 | package open 2 | 3 | type applicationRecord struct { 4 | AppId, Cluster, AppSecret string 5 | } 6 | 7 | type appTopicRecord struct { 8 | AppId, TopicName, Status string 9 | } 10 | 11 | type appSubscribeRecord struct { 12 | AppId, TopicName string 13 | } 14 | 15 | type appConsumerGroupRecord struct { 16 | AppId, GroupName string 17 | } 18 | 19 | type shadowQueueRecord struct { 20 | HisAppId, TopicName, Ver string 21 | MyAppid, Group string 22 | } 23 | 24 | type 
var (
	// ErrInvalidCluster is returned when a cluster name is not
	// registered in the current zone.
	ErrInvalidCluster = errors.New("invalid cluster")
)
30 | BrokerList(cluster string) []string 31 | } 32 | 33 | var Default MetaStore 34 | -------------------------------------------------------------------------------- /cmd/kateway/meta/zkmeta/config.go: -------------------------------------------------------------------------------- 1 | package zkmeta 2 | 3 | import ( 4 | "time" 5 | ) 6 | 7 | type config struct { 8 | Refresh time.Duration 9 | } 10 | 11 | func DefaultConfig() *config { 12 | return &config{ 13 | Refresh: time.Minute * 10, 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /cmd/kateway/meta/zkmeta/director.go: -------------------------------------------------------------------------------- 1 | package zkmeta 2 | 3 | import ( 4 | "github.com/funkygao/gafka/zk" 5 | ) 6 | 7 | // Distribution of underly store clusters to multi-tenants. 8 | func (this *zkMetaStore) AssignClusters() []map[string]string { 9 | r := make([]map[string]string, 0) 10 | 11 | this.mu.RLock() 12 | defer this.mu.RUnlock() 13 | 14 | this.zkzone.ForSortedClusters(func(zkcluster *zk.ZkCluster) { 15 | info := zkcluster.RegisteredInfo() 16 | if !info.Public || info.Nickname == "" { 17 | // ignored for kateway manager 18 | return 19 | } 20 | 21 | c := make(map[string]string) 22 | c["name"] = info.Name() 23 | c["nickname"] = info.Nickname 24 | r = append(r, c) 25 | }) 26 | return r 27 | } 28 | -------------------------------------------------------------------------------- /cmd/kateway/meta/zkmeta/errors.go: -------------------------------------------------------------------------------- 1 | package zkmeta 2 | 3 | import ( 4 | "errors" 5 | ) 6 | 7 | var ( 8 | ErrZkBroken = errors.New("zk connection might be broken cause no clusters found") 9 | ) 10 | -------------------------------------------------------------------------------- /cmd/kateway/meta/zkmeta/zk_test.go: -------------------------------------------------------------------------------- 1 | package zkmeta 2 | 3 | import ( 4 | 
// consumerFetcher is a dummy store.Fetcher backed by an in-memory
// channel instead of a real Kafka consumer group.
type consumerFetcher struct {
	ch chan *sarama.ConsumerMessage // message source fed by the sub store
}

// Messages returns the in-memory message stream.
func (this *consumerFetcher) Messages() <-chan *sarama.ConsumerMessage {
	return this.ch
}

// Errors always returns nil: the dummy fetcher never produces errors.
func (this *consumerFetcher) Errors() <-chan *sarama.ConsumerError {
	return nil
}

// CommitUpto is a no-op; the dummy fetcher tracks no offsets.
func (this *consumerFetcher) CommitUpto(*sarama.ConsumerMessage) error {
	return nil
}

// Close is a no-op; there is no underlying consumer to tear down.
func (this *consumerFetcher) Close() error {
	return nil
}
// pubStore is a no-op pub store used for testing and benchmarking:
// every pub "succeeds" instantly with zero partition and offset.
type pubStore struct {
}

// NewPubStore creates a dummy pub store; the debug flag is ignored.
func NewPubStore(debug bool) *pubStore {
	return &pubStore{}
}

// Start is a no-op and always succeeds.
func (this *pubStore) Start() (err error) {
	return nil
}

// Stop is a no-op.
func (this *pubStore) Stop() {}

// Name identifies this store implementation.
func (this *pubStore) Name() string {
	return "dummy"
}

// IsSystemError always reports false: the dummy store never fails.
func (this *pubStore) IsSystemError(error) bool {
	return false
}

// SyncAllPub pretends to pub to all replicas; always zero values.
func (this *pubStore) SyncAllPub(cluster string, topic string, key,
	msg []byte) (partition int32, offset int64, err error) {
	return 0, 0, nil
}

// SyncPub pretends to pub synchronously; always zero values.
func (this *pubStore) SyncPub(cluster string, topic string, key,
	msg []byte) (partition int32, offset int64, err error) {
	return 0, 0, nil
}

// AsyncPub pretends to pub asynchronously; always zero values.
func (this *pubStore) AsyncPub(cluster string, topic string, key,
	msg []byte) (partition int32, offset int64, err error) {
	return 0, 0, nil
}
40 | } 41 | 42 | func (this *subStore) IsSystemError(error) bool { 43 | return false 44 | } 45 | 46 | func (this *subStore) Fetch(cluster, topic, group, remoteAddr, realIp, 47 | reset string, permitStandby, mux bool) (store.Fetcher, error) { 48 | return this.fetcher, nil 49 | } 50 | -------------------------------------------------------------------------------- /cmd/kateway/store/errors.go: -------------------------------------------------------------------------------- 1 | package store 2 | 3 | import ( 4 | "errors" 5 | ) 6 | 7 | var ( 8 | ErrShuttingDown = errors.New("server shutting down") 9 | ErrBusy = errors.New("underlying store too busy") 10 | ErrTooManyConsumers = errors.New("consumers more than available partitions") 11 | ErrRebalancing = errors.New("rebalancing, please retry after a while") 12 | ErrInvalidTopic = errors.New("invalid topic") 13 | ErrInvalidCluster = errors.New("invalid cluster") 14 | ErrEmptyBrokers = errors.New("empty active brokers") 15 | ErrCircuitOpen = errors.New("circuit open, underlying store problems") 16 | ) 17 | -------------------------------------------------------------------------------- /cmd/kateway/store/kafka/init.go: -------------------------------------------------------------------------------- 1 | package kafka 2 | 3 | import ( 4 | "github.com/Shopify/sarama" 5 | log "github.com/funkygao/log4go" 6 | ) 7 | 8 | func init() { 9 | sarama.PanicHandler = func(err interface{}) { 10 | log.Warn("sarama got panic: %+v", err) 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /cmd/kateway/store/kafka/msgpool_go10.go: -------------------------------------------------------------------------------- 1 | // +build !go1.3 2 | 3 | package kafka 4 | 5 | import ( 6 | "sync" 7 | 8 | "github.com/Shopify/sarama" 9 | ) 10 | 11 | func pmGet() *sarama.ProducerMessage { 12 | return &sarama.ProducerMessage{} 13 | } 14 | 15 | func pmPut(m *sarama.ProducerMessage) {} 16 | 
-------------------------------------------------------------------------------- /cmd/kateway/store/kafka/msgpool_go13.go: -------------------------------------------------------------------------------- 1 | // +build go1.3 2 | 3 | package kafka 4 | 5 | import ( 6 | "sync" 7 | 8 | "github.com/Shopify/sarama" 9 | ) 10 | 11 | var ( 12 | // producer message pool 13 | pmPool sync.Pool 14 | ) 15 | 16 | func init() { 17 | pmPool.New = func() interface{} { 18 | return &sarama.ProducerMessage{} 19 | } 20 | } 21 | 22 | func pmGet() *sarama.ProducerMessage { 23 | return pmPool.Get().(*sarama.ProducerMessage) 24 | } 25 | 26 | func pmPut(m *sarama.ProducerMessage) { 27 | pmPool.Put(m) 28 | } 29 | -------------------------------------------------------------------------------- /cmd/kateway/store/kafka/subfetcher.go: -------------------------------------------------------------------------------- 1 | package kafka 2 | 3 | import ( 4 | "github.com/funkygao/kafka-cg/consumergroup" 5 | ) 6 | 7 | type consumerFetcher struct { 8 | *consumergroup.ConsumerGroup 9 | remoteAddr string 10 | store *subStore 11 | } 12 | 13 | func (this *consumerFetcher) Close() error { 14 | return this.store.subManager.killClient(this.remoteAddr) 15 | } 16 | -------------------------------------------------------------------------------- /cmd/kateway/store/kafka/submux_test.go: -------------------------------------------------------------------------------- 1 | package kafka 2 | 3 | import ( 4 | "fmt" 5 | "testing" 6 | 7 | "github.com/funkygao/kafka-cg/consumergroup" 8 | ) 9 | 10 | func TestSubMux(t *testing.T) { 11 | mux := newSubMux() 12 | cg := &consumergroup.ConsumerGroup{} 13 | for i := 0; i < 5; i++ { 14 | mux.register(fmt.Sprintf("127.0.0.1:%d", 10001+i), cg) 15 | } 16 | t.Logf("%+v", mux) 17 | 18 | for i := 0; i < 5; i++ { 19 | r := mux.kill(fmt.Sprintf("127.0.0.1:%d", 10001+i)) 20 | t.Logf("%+v", mux) 21 | if i == 4 && r != true { 22 | t.Fail() 23 | } 24 | } 25 | 26 | } 27 | 
// A PubStore is a generic store that can Pub sync/async.
type PubStore interface {
	// Name returns the name of the underlying store.
	Name() string

	Start() error
	Stop()

	// SyncPub pub a keyed message to a topic of a cluster synchronously.
	SyncPub(cluster, topic string, key, msg []byte) (partition int32, offset int64, err error)

	// SyncAllPub pub a keyed message to all replicas before sending response.
	SyncAllPub(cluster, topic string, key, msg []byte) (partition int32, offset int64, err error)

	// AsyncPub pub a keyed message to a topic of a cluster asynchronously.
	AsyncPub(cluster, topic string, key, msg []byte) (partition int32, offset int64, err error)

	// IsSystemError reports whether err comes from the underlying store
	// itself rather than from a caller mistake.
	IsSystemError(error) bool
}

// DefaultPubStore is the process-wide PubStore, wired up at bootstrap.
var DefaultPubStore PubStore
// ClusterTopic identifies a topic within a kafka cluster.
type ClusterTopic struct {
	Cluster, Topic string
}

func (ct ClusterTopic) String() string {
	return ct.Cluster + "/" + ct.Topic
}

// AppTopic identifies a topic owned by an application.
type AppTopic struct {
	AppID, Topic string
}

func (at AppTopic) String() string {
	return at.AppID + "/" + at.Topic
}

// AppTopicVer identifies a versioned topic owned by an application.
type AppTopicVer struct {
	AppID, Topic, Ver string
}

func (atv AppTopicVer) String() string {
	return atv.AppID + "/" + atv.Topic + "/" + atv.Ver
}

// TopicPartition identifies a single partition of a topic.
type TopicPartition struct {
	Topic       string
	PartitionID int32
}

func (tp TopicPartition) String() string {
	return fmt.Sprintf("%s/%d", tp.Topic, tp.PartitionID)
}

// AppGroup identifies a consumer group owned by an application.
type AppGroup struct {
	AppID, Group string
}

// String uses a value receiver for consistency with every sibling type
// in this package (the original pointer receiver was the lone outlier
// and prevented calling String on non-addressable AppGroup values).
func (ag AppGroup) String() string {
	return ag.AppID + "/" + ag.Group
}

// GroupTopicPartition identifies a partition as seen by a consumer group.
type GroupTopicPartition struct {
	Group, Topic, PartitionID string
}

func (gtp GroupTopicPartition) String() string {
	return gtp.Group + "/" + gtp.Topic + "/" + gtp.PartitionID
}
"github.com/funkygao/gafka/cmd/kguard/watchers/actord" 9 | _ "github.com/funkygao/gafka/cmd/kguard/watchers/anomaly" 10 | _ "github.com/funkygao/gafka/cmd/kguard/watchers/external" 11 | _ "github.com/funkygao/gafka/cmd/kguard/watchers/gc" 12 | _ "github.com/funkygao/gafka/cmd/kguard/watchers/haproxy" 13 | _ "github.com/funkygao/gafka/cmd/kguard/watchers/influxdb" 14 | _ "github.com/funkygao/gafka/cmd/kguard/watchers/influxquery" 15 | _ "github.com/funkygao/gafka/cmd/kguard/watchers/kafka" 16 | _ "github.com/funkygao/gafka/cmd/kguard/watchers/kateway" 17 | _ "github.com/funkygao/gafka/cmd/kguard/watchers/redis" 18 | _ "github.com/funkygao/gafka/cmd/kguard/watchers/zk" 19 | _ "github.com/funkygao/gafka/cmd/kguard/watchers/zone" 20 | 21 | // external watchers 22 | _ "github.com/funkygao/dbus/watchers" 23 | ) 24 | -------------------------------------------------------------------------------- /cmd/kguard/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "os" 6 | 7 | "github.com/funkygao/gafka" 8 | "github.com/funkygao/gafka/cmd/kguard/monitor" 9 | ) 10 | 11 | func main() { 12 | for _, arg := range os.Args[1:] { 13 | if arg == "-v" || arg == "-version" { 14 | fmt.Fprintf(os.Stderr, "%s-%s\n", gafka.Version, gafka.BuildId) 15 | return 16 | } 17 | } 18 | 19 | var m monitor.Monitor 20 | m.Init() 21 | m.ServeForever() 22 | } 23 | -------------------------------------------------------------------------------- /cmd/kguard/monitor/alert.go: -------------------------------------------------------------------------------- 1 | package monitor 2 | 3 | import ( 4 | "net/http" 5 | 6 | "github.com/funkygao/httprouter" 7 | ) 8 | 9 | // POST /alertHook 10 | // so that we can auto-fix 11 | func (this *Monitor) alertHookHandler(w http.ResponseWriter, r *http.Request, 12 | params httprouter.Params) { 13 | 14 | } 15 | -------------------------------------------------------------------------------- 
/cmd/kguard/monitor/context.go: -------------------------------------------------------------------------------- 1 | package monitor 2 | 3 | import ( 4 | "sync" 5 | 6 | "github.com/funkygao/gafka/zk" 7 | ) 8 | 9 | // Context is the context container that will be passed to plugin watchers. 10 | type Context interface { 11 | ZkZone() *zk.ZkZone 12 | StopChan() <-chan struct{} 13 | Inflight() *sync.WaitGroup 14 | InfluxAddr() string 15 | InfluxDB() string 16 | ExternalDir() string 17 | } 18 | -------------------------------------------------------------------------------- /cmd/kguard/monitor/watcher.go: -------------------------------------------------------------------------------- 1 | package monitor 2 | 3 | import ( 4 | "fmt" 5 | ) 6 | 7 | var ( 8 | registeredWatchers = make(map[string]func() Watcher) 9 | ) 10 | 11 | // A Watcher is a plugin of monitor. 12 | type Watcher interface { 13 | Init(Context) 14 | Run() 15 | } 16 | 17 | type Setter interface { 18 | Set(key string) 19 | } 20 | 21 | func RegisterWatcher(name string, factory func() Watcher) { 22 | if _, present := registeredWatchers[name]; present { 23 | panic(fmt.Sprintf("watcher[%s] cannot register twice", name)) 24 | } 25 | 26 | registeredWatchers[name] = factory 27 | } 28 | -------------------------------------------------------------------------------- /cmd/kguard/sos/doc.go: -------------------------------------------------------------------------------- 1 | // Package sos is a web server that accepts SOS message from any gafka components. 
var (
	// sosMetrics counts SOS messages received since the last idle reset.
	sosMetrics = metrics.NewRegisteredCounter("sos", nil)

	// lastSos is when the most recent SOS arrived.
	// NOTE(review): written by handleSOS (HTTP handler goroutines) and read
	// by maintainSosCounter without synchronization — looks like a data
	// race; confirm and guard if so.
	lastSos time.Time

	// IdleTimeout is how long without SOS traffic before the counter is reset.
	IdleTimeout = 5 * time.Minute
)

// init wires the SOS endpoint and starts the HTTP listener plus the
// idle-reset loop as soon as the package is imported.
func init() {
	http.HandleFunc("/", handleSOS)
	// NOTE(review): ListenAndServe error is discarded, so a failed bind
	// is silent; consider logging it.
	go http.ListenAndServe(fmt.Sprintf(":%d", telemetry.SOSPort), nil)
	go maintainSosCounter()
}

// handleSOS accepts one SOS message: it bumps the counter, records the
// arrival time, logs the sender identity and body, and replies 202.
func handleSOS(w http.ResponseWriter, r *http.Request) {
	sosMsg, _ := ioutil.ReadAll(r.Body)
	r.Body.Close()

	sosMetrics.Inc(1)
	lastSos = time.Now()
	log.Critical("SOS[%s] from %s %s", r.Header.Get(telemetry.SOSIdentHeader), r.RemoteAddr, string(sosMsg))

	w.WriteHeader(http.StatusAccepted)
}

// maintainSosCounter resets the SOS counter once no SOS has arrived for
// IdleTimeout, checking every minute. Runs forever in its own goroutine.
func maintainSosCounter() {
	for {
		time.Sleep(time.Minute)

		if time.Since(lastSos) >= IdleTimeout {
			if sosMetrics.Count() > 0 {
				log.Info("SOS[#%d] idle over %s, metrics reset", sosMetrics.Count(), IdleTimeout)
			}
			sosMetrics.Clear()
		}
	}
}
func init() {
	monitor.RegisterWatcher("kafka.gc", func() monitor.Watcher {
		return &WatchKafkaGC{}
	})
}

// WatchKafkaGC is a placeholder watcher for kafka GC activity: its
// per-tick branch is currently empty, so it only logs on shutdown.
type WatchKafkaGC struct {
	Stop <-chan struct{} // closed by the monitor to request shutdown
	Wg   *sync.WaitGroup // tracks this watcher as in-flight
}

// Init captures the stop channel and wait group from the monitor context.
func (this *WatchKafkaGC) Init(ctx monitor.Context) {
	this.Stop = ctx.StopChan()
	this.Wg = ctx.Inflight()
}

// Run ticks once per minute until stopped. The per-tick work is not
// yet implemented.
func (this *WatchKafkaGC) Run() {
	defer this.Wg.Done()

	ticker := time.NewTicker(time.Minute)
	defer ticker.Stop()

	for {
		select {
		case <-this.Stop:
			log.Info("kafka.gc stopped")
			return

		case <-ticker.C:
			// TODO: collect kafka GC metrics here.
		}
	}
}
health. 22 | type WatchHaproxy struct { 23 | Zkzone *zk.ZkZone 24 | Stop <-chan struct{} 25 | Tick time.Duration 26 | Wg *sync.WaitGroup 27 | } 28 | 29 | func (this *WatchHaproxy) Init(ctx monitor.Context) { 30 | this.Zkzone = ctx.ZkZone() 31 | this.Stop = ctx.StopChan() 32 | this.Wg = ctx.Inflight() 33 | } 34 | 35 | func (this *WatchHaproxy) Run() { 36 | defer this.Wg.Done() 37 | 38 | ticker := time.NewTicker(this.Tick) 39 | defer ticker.Stop() 40 | 41 | instances := metrics.NewRegisteredGauge("haproxy.instances", nil) 42 | 43 | for { 44 | select { 45 | case <-this.Stop: 46 | log.Info("haproxy.haproxy stopped") 47 | return 48 | 49 | case <-ticker.C: 50 | instances.Update(2) // TODO 51 | 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /cmd/kguard/watchers/influxquery/query.go: -------------------------------------------------------------------------------- 1 | package influxquery 2 | 3 | import ( 4 | "errors" 5 | 6 | "github.com/influxdata/influxdb/client/v2" 7 | ) 8 | 9 | var ( 10 | errInfluxResult = errors.New("bad reply from influx query result") 11 | influxClient client.Client 12 | ) 13 | 14 | func queryInfluxDB(addr, db, cmd string) (res []client.Result, err error) { 15 | // FIXME not atomic 16 | if influxClient == nil { 17 | influxClient, err = client.NewHTTPClient(client.HTTPConfig{ 18 | Addr: addr, 19 | Username: "", 20 | Password: "", 21 | }) 22 | if err != nil { 23 | return 24 | } 25 | } 26 | 27 | if response, err := influxClient.Query(client.Query{ 28 | Command: cmd, 29 | Database: db, 30 | }); err == nil { 31 | if response.Error() != nil { 32 | return res, response.Error() 33 | } 34 | res = response.Results 35 | } else { 36 | return res, err 37 | } 38 | 39 | return res, nil 40 | } 41 | -------------------------------------------------------------------------------- /cmd/kguard/watchers/kafka/cluster.go: -------------------------------------------------------------------------------- 1 | package 
// Init captures the zone handle, stop channel and wait group from the
// monitor context.
func (this *WatchClusters) Init(ctx monitor.Context) {
	this.Zkzone = ctx.ZkZone()
	this.Stop = ctx.StopChan()
	this.Wg = ctx.Inflight()
}

// Run reports the number of kafka clusters in the zone as the
// "clusters" gauge once per Tick until stopped.
func (this *WatchClusters) Run() {
	defer this.Wg.Done()

	ticker := time.NewTicker(this.Tick)
	defer ticker.Stop()

	clusters := metrics.NewRegisteredGauge("clusters", nil)
	for {
		select {
		case <-this.Stop:
			log.Info("kafka.cluster stopped")
			return

		case <-ticker.C:
			// Re-read every tick so cluster additions/removals show up
			// without a restart.
			clusters.Update(int64(len(this.Zkzone.Clusters())))
		}
	}

}
4 | 5 | ### Usage 6 | 7 | $zk 8 | A CLI tool for Zookeeper 9 | 10 | usage: zk [--version] [--help] [] 11 | 12 | Available commands are: 13 | acl Show znode ACL info 14 | console Interactive mode 15 | create Create znode with initial data 16 | dump Dump permanent directories and contents of Zookeeper 17 | get Show znode data 18 | ls List znode children 19 | rm Remove znode 20 | set Write znode data 21 | stat Show znode status info 22 | zones Print zones defined in $HOME/.gafka.cf 23 | 24 | -------------------------------------------------------------------------------- /ctx/config.go: -------------------------------------------------------------------------------- 1 | package ctx 2 | 3 | import ( 4 | "errors" 5 | "sort" 6 | ) 7 | 8 | var ( 9 | ErrInvalidZone = errors.New("Invalid zone") 10 | 11 | conf *config 12 | ) 13 | 14 | type config struct { 15 | hostname string // not by config, but runtime, cached value 16 | 17 | kafkaHome string 18 | logLevel string 19 | zkDefaultZone string // zk command default zone name 20 | esDefaultZone string 21 | upgradeCenter string 22 | zones map[string]*zone // name:zone 23 | aliases map[string]string 24 | reverseDns map[string][]string // ip: domain names 25 | } 26 | 27 | func (c *config) sortedZones() []string { 28 | sortedZones := make([]string, 0, len(c.zones)) 29 | for name := range c.zones { 30 | sortedZones = append(sortedZones, name) 31 | } 32 | sort.Strings(sortedZones) 33 | return sortedZones 34 | } 35 | 36 | func ensureLogLoaded() { 37 | if conf == nil { 38 | panic("call LoadConfig before this") 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /ctx/config_test.go: -------------------------------------------------------------------------------- 1 | package ctx 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/funkygao/assert" 7 | ) 8 | 9 | func TestLoadConfig(t *testing.T) { 10 | LoadConfig("gafka.cf") 11 | t.Logf("%+v", conf) 12 | assert.Equal(t, 1, len(conf.zones)) 13 
| assert.Equal(t, "info", conf.logLevel) 14 | alias, present := Alias("localtopics") 15 | assert.Equal(t, true, present) 16 | assert.Equal(t, "topics -z local", alias) 17 | alias, present = Alias("non-existent") 18 | assert.Equal(t, false, present) 19 | assert.Equal(t, "", alias) 20 | 21 | host, present := ReverseDnsLookup("127.0.0.1", 0) 22 | assert.Equal(t, true, present) 23 | assert.Equal(t, "k10121a.demo.com", host) 24 | 25 | } 26 | -------------------------------------------------------------------------------- /ctx/const.go: -------------------------------------------------------------------------------- 1 | package ctx 2 | 3 | const ( 4 | DefaultConfig = ` 5 | { 6 | zones: [ 7 | { 8 | "name": "local" 9 | "zk": "localhost:2181" 10 | "influxdb": "localhost:8086" 11 | "swf": "http://localhost:9195/v1" 12 | } 13 | 14 | ] 15 | 16 | zk_default_zone: "local" 17 | kafka_home: "/opt/kafka_2.10-0.8.2.2" 18 | upgrade_center: "http://127.0.0.1" 19 | 20 | aliases: [ 21 | { 22 | "cmd": "toplocal" 23 | "alias": "top -z local" 24 | } 25 | 26 | ] 27 | 28 | reverse_dns: [ 29 | 30 | ] 31 | } 32 | ` 33 | ) 34 | -------------------------------------------------------------------------------- /ctx/doc.go: -------------------------------------------------------------------------------- 1 | // Package ctx provides configurations loading and exporting that is 2 | // shared across gafka project. 
3 | package ctx 4 | -------------------------------------------------------------------------------- /ctx/gafka.cf: -------------------------------------------------------------------------------- 1 | { 2 | zones: [ 3 | { 4 | name: "local" 5 | zk: "localhost:2181" 6 | "swf": "http://localhost:9195/v1" 7 | } 8 | ] 9 | 10 | zk_default_zone: "local" 11 | kafka_home: "/opt/kafka_2.10-0.8.2.2" 12 | loglevel: "info" 13 | 14 | aliases: [ 15 | { 16 | cmd: "localtopics" 17 | alias: "topics -z local" 18 | } 19 | ] 20 | 21 | reverse_dns: [ 22 | "k10121a.demo.com:127.0.0.1" 23 | ] 24 | } 25 | -------------------------------------------------------------------------------- /ctx/util.go: -------------------------------------------------------------------------------- 1 | package ctx 2 | 3 | import ( 4 | "errors" 5 | "strconv" 6 | "strings" 7 | ) 8 | 9 | var errInvalidLoadAvg = errors.New("invalid load avg line") 10 | 11 | func ExtractLoadAvg1m(line string) (float64, error) { 12 | parts := strings.Split(line, "load average:") 13 | if len(parts) < 2 { 14 | return 0, errInvalidLoadAvg 15 | } 16 | 17 | loadAvg := strings.TrimSpace(parts[1]) 18 | avgs := strings.SplitN(loadAvg, ",", 3) 19 | loadAvg1m, _ := strconv.ParseFloat(strings.TrimSpace(avgs[0]), 64) 20 | return loadAvg1m, nil 21 | 22 | } 23 | -------------------------------------------------------------------------------- /ctx/util_test.go: -------------------------------------------------------------------------------- 1 | package ctx 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/funkygao/assert" 7 | ) 8 | 9 | func TestExtractLoadAvg1m(t *testing.T) { 10 | line := " hostname.foo.bar: 12:19:20 up 144 days, 3:21, 0 users, load average: 0.10, 0.18, 0.67" 11 | avg1m, err := ExtractLoadAvg1m(line) 12 | assert.Equal(t, 0.1, avg1m) 13 | assert.Equal(t, nil, err) 14 | 15 | line = "adsfasf asdfasdf" 16 | avg1m, err = ExtractLoadAvg1m(line) 17 | assert.Equal(t, errInvalidLoadAvg, err) 18 | 19 | line = "hostname.foo.bar: 12:19:20 
up 144 days, 3:21, 0 users, load average: 1.10, 0.18, 0.67" 20 | avg1m, err = ExtractLoadAvg1m(line) 21 | assert.Equal(t, nil, err) 22 | assert.Equal(t, 1.1, avg1m) 23 | 24 | line = "hostname.foo.bar: 12:19:20 up 144 days, 3:21, 0 users, load average: 12.10, 0.18, 0.67" 25 | avg1m, err = ExtractLoadAvg1m(line) 26 | assert.Equal(t, nil, err) 27 | assert.Equal(t, 12.1, avg1m) 28 | } 29 | -------------------------------------------------------------------------------- /diagnostics/agent/agent.go: -------------------------------------------------------------------------------- 1 | // Package agent provides an HTTP endpoint for a program providing 2 | // diagnostics and statistics for a given task. 3 | package agent 4 | 5 | import ( 6 | "fmt" 7 | "net/http" 8 | _ "net/http/pprof" 9 | ) 10 | 11 | var ( 12 | HttpAddr = "localhost:10120" 13 | 14 | // Errors is the channel to receive errors of pprof agent. 15 | Errors = make(chan error, 1) 16 | ) 17 | 18 | // Start starts the diagnostics agent on a host process. Once agent started, 19 | // user can retrieve diagnostics via the HttpAddr endpoint. 
20 | func Start() (endpoint string) { 21 | // TODO access log 22 | go func() { 23 | if err := http.ListenAndServe(HttpAddr, nil); err != nil { 24 | Errors <- fmt.Errorf("pprof agent: %v", err) 25 | } 26 | }() 27 | 28 | return HttpAddr 29 | } 30 | -------------------------------------------------------------------------------- /manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "github.com/Shopify/sarama": { 4 | "revision": "388a9be86573de80c995eb4ca4c872a190cf12f0" 5 | }, 6 | "github.com/samuel/go-zookeeper": { 7 | "revision": "1d7be4effb13d2d908342d349d71a284a7542693" 8 | }, 9 | "github.com/coreos/etcd/raft": { 10 | "branch": "master", 11 | "revision": "1a962df59630a6a683135929c768412f3ad79b2c", 12 | "version": "release-3.1" 13 | }, 14 | "github.com/shirou/gopsutil": { 15 | "revision": "1123132e5ad392485dbd16e0f09a0db8c963a2ab" 16 | }, 17 | "github.com/influxdata/influxdb/client": { 18 | "revision": "390a16925d8bce2955ef7a27bc423762566cd931" 19 | } 20 | "github.com/hashicorp/memberlist": { 21 | "revision": "9800c50ab79c002353852a9b1095e9591b161513" 22 | } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /mpool/const.go: -------------------------------------------------------------------------------- 1 | package mpool 2 | 3 | const ( 4 | accessLogLineMaxBytes = 1 << 9 5 | ) 6 | -------------------------------------------------------------------------------- /mpool/doc.go: -------------------------------------------------------------------------------- 1 | // Package mpool provides recycleable memory buffer to reduce gc. 2 | // 3 | // Based upon slab class allocation algorithm. 
4 | package mpool 5 | -------------------------------------------------------------------------------- /mpool/intern.go: -------------------------------------------------------------------------------- 1 | package mpool 2 | 3 | import ( 4 | "sync" 5 | ) 6 | 7 | type Intern struct { 8 | sync.RWMutex 9 | lookup map[string]string 10 | } 11 | 12 | func NewIntern() *Intern { 13 | return &Intern{lookup: make(map[string]string, 10)} 14 | } 15 | 16 | func (this *Intern) String(s string) string { 17 | this.RLock() 18 | ss, present := this.lookup[s] 19 | this.RUnlock() 20 | if present { 21 | return ss 22 | } 23 | 24 | this.Lock() 25 | ss, present = this.lookup[s] 26 | if present { 27 | this.Unlock() 28 | return ss 29 | } 30 | 31 | this.lookup[s] = s 32 | this.Unlock() 33 | return s 34 | } 35 | -------------------------------------------------------------------------------- /mpool/intern_test.go: -------------------------------------------------------------------------------- 1 | package mpool 2 | 3 | import ( 4 | "testing" 5 | ) 6 | 7 | func BenchmarkIntern(b *testing.B) { 8 | i := NewIntern() 9 | b.RunParallel(func(pb *testing.PB) { 10 | var s string 11 | 12 | for pb.Next() { 13 | s = i.String("app1" + "." + "order" + "." 
+ "v1") 14 | } 15 | 16 | _ = s 17 | }) 18 | } 19 | -------------------------------------------------------------------------------- /mpool/message_test.go: -------------------------------------------------------------------------------- 1 | package mpool 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/funkygao/assert" 7 | ) 8 | 9 | func TestMessageUsage(t *testing.T) { 10 | m := NewMessage(1029) 11 | msg := "hello world" 12 | m.Body = m.Body[:len(msg)] 13 | copy(m.Body, msg) 14 | assert.Equal(t, msg, string(m.Body)) 15 | assert.Equal(t, len(msg), len(m.Body)) 16 | } 17 | 18 | func TestRound(t *testing.T) { 19 | fixtures := assert.Fixtures{ 20 | assert.Fixture{Input: 2, Expected: 256}, 21 | assert.Fixture{Input: 198, Expected: 256}, 22 | assert.Fixture{Input: 256, Expected: 256}, 23 | } 24 | 25 | for _, test := range fixtures { 26 | assert.Equal(t, test.Expected.(int), round(test.Input.(int), 256)) 27 | } 28 | 29 | } 30 | -------------------------------------------------------------------------------- /mpool/mpool_go10.go: -------------------------------------------------------------------------------- 1 | // +build !go1.3 2 | 3 | package mpool 4 | 5 | import ( 6 | "bytes" 7 | ) 8 | 9 | func BytesBufferGet() *bytes.Buffer { 10 | return &bytes.Buffer{} 11 | } 12 | 13 | func BytesBufferPut(b *bytes.Buffer) {} 14 | 15 | func AccessLogLineBufferGet() []byte { 16 | return make([]byte, 0, accessLogLineMaxBytes) 17 | } 18 | 19 | func AccessLogLineBufferPut(b []byte) { 20 | bytesPool.Put(b) 21 | } 22 | -------------------------------------------------------------------------------- /mpool/mpool_go13.go: -------------------------------------------------------------------------------- 1 | // +build go1.3 2 | 3 | package mpool 4 | 5 | import ( 6 | "bytes" 7 | "sync" 8 | ) 9 | 10 | var ( 11 | bsb sync.Pool 12 | accessLogPool sync.Pool 13 | ) 14 | 15 | func init() { 16 | bsb.New = func() interface{} { 17 | return bytes.NewBuffer(make([]byte, 100)) 18 | } 19 | 20 | 
accessLogPool.New = func() interface{} { 21 | return make([]byte, 0, accessLogLineMaxBytes) 22 | } 23 | } 24 | 25 | func BytesBufferGet() *bytes.Buffer { 26 | return bsb.Get().(*bytes.Buffer) 27 | } 28 | 29 | func BytesBufferPut(b *bytes.Buffer) { 30 | bsb.Put(b) 31 | } 32 | 33 | func AccessLogLineBufferGet() []byte { 34 | return accessLogPool.Get().([]byte) 35 | } 36 | 37 | func AccessLogLineBufferPut(b []byte) { 38 | accessLogPool.Put(b) 39 | } 40 | -------------------------------------------------------------------------------- /registry/doc.go: -------------------------------------------------------------------------------- 1 | // Package registry defines interfaces that kateway will 2 | // use to register its running instance health and meta data. 3 | // 4 | // gk will use this meta data to check out online kateway instances. 5 | package registry 6 | -------------------------------------------------------------------------------- /registry/dummy/reg.go: -------------------------------------------------------------------------------- 1 | package zk 2 | 3 | import ( 4 | "github.com/funkygao/gafka/registry" 5 | "github.com/funkygao/gafka/zk" 6 | zklib "github.com/samuel/go-zookeeper/zk" 7 | ) 8 | 9 | type dummy struct { 10 | } 11 | 12 | func New(*zk.ZkZone) registry.Backend { 13 | return &dummy{} 14 | } 15 | 16 | func (this *dummy) Name() string { 17 | return "dummy" 18 | } 19 | 20 | func (this *dummy) Register(id string, data []byte) { 21 | return 22 | } 23 | 24 | func (this *dummy) Deregister(id string, oldData []byte) error { 25 | return nil 26 | } 27 | 28 | func (this *dummy) WatchInstances() ([]string, <-chan zklib.Event, error) { 29 | return nil, nil, nil 30 | } 31 | -------------------------------------------------------------------------------- /registry/eureka/eureka.go: -------------------------------------------------------------------------------- 1 | package zk 2 | 3 | import ( 4 | "github.com/funkygao/gafka/registry" 5 | 
"github.com/funkygao/gafka/zk" 6 | zklib "github.com/samuel/go-zookeeper/zk" 7 | ) 8 | 9 | // eureka is a service discovery implementation that uses netflix eureka 10 | // as backend, which is AP system. 11 | // 12 | // register, renew, cancel, get is all eureka provides. 13 | type eureka struct { 14 | } 15 | 16 | func New(*zk.ZkZone) registry.Backend { 17 | return &eureka{} 18 | } 19 | 20 | func (this *eureka) Name() string { 21 | return "eureka" 22 | } 23 | 24 | func (this *eureka) Register(id string, data []byte) { 25 | return 26 | } 27 | 28 | func (this *eureka) Deregister(id string, oldData []byte) error { 29 | return nil 30 | } 31 | 32 | func (this *eureka) WatchInstances() ([]string, <-chan zklib.Event, error) { 33 | return nil, nil, nil 34 | } 35 | -------------------------------------------------------------------------------- /registry/registry.go: -------------------------------------------------------------------------------- 1 | package registry 2 | 3 | import ( 4 | "github.com/samuel/go-zookeeper/zk" 5 | ) 6 | 7 | type Backend interface { 8 | Register(id string, data []byte) 9 | 10 | Deregister(id string, data []byte) error 11 | 12 | WatchInstances() ([]string, <-chan zk.Event, error) 13 | 14 | // Name of the registry backend. 
15 | Name() string 16 | } 17 | 18 | var Default Backend 19 | -------------------------------------------------------------------------------- /registry/zk/zk_test.go: -------------------------------------------------------------------------------- 1 | package zk 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/funkygao/assert" 7 | "github.com/funkygao/gafka/ctx" 8 | "github.com/funkygao/gafka/zk" 9 | ) 10 | 11 | func init() { 12 | ctx.LoadFromHome() 13 | } 14 | 15 | func TestZkPath(t *testing.T) { 16 | zkzone := zk.NewZkZone(zk.DefaultConfig(ctx.DefaultZone(), ctx.ZoneZkAddrs(ctx.DefaultZone()))) 17 | defer zkzone.Close() 18 | zk := New(zkzone) 19 | id := "1" 20 | assert.Equal(t, "/_kateway/ids/local/1", zk.mypath(id)) 21 | 22 | data := []byte("foo, bar") 23 | err := zk.Register(id, data) 24 | assert.Equal(t, nil, err) 25 | defer zk.Deregister(id, data) 26 | 27 | ok, err := zk.Registered(id) 28 | assert.Equal(t, true, ok) 29 | assert.Equal(t, nil, err) 30 | } 31 | -------------------------------------------------------------------------------- /sla/api.go: -------------------------------------------------------------------------------- 1 | package sla 2 | 3 | func ValidateShadowName(name string) bool { 4 | if name != SlaKeyRetryTopic && name != SlaKeyDeadLetterTopic { 5 | return false 6 | } 7 | 8 | return true 9 | } 10 | -------------------------------------------------------------------------------- /sla/doc.go: -------------------------------------------------------------------------------- 1 | // Package sla provides the SLA definitions for topics. 
2 | // 3 | // Warded topic has 2 extra topics: 4 | // {appid}.{topic}.retry.{ver} 5 | // {appid}.{topic}.dead.{ver} 6 | package sla 7 | -------------------------------------------------------------------------------- /sla/err.go: -------------------------------------------------------------------------------- 1 | package sla 2 | 3 | import ( 4 | "errors" 5 | ) 6 | 7 | var ( 8 | ErrNegative = errors.New("can not be negative") 9 | ErrEmptyArg = errors.New("empty argument") 10 | ErrNotNumber = errors.New("not number") 11 | ErrTooBigPartitions = errors.New("too big partitions") 12 | ) 13 | -------------------------------------------------------------------------------- /telemetry/Makefile: -------------------------------------------------------------------------------- 1 | escape: 2 | go build -gcflags '-m=1' 3 | -------------------------------------------------------------------------------- /telemetry/influxdb/config.go: -------------------------------------------------------------------------------- 1 | package influxdb 2 | 3 | import ( 4 | "errors" 5 | "net/url" 6 | "time" 7 | 8 | "github.com/funkygao/gafka/ctx" 9 | ) 10 | 11 | type config struct { 12 | interval time.Duration 13 | hostname string // local host name 14 | 15 | url url.URL 16 | database string // influxdb database 17 | username string // influxdb username 18 | password string 19 | } 20 | 21 | func NewConfig(uri, db, user, pass string, interval time.Duration) (*config, error) { 22 | u, err := url.Parse(uri) 23 | if err != nil { 24 | return nil, err 25 | } 26 | 27 | if interval == 0 { 28 | return nil, errors.New("illegal interval") 29 | } 30 | if uri == "" { 31 | return nil, errors.New("empty influxdb uri") 32 | } 33 | if db == "" { 34 | return nil, errors.New("empty influxdb db name") 35 | } 36 | 37 | return &config{ 38 | hostname: ctx.Hostname(), 39 | url: *u, 40 | database: db, 41 | username: user, 42 | password: pass, 43 | interval: interval, 44 | }, nil 45 | } 46 | 
-------------------------------------------------------------------------------- /telemetry/influxdb/tag.go: -------------------------------------------------------------------------------- 1 | package influxdb 2 | 3 | import ( 4 | "net/url" 5 | "strings" 6 | ) 7 | 8 | const ( 9 | tagSep = "#" 10 | ) 11 | 12 | // name: appid=5&topic=a.b.c&ver=v1#pub.qps 13 | // TODO deprecated 14 | func (this *runner) extractTagsFromMetricsName(name string) (realName string, tags map[string]string) { 15 | tags = map[string]string{ 16 | "host": this.cf.hostname, 17 | } 18 | 19 | i := strings.Index(name, tagSep) 20 | if i < 0 { 21 | // no tag 22 | return name, tags 23 | } 24 | 25 | // has tag 26 | realName = name[i+1:] 27 | 28 | u, _ := url.ParseQuery(name[:i]) 29 | for k, v := range u { 30 | tags[k] = v[0] // we use only 1st item 31 | } 32 | 33 | return 34 | } 35 | -------------------------------------------------------------------------------- /telemetry/reporter.go: -------------------------------------------------------------------------------- 1 | // Package telemetry perists github.com/funkygao/go-metrics 2 | // metrics.Registry to durable storage. 3 | package telemetry 4 | 5 | // A Reporter continuously scans metrics.Registry and 6 | // persists all metrics to durable storage. 
7 | type Reporter interface { 8 | Name() string 9 | 10 | Start() error 11 | Stop() 12 | } 13 | 14 | var Default Reporter 15 | -------------------------------------------------------------------------------- /telemetry/sos.go: -------------------------------------------------------------------------------- 1 | package telemetry 2 | 3 | const ( 4 | SOSPort = 9119 5 | SOSIdentHeader = "X-SOS-From" 6 | ) 7 | -------------------------------------------------------------------------------- /telemetry/tag_test.go: -------------------------------------------------------------------------------- 1 | package telemetry 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/funkygao/assert" 7 | ) 8 | 9 | func TestUntag(t *testing.T) { 10 | appid, topic, ver, realname := Untag("{app1.mytopic.v1}pub.ok") 11 | assert.Equal(t, "app1", appid) 12 | assert.Equal(t, "mytopic", topic) 13 | assert.Equal(t, "v1", ver) 14 | assert.Equal(t, "pub.ok", realname) 15 | 16 | appid, topic, ver, realname = Untag("pub.ok") 17 | assert.Equal(t, "pub.ok", realname) 18 | assert.Equal(t, "", appid) 19 | assert.Equal(t, "", topic) 20 | assert.Equal(t, "", ver) 21 | } 22 | 23 | func TestTag(t *testing.T) { 24 | assert.Equal(t, "{appid.topic.ver}", Tag("appid", "topic", "ver")) 25 | } 26 | 27 | // 186 ns/op 28 | func BenchmarkUntag(b *testing.B) { 29 | for i := 0; i < b.N; i++ { 30 | Untag("{app1.mytopic.v1}pub.ok") 31 | } 32 | } 33 | 34 | // 159 ns/op 2 allocs/op 35 | func BenchmarkTag(b *testing.B) { 36 | for i := 0; i < b.N; i++ { 37 | Tag("appid", "topic", "ver") 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /ver.go: -------------------------------------------------------------------------------- 1 | // Package gafka is a full ecosystem built for real-time cloud PubSub system. 2 | package gafka 3 | 4 | var ( 5 | // Version is the unified version of the whole gafka project. 6 | // Each component shares the same version info. 
7 | Version = "unknown" 8 | 9 | // BuildId is the SCM commit id. 10 | BuildId = "?" 11 | 12 | // BuiltAt is the time when build.sh was run. 13 | BuiltAt = "1970" 14 | ) 15 | -------------------------------------------------------------------------------- /zk/config.go: -------------------------------------------------------------------------------- 1 | package zk 2 | 3 | import ( 4 | "strings" 5 | "time" 6 | ) 7 | 8 | type Config struct { 9 | Name string 10 | ZkAddrs string 11 | SessionTimeout time.Duration 12 | PanicOnError bool 13 | } 14 | 15 | func DefaultConfig(name, addrs string) *Config { 16 | return &Config{ 17 | Name: name, 18 | ZkAddrs: addrs, 19 | SessionTimeout: DefaultZkSessionTimeout(), 20 | PanicOnError: false, 21 | } 22 | } 23 | 24 | func (this *Config) ZkServers() []string { 25 | return strings.Split(this.ZkAddrs, ",") 26 | } 27 | 28 | func DefaultZkSessionTimeout() time.Duration { 29 | // online zk tickTime=2000, valid timeout: 4s ~ 40s 30 | // io timeout: 13s ping interval: 6.5s 31 | return time.Second * 20 32 | } 33 | -------------------------------------------------------------------------------- /zk/dbus.go: -------------------------------------------------------------------------------- 1 | package zk 2 | 3 | import ( 4 | "path" 5 | ) 6 | 7 | func DbusCheckpointRoot(cluster string) string { 8 | return path.Join(DbusRoot, cluster, dbusCheckpointPath) 9 | } 10 | 11 | func DbusConfig(cluster string) string { 12 | return path.Join(DbusRoot, cluster, dbusConfPath) 13 | } 14 | 15 | func DbusConfigDir(cluster string) string { 16 | return path.Join(DbusRoot, cluster, dbusConfDirPath) 17 | } 18 | 19 | func DbusClusterRoot(cluster string) string { 20 | return path.Join(DbusRoot, cluster, dbusClusterPath) 21 | } 22 | -------------------------------------------------------------------------------- /zk/dbus_test.go: -------------------------------------------------------------------------------- 1 | package zk 2 | 3 | import ( 4 | "testing" 5 | 6 | 
"github.com/funkygao/assert" 7 | ) 8 | 9 | func TestDbusPathRelated(t *testing.T) { 10 | cluster := "foo" 11 | assert.Equal(t, "/dbus/foo/cluster", DbusClusterRoot(cluster)) 12 | assert.Equal(t, "/dbus/foo/checkpoint", DbusCheckpointRoot(cluster)) 13 | assert.Equal(t, "/dbus/foo/conf", DbusConfig(cluster)) 14 | assert.Equal(t, "/dbus/foo/conf.d", DbusConfigDir(cluster)) 15 | } 16 | -------------------------------------------------------------------------------- /zk/doc.go: -------------------------------------------------------------------------------- 1 | // Package zk is a helper lib that manages kafka cluster meta data and 2 | // consumer meta data. 3 | package zk 4 | -------------------------------------------------------------------------------- /zk/error.go: -------------------------------------------------------------------------------- 1 | package zk 2 | 3 | import ( 4 | "errors" 5 | ) 6 | 7 | var ( 8 | ErrDupConnect = errors.New("connect while being connected") 9 | ErrClaimedByOthers = errors.New("claimed by others") 10 | ErrNotClaimed = errors.New("release non-claimed") 11 | ) 12 | -------------------------------------------------------------------------------- /zk/es.go: -------------------------------------------------------------------------------- 1 | package zk 2 | 3 | import ( 4 | "fmt" 5 | ) 6 | 7 | type EsCluster struct { 8 | Name string 9 | zkzone *ZkZone 10 | } 11 | 12 | func (ec *EsCluster) AddNode(hostPort string) error { 13 | path := fmt.Sprintf("%s/%s/node/%s", esRoot, ec.Name, hostPort) 14 | if err := ec.zkzone.ensureParentDirExists(path); err != nil { 15 | return err 16 | } 17 | 18 | return ec.zkzone.CreatePermenantZnode(path, nil) 19 | } 20 | 21 | func (ec *EsCluster) Nodes() []string { 22 | path := fmt.Sprintf("%s/%s/node", esRoot, ec.Name) 23 | return ec.zkzone.children(path) 24 | } 25 | 26 | func (ec *EsCluster) FirstBootstrapNode() string { 27 | nodes := ec.Nodes() 28 | if len(nodes) == 0 { 29 | return "" 30 | } 31 | 32 | return 
nodes[0] 33 | } 34 | -------------------------------------------------------------------------------- /zk/flw.go: -------------------------------------------------------------------------------- 1 | package zk 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/funkygao/golib/color" 7 | ) 8 | 9 | // Returns {zkHost: outputLines} 10 | func (this *ZkZone) RunZkFourLetterCommand(cmd string) map[string]string { 11 | servers := this.conf.ZkServers() 12 | r := make(map[string]string, len(servers)) 13 | for _, server := range servers { 14 | b, err := zkFourLetterWord(server, cmd, time.Minute) 15 | if err != nil { 16 | r[server] = color.Red(err.Error()) 17 | } else { 18 | r[server] = string(b) 19 | } 20 | 21 | } 22 | 23 | return r 24 | } 25 | -------------------------------------------------------------------------------- /zk/zkstat_test.go: -------------------------------------------------------------------------------- 1 | package zk 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/funkygao/assert" 7 | ) 8 | 9 | func TestParseStatResult(t *testing.T) { 10 | s := ` 11 | Zookeeper version: 3.4.6-1569965, built on 02/20/2014 09:09 GMT 12 | Clients: 13 | /127.0.0.1:54276[1](queued=0,recved=0,sent=0) 14 | /127.0.0.1:53766[1](queued=0,recved=0,sent=0) 15 | /127.0.0.1:55298[1](queued=0,recved=0,sent=0) 16 | /0:0:0:0:0:0:0:1%0:53888[0](queued=0,recved=1,sent=0) 17 | 18 | Latency min/avg/max: 0/0/4196 19 | Received: 3369429 20 | Sent: 3368840 21 | Connections: 4 22 | Outstanding: 0 23 | Zxid: 0x153b 24 | Mode: standalone 25 | Node count: 48 26 | ` 27 | stat := ParseStatResult(s) 28 | assert.Equal(t, "3.4.6-1569965", stat.Version) 29 | assert.Equal(t, "0/0/4196", stat.Latency) 30 | assert.Equal(t, "3369429", stat.Received) 31 | assert.Equal(t, "3368840", stat.Sent) 32 | assert.Equal(t, "4", stat.Connections) 33 | assert.Equal(t, "S", stat.Mode) 34 | assert.Equal(t, "48", stat.Znodes) 35 | } 36 | --------------------------------------------------------------------------------