├── .gitignore ├── README.md ├── VERSION ├── benchmarks ├── n30000_c100_strategy_random.png └── n30000_c100_strategy_sticky_batch.png ├── include └── kafboy_definitions.hrl ├── rebar.config ├── rel ├── reltool.config.sample └── vars.config └── src ├── kafboy.app.src ├── kafboy_app.erl ├── kafboy_demo.erl ├── kafboy_disco_handler.erl ├── kafboy_http_handler.erl ├── kafboy_logger.erl ├── kafboy_producer.erl ├── kafboy_startup_worker.erl └── kafboy_sup.erl /.gitignore: -------------------------------------------------------------------------------- 1 | .eunit 2 | .rebar 3 | rebar 4 | .dialyzer_plt 5 | deps 6 | doc/ 7 | ebin 8 | rel/ekafka/ 9 | *.o 10 | *.beam 11 | *.plt 12 | user_default.erl 13 | .DS_* 14 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # kafboy 2 | 3 | a low latency http server for writing to kafka. Optimized for heavy loads, hundreds of partition workers, supports batching, and more. Written in Erlang. Powered by `ekaf` and `Cowboy` 4 | 5 | ![ordered_round_robin](/benchmarks/n30000_c100_strategy_random.png) 6 | *see https://github.com/helpshift/ekaf for more information* 7 | 8 | ## Architecture 9 | 10 | With 0.8, Kafka clients take greater responsibility of deciding which broker and partition to publish to for a given topic. 11 | 12 | kafboy is a http wrapper over the ekafka client, that takes care of routing http requests to the right kafka broker socket. kafboy is self-aware over a cluster, and supoprts nodes routing requests arriving on any node, to the right process in the cluster. 13 | 14 | Simply send a POST with the desired JSON, to one of the following paths 15 | 16 | ## Fire and forget 17 | 18 | % fire and forget asynchronous call. the event is immediately send to kafka asynchronously 19 | POST /async/topic 20 | 21 | ## Synchronous calls 22 | 23 | % synchronous call that returns with the response after sending to kafka 24 | % `NOTE: a reply is sent until after kafka resonds, so is not recommended for low latency needs` 25 | POST /sync/topic 26 | 27 | ## Batching 28 | 29 | % will be added to a queue, and sent to the broker in a batch. 30 | % batch size, and flush timeout are configurable 31 | POST /batch/async/topic 32 | 33 | The payload is expected to be of the JSON format, but this can be configured to send the data as is. 34 | Very little else is done by this server in terms of dealing with kafka. It simply calls ekafka's produce function. 35 | 36 | ## Configuring kafboy 37 | 38 | {kafboy,[ 39 | % optional. you get to edit the json before it goes to kafka over here 40 | {kafboy_callback_edit_json, {my_module, massage_json}}, 41 | % M:F({post, Topic, Req, Json, Callback}) will be called. return with what you want to send to kafka 42 | % if an error occurs M:F({error, StatusCode, Message}) wil be called 43 | 44 | % optional. 45 | {kafboy_load_balancer, "http://localhost:8080/disco"} 46 | % should return plaintext of a node name with the right cookie eg: `node2@some-host` 47 | % can be used to distribute work to other nodes if ekaf thinks this one is too busy 48 | 49 | % optional, see more in kafboy_app.erl 50 | {kafboy_routes_async_batch,["/1/foo/:topic"]}, 51 | {kafboy_routes_async,[]}, 52 | {kafboy_routes_sync,[]} 53 | ]} 54 | 55 | In this example, you have to implement my_module:massage_json/1, on the lines of 56 | 57 | massage_json({post, Topic, Req, Body, Callback})-> 58 | Callback ! { edit_json_callback, Topic, Body }. 59 | 60 | 61 | Here is a more elaborate example: 62 | 63 | %% Let's check for the contents of Body 64 | %% and if its valid, add an extra field 65 | %% and then submit to kafka 66 | massage_json({post, Topic, _Req, Body, CallbackPid})-> 67 | case Body of 68 | [{<<"hello">>, Foo}] -> 69 | % either reply like this 70 | CallbackPid ! { edit_json_callback, Topic, Foo }; 71 | [] -> 72 | CallbackPid ! { edit_json_callback, {error, <>}}; 73 | _ -> 74 | %% i want to first reply 75 | CallbackPid ! { edit_json_callback, {200, <<"{\"ok\":\"fast reply\"}">>}}, 76 | 77 | %% then directly call ekaf, adding this msg to a batch 78 | Final = jsx:encode([{<<"extra">>,<<"true">>}| Body]), 79 | ekaf:produce_async_batched(Topic, Final) 80 | end; 81 | massage_json({error, Status, Message}) -> 82 | io:format("~n some ~p error: ~p",[Status, Message]), 83 | ok. 84 | 85 | 86 | kafboy will handle sending batch requests where the batch size is configurable, disconnections with brokers, and max retries. 87 | 88 | To see the API of ekaf, see http://github.com/helpshift/ekaf 89 | 90 | ## Quick start 91 | 92 | On terminal 1 93 | 94 | git clone https://github.com/helpshift/kafboy 95 | cd kafboy 96 | rebar get-deps compile 97 | erl -pa deps/*/ebin -pa ebin -s kafboy_demo 98 | 99 | On terminal 2 100 | 101 | curl localhost:9903/batch/async/ekaf -XPOST -d 'test=a' 102 | {"ok":"fast reply"} 103 | 104 | curl localhost:9903/batch/async/ekaf -XPOST -d 'hello=a' 105 | {"ok":1} 106 | 107 | curl localhost:9903/batch/async/ekaf -XPOST 108 | {"error":"ekaf.insufficient"} 109 | 110 | ## Configuring ekaf 111 | 112 | #### An example ekaf config 113 | 114 | {ekaf,[ 115 | 116 | % required. 117 | {ekaf_bootstrap_broker, {"localhost", 9091} }, 118 | % pass the {BrokerHost,Port} of atleast one permanent broker. Ideally should be 119 | % the IP of a load balancer so that any broker can be contacted 120 | 121 | 122 | % optional 123 | {ekaf_per_partition_workers,100}, 124 | % how big is the connection pool per partition 125 | % eg: if the topic has 3 partitions, then with this eg: 300 workers will be started 126 | 127 | 128 | % optional 129 | {ekaf_max_buffer_size, [{<<"topic">>,10000}, % for specific topic 130 | {ekaf_max_buffer_size,100}]}, % for other topics 131 | % how many events should the worker wait for before flushing to kafka as a batch 132 | 133 | 134 | % optional 135 | {ekaf_partition_strategy, random} 136 | % if you are not bothered about the order, use random for speed 137 | % else the default is ordered_round_robin 138 | 139 | 140 | ]}, 141 | 142 | To see how to configure the number of workers per topic+partition, the buffer batch size, buffer flush ttl, and more see the extensive README for `ekaf` https://github.com/helpshift/ekaf 143 | 144 | ## License 145 | 146 | ``` 147 | Copyright 2014, Helpshift, Inc. 148 | 149 | Licensed under the Apache License, Version 2.0 (the "License"); 150 | you may not use this file except in compliance with the License. 151 | You may obtain a copy of the License at 152 | 153 | http://www.apache.org/licenses/LICENSE-2.0 154 | 155 | Unless required by applicable law or agreed to in writing, software 156 | distributed under the License is distributed on an "AS IS" BASIS, 157 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 158 | See the License for the specific language governing permissions and 159 | limitations under the License. 160 | ``` 161 | 162 | Add a feature request at https://github.com/helpshift/ekaf or check the ekaf web server at https://github.com/helpshift/kafboy 163 | -------------------------------------------------------------------------------- /VERSION: -------------------------------------------------------------------------------- 1 | v0.6.0 2 | -------------------------------------------------------------------------------- /benchmarks/n30000_c100_strategy_random.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/helpshift/kafboy/fae3c4ec358512797ee968c089b4675f95cd61c7/benchmarks/n30000_c100_strategy_random.png -------------------------------------------------------------------------------- /benchmarks/n30000_c100_strategy_sticky_batch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/helpshift/kafboy/fae3c4ec358512797ee968c089b4675f95cd61c7/benchmarks/n30000_c100_strategy_sticky_batch.png -------------------------------------------------------------------------------- /include/kafboy_definitions.hrl: -------------------------------------------------------------------------------- 1 | %%====================================================================== 2 | %% Defaults 3 | %%====================================================================== 4 | -define(KAFBOY_DEFAULT_SYNC_URL , "/sync/:topic/"). 5 | -define(KAFBOY_DEFAULT_ASYNC_URL , "/async/:topic/"). 6 | -define(KAFBOY_DEFAULT_HTTP_PORT , 8000). 7 | 8 | -include_lib("ekaf/include/ekaf_definitions.hrl"). 9 | 10 | %%====================================================================== 11 | %% Macros 12 | %%====================================================================== 13 | -define(KAFBOY_AUTOSTART, true). 14 | -define(TCP_DEFAULT_SEND_OPTIONS,[{active, once},{nodelay,true},{keepalive,true},{broadcast,false}]). 15 | 16 | %%====================================================================== 17 | %% Records 18 | %%====================================================================== 19 | %% available to every request 20 | -record(kafboy_http, { sync=false::boolean(), 21 | batch=false::boolean(), 22 | callback_edit_json}). 23 | 24 | -record(kafboy_startup,{ 25 | logging, 26 | profiling 27 | }). 28 | 29 | -record(kafboy_enabled, { 30 | modules, 31 | functions, 32 | pids, 33 | times, 34 | levels 35 | }). 36 | -------------------------------------------------------------------------------- /rebar.config: -------------------------------------------------------------------------------- 1 | {erl_opts, [debug_info, 2 | {i, ["include"]}]}. 3 | {plugins,[rebar_ct]}. 4 | {sub_dirs,[ 5 | ]}. 6 | {deps,[ 7 | {cowboy,".*",{git,"https://github.com/extend/cowboy.git", "b57f94661f5"}}, 8 | {ekaf, ".*",{git,"https://github.com/helpshift/ekaf.git", {tag, "v1.5.4"}}}, 9 | {jsx,".*",{git,"https://github.com/talentdeficit/jsx.git", "2d8a7d0"}} 10 | ]}. 11 | {ct_dir, "test"}. 12 | {ct_log_dir,"test"}. 13 | {covertool_prefix_len, 2}. % Optional: Use module prefix as (imaginary) package name 14 | {xref_checks, [undefined_function_calls, 15 | undefined_functions, 16 | locals_not_used, 17 | exports_not_used, 18 | deprecated_function_calls, 19 | deprecated_functions]}. 20 | -------------------------------------------------------------------------------- /rel/reltool.config.sample: -------------------------------------------------------------------------------- 1 | {sys, [ 2 | {lib_dirs, ["../deps"]}, 3 | {erts, [{mod_cond, derived}, {app_file, strip}]}, 4 | {app_file, strip}, 5 | {rel, "node", "1", 6 | [ 7 | compiler, 8 | crypto, 9 | gproc, 10 | erlsha2, 11 | cowboy, 12 | inets, 13 | kernel, 14 | lager, 15 | {mnesia,load}, 16 | public_key, 17 | runtime_tools, 18 | sasl, 19 | ssl, 20 | stdlib, 21 | syntax_tools, 22 | {jsx,load}, 23 | {ekafka,load}, 24 | kafboy, 25 | exometer 26 | ]}, 27 | {rel, "start_clean", "", 28 | [ 29 | kernel, 30 | stdlib, 31 | sasl, 32 | tools 33 | ]}, 34 | {boot_rel, "node"}, 35 | {profile, embedded}, 36 | {incl_cond, derived}, 37 | {excl_archive_filters, [".*"]}, %% Do not archive built libs 38 | {excl_sys_filters, ["^bin/.*", "^erts.*/bin/(dialyzer|typer)", 39 | "^erts.*/(doc|info|include|lib|man|src)"]}, 40 | {excl_app_filters, ["\.gitignore"]}, 41 | {app, inets, [{incl_cond, include}]}, 42 | {app, kernel, [{incl_cond, include}]}, 43 | {app, lager, [{incl_cond, include}]}, 44 | {app, mnesia, [{incl_cond, include}]}, 45 | {app, public_key, [{incl_cond, include}]}, 46 | {app, runtime_tools, [{incl_cond, include}]}, 47 | {app, sasl, [{incl_cond, include}]}, 48 | {app, snmp, [{incl_cond, include}]}, 49 | {app, ssl, [{incl_cond, include}]}, 50 | {app, stdlib, [{incl_cond, include}]}, 51 | {app, tools, [{incl_cond, include}]}, 52 | {app, syntax_tools, [{incl_cond, include}]}, 53 | {app, compiler, [{incl_cond, include}]}, 54 | {app, crypto, [{incl_cond, include}]}, 55 | {app, gproc, [{incl_cond, include}]}, 56 | 57 | {app, erlsha2, [{incl_cond, include}]}, 58 | {app, jsx, [{incl_cond, include}]}, 59 | {app, exometer, [{incl_cond, include}]}, 60 | {app, ekafka, [{mod_cond,app}, {incl_cond, include}]}, 61 | {app, cowboy, [{incl_cond, include}]}, 62 | 63 | {app, kafboy, [{mod_cond, app},{incl_cond,derived},{lib_dir,"../"}]} 64 | ]}. 65 | 66 | {target_dir, "node"}. 67 | 68 | {overlay_vars, "vars.config"}. 69 | 70 | {overlay, [ 71 | {mkdir, "log/sasl"}, 72 | {copy, "files/erl", "\{\{erts_vsn\}\}/bin/erl"}, 73 | {copy, "files/nodetool", "\{\{erts_vsn\}\}/bin/nodetool"}, 74 | 75 | {template, "files/node", "bin/node"}, 76 | {template, "files/node.cmd", "bin/node.cmd"}, 77 | 78 | {copy, "files/start_erl.cmd", "bin/start_erl.cmd"}, 79 | {copy, "files/install_upgrade.escript", "bin/install_upgrade.escript"}, 80 | {template, "files/sys.config", "releases/\{\{rel_vsn\}\}/sys.config"}, 81 | 82 | {template, "files/vm.args", "releases/\{\{rel_vsn\}\}/vm.args"} 83 | ]}. 84 | -------------------------------------------------------------------------------- /rel/vars.config: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/helpshift/kafboy/fae3c4ec358512797ee968c089b4675f95cd61c7/rel/vars.config -------------------------------------------------------------------------------- /src/kafboy.app.src: -------------------------------------------------------------------------------- 1 | %% Feel free to use, reuse and abuse the code in this file. 2 | 3 | {application, kafboy, [ 4 | {description, "http gateway for the ekaf kafka producer"}, 5 | {vsn, "0.6.0"}, 6 | {modules, [kafboy_app,kafboy_sup,kafboy_startup_worker,kafboy_http_handler]}, 7 | {registered, []}, 8 | {applications, [ 9 | kernel, 10 | stdlib, 11 | cowboy, 12 | jsx, 13 | ekaf 14 | ]}, 15 | {mod, {kafboy_app, []}}, 16 | {env, []} 17 | ]}. 18 | -------------------------------------------------------------------------------- /src/kafboy_app.erl: -------------------------------------------------------------------------------- 1 | -module(kafboy_app). 2 | 3 | -behaviour(application). 4 | 5 | %% Application callbacks 6 | -export([start/1, start/2, stop/1]). 7 | 8 | %% includes 9 | -include("kafboy_definitions.hrl"). 10 | -ifdef(TEST). 11 | -include_lib("eunit/include/eunit.hrl"). 12 | -endif. 13 | 14 | -define(setup(F), {setup, fun start/0, fun stop/1, F}). 15 | 16 | %% =================================================================== 17 | %% Application callbacks 18 | %% =================================================================== 19 | %% @private 20 | -spec start(normal | {takeover, node()} | {failover, node()}, 21 | any()) -> {ok, pid()} | {ok, pid()} | 22 | {error, Reason::any()}. 23 | start(_StartType)-> 24 | ?MODULE:start(_StartType,[]). 25 | 26 | start(_StartType, _StartArgs) -> 27 | 28 | case application:get_env(ekaf, ekaf_bootstrap_broker) of 29 | undefined -> 30 | io:format("~n %% KAFBOY WARNING",[]), 31 | io:format("~n %% Please add an app env for the ekaf_bootstrap_broker",[]), 32 | io:format("~n %% {ekaf, [ {ekaf_bootstrap_broker, {\"localhost\", 9091}} ]",[]), 33 | io:format("~n %%",[]), 34 | {stop,{missing,ekaf_bootstrap_broker}}; 35 | _ -> 36 | start_with_ekaf(_StartType, _StartArgs) 37 | end. 38 | 39 | get_config_urls(#kafboy_http{ sync = false, batch = true } = State)-> 40 | [ {Url, kafboy_http_handler, State } || Url <- get_default(kafboy_routes_async_batch, [])]; 41 | get_config_urls(#kafboy_http{ sync = true } = State) -> 42 | [ {Url, kafboy_http_handler, State } || Url <- get_default(kafboy_routes_sync, [])]; 43 | get_config_urls(#kafboy_http{ sync = false } = State) -> 44 | [ {Url, kafboy_http_handler, State } || Url <- get_default(kafboy_routes_async, [])]. 45 | 46 | get_routes(InitState)-> 47 | [InitState#kafboy_http{ sync=false, batch=true}, 48 | InitState#kafboy_http{ sync=true }, 49 | InitState#kafboy_http{ sync=false }]. 50 | 51 | start_with_ekaf(_StartType, _StartArgs)-> 52 | SyncUrl = get_default(kafboy_sync_url,?KAFBOY_DEFAULT_SYNC_URL), 53 | AsyncUrl = get_default(kafboy_async_url,?KAFBOY_DEFAULT_ASYNC_URL), 54 | Port = get_default(kafboy_http_port,?KAFBOY_DEFAULT_HTTP_PORT), 55 | EditJsonCallback = get_default(kafboy_callback_edit_json, undefined), 56 | InitState = #kafboy_http{ callback_edit_json = EditJsonCallback}, 57 | CustomUrls = lists:foldl(fun(TempState,Acc)-> 58 | get_config_urls(TempState) ++ Acc 59 | end,[], get_routes(InitState)), 60 | Dispatch = cowboy_router:compile([{'_', 61 | CustomUrls ++ 62 | [{"/echo_post", kafboy_disco_handler,InitState}, 63 | {"/disco", kafboy_disco_handler,InitState}, 64 | 65 | {SyncUrl, kafboy_http_handler, InitState}, 66 | {AsyncUrl, kafboy_http_handler, InitState#kafboy_http{ sync = false}}, 67 | 68 | {"/batch/"++SyncUrl, kafboy_http_handler, InitState#kafboy_http{ batch=true}}, 69 | {"/batch/"++AsyncUrl, kafboy_http_handler, InitState#kafboy_http{ sync=false, batch=true}} 70 | ]} 71 | ]), 72 | %?INFO_MSG("start with port ~p syncurl ~p asyncurl ~p",[Port,SyncUrl,AsyncUrl]), 73 | {ok, _Ref} = cowboy:start_http(http, 100, [{port, Port}], [ 74 | {env, [{dispatch, Dispatch}]} 75 | ,{backlog, 4196} 76 | ,{raw, 6, 9, <<30:32/native>>} 77 | ,{max_connections, infinity} 78 | ,{max_keepalive, 150} 79 | %,{timeout,1000} 80 | ]), 81 | case kafboy_sup:start_link(_StartArgs) of 82 | {ok,Pid}-> 83 | {ok,Pid}; 84 | _Error -> 85 | {stop,_Error} 86 | end. 87 | 88 | -spec stop(State::any()) -> ok. 89 | stop(_State) -> 90 | ok. 91 | 92 | get_default(Key,Default)-> 93 | case kafboy_startup_worker:read_env(Key) of 94 | {true,Val}-> 95 | Val; 96 | _ -> 97 | Default 98 | end. 99 | -------------------------------------------------------------------------------- /src/kafboy_demo.erl: -------------------------------------------------------------------------------- 1 | -module(kafboy_demo). 2 | 3 | -export([start/0, massage_json/1]). 4 | 5 | -include_lib("kafkamocker/include/kafkamocker.hrl"). 6 | 7 | %% know where to curl to and for which topic 8 | -define(TOPIC, <<"ekaf">>). 9 | -define(KAFBOY_HTTP_PORT, 9903). 10 | 11 | -define(USE_AN_ACTUAL_KAFKA_SERVER, false). 12 | %% when set to false, uses the embedded kafkamocker 13 | 14 | -define(KAFKAMOCKER_PORT, 9908). 15 | 16 | 17 | 18 | massage_json({post, Topic, _Req, Body, CallbackPid})-> 19 | case Body of 20 | [] -> 21 | CallbackPid ! { edit_json_callback, {error, <>}}; 22 | [{<<"hello">>, Foo}] -> 23 | % either reply like this 24 | CallbackPid ! { edit_json_callback, Topic, Foo }; 25 | _ -> 26 | %% i want to first reply 27 | CallbackPid ! { edit_json_callback, {200, <<"{\"ok\":\"fast reply\"}">>}}, 28 | 29 | %% then directly call ekaf, adding this msg to a batch 30 | Final = jsx:encode([{<<"extra">>,<<"true">>}| Body]), 31 | ekaf:produce_async_batched(Topic, Final) 32 | end; 33 | massage_json({error, Status, Message}) -> 34 | io:format("~n some ~p error: ~p",[Status, Message]), 35 | ok. 36 | 37 | start()-> 38 | Topic = ?TOPIC, 39 | [application:load(X) ||X<- [ekaf, kafkamocker, kafboy] ], 40 | 41 | %% mock a kafka server 42 | case ?USE_AN_ACTUAL_KAFKA_SERVER of 43 | true -> 44 | %% use your own actual broker 45 | application:set_env(ekaf, ekaf_bootstrap_broker, {"localhost", 9091}); 46 | _ -> 47 | %% or mock it with an embedded kafkamocker 48 | application:set_env(ekaf, ekaf_bootstrap_broker, {"localhost", ?KAFKAMOCKER_PORT}), 49 | application:set_env(kafkamocker, kafkamocker_callback, kafka_consumer), 50 | application:set_env(kafkamocker, kafkamocker_bootstrap_broker, {"localhost",?KAFKAMOCKER_PORT}), 51 | application:set_env(kafkamocker, kafkamocker_bootstrap_topics, Topic), 52 | [ application:start(App) || App <- [gproc, ranch, kafkamocker]], 53 | kafkamocker_fsm:start_link({metadata, topics_metadata()}) 54 | end, 55 | 56 | %% ekaf is the kafka client that kafboy internally uses 57 | %% more info on ekaf and its configuration at http://github.com/helpshift/ekaf 58 | application:set_env(ekaf, ekaf_per_partition_workers, 1), 59 | application:set_env(ekaf, ekaf_bootstrap_topics, Topic), 60 | application:set_env(ekaf, ekaf_buffer_ttl, 10), 61 | 62 | %% start kafboy 63 | %% every POST gets called into the callback massage_json 64 | %% it must also catch {error, Msg}, allowing you to log it, etc 65 | application:set_env(kafboy, kafboy_http_port, ?KAFBOY_HTTP_PORT), 66 | application:set_env(kafboy, kafboy_callback_edit_json, {?MODULE, massage_json}), 67 | 68 | %% ekaf needs groc, ranch, kafkamocker 69 | %% kafboy needs cowboy, cowlib, jsx 70 | [ application:start(App) || App <- [crypto, gproc, ranch, kafkamocker, ekaf, cowlib, cowboy, jsx, kafboy]]. 71 | 72 | topics_metadata()-> 73 | Topics = [?TOPIC], 74 | #kafkamocker_metadata{ 75 | brokers = [ #kafkamocker_broker{ id = 1, host = "localhost", port = 9908 }], 76 | topics = [ #kafkamocker_topic { name = Topic, 77 | partitions = [ #kafkamocker_partition {id = 0, leader = 1, 78 | replicas = [#kafkamocker_replica{ id = 1 }], 79 | isrs = [#kafkamocker_isr{ id = 1 }] 80 | } 81 | ] 82 | } 83 | || Topic <- Topics]}. 84 | -------------------------------------------------------------------------------- /src/kafboy_disco_handler.erl: -------------------------------------------------------------------------------- 1 | %% @doc /disco should return the current node 2 | %% @end 3 | -module(kafboy_disco_handler). 4 | 5 | -include("kafboy_definitions.hrl"). 6 | 7 | -export([init/3, 8 | handle/2, 9 | handle_method/3, 10 | terminate/3]). 11 | 12 | init(_Transport, Req, State) -> 13 | {ok, Req, State}. 14 | 15 | handle(Req,State)-> 16 | {Method, Req1} = cowboy_req:method(Req), 17 | {ok, Req2} = handle_method(Method, Req1, State), 18 | %%TODO common signature authentication 19 | {ok, Req2, State}. 20 | 21 | handle_method(<<"POST">>, Req, _State)-> 22 | Body = case cowboy_req:has_body(Req) of 23 | true -> 24 | {ok, PostVals, _} = cowboy_req:body_qs(Req), 25 | PostVals; 26 | _ -> 27 | [] 28 | end, 29 | Json = jsx:encode(Body), 30 | cowboy_req:reply(200, [ 31 | {<<"content-type">>, <<"application/json; charset=utf-8">>} 32 | ], Json, Req); 33 | handle_method(_, Req, _State)-> 34 | cowboy_req:reply(200, [ 35 | {<<"content-type">>, <<"text/plain; charset=utf-8">>} 36 | ], atom_to_list(node()), Req). 37 | 38 | terminate(_Reason, _Req, _State) -> 39 | ok. 40 | -------------------------------------------------------------------------------- /src/kafboy_http_handler.erl: -------------------------------------------------------------------------------- 1 | %% @doc 2 | %% web server that allows editing the json before sending to kafka 3 | %% @end 4 | -module(kafboy_http_handler). 5 | -behaviour(cowboy_loop_handler). 6 | -export([init/3]). 7 | -export([handle/2, handle_method/3, handle_url/5, info/3]). 8 | -export([terminate/3]). 9 | -define(DEFAULT_HEADER,[{<<"content-type">>, <<"application/json; charset=utf-8">>}]). 10 | -export([test_callback_edit_json/2]). 11 | 12 | %% includes 13 | -include("kafboy_definitions.hrl"). 14 | -ifdef(TEST). 15 | -include_lib("eunit/include/eunit.hrl"). 16 | -endif. 17 | 18 | test_callback_edit_json(PropList,Callback)-> 19 | Callback(PropList). 20 | 21 | init(_Transport, Req, InitState) -> 22 | {Method, _} = cowboy_req:method(Req), 23 | handle_method(Method, Req, InitState). 24 | 25 | handle_method(<<"GET">>, Req, State)-> 26 | {ok, Req1, Next} = fail(<<"POST exp">>, Req, State), 27 | {shutdown, Req1, Next}; 28 | handle_method(<<"POST">>, Req, #kafboy_http{ callback_edit_json = {M,F}} = State)-> 29 | Self = self(), 30 | % NOTE: the cowboy_req:body_qs, and read buffer should be bound in the same proess 31 | % filed an issue, got an explanation at 32 | % https://github.com/extend/cowboy/issues/718 33 | case cowboy_req:body_qs(Req) of 34 | {ok, Body, _} -> 35 | {Topic, _} = cowboy_req:binding(topic, Req), 36 | M:F({post, Topic, Req, Body, Self}), 37 | {loop, Req, State}; 38 | _E -> 39 | {ok, Req1, Next} = fail(<<"no_body">>, Req, State), 40 | {shutdown, Req1, Next} 41 | end; 42 | %% No callback 43 | handle_method(_Method, Req, State)-> 44 | ReqBody = cowboy_req:body_qs(Req), 45 | Self = self(), 46 | case 47 | ReqBody 48 | of 49 | {ok, Body, Req1} -> 50 | {Topic, _} = cowboy_req:binding(topic, Req1), 51 | Self ! {edit_json_callback, Topic, Body}; 52 | _ -> 53 | Self ! {edit_json_callback, {error,<<"no_body">>}} 54 | end, 55 | {loop, Req, State, 500}. 56 | 57 | handle(Req, {error,queue_full}=State)-> 58 | fail(<<"system queue full">>, Req, State); 59 | handle(Req, {error,overload}=State)-> 60 | fail(<<"system overload">>, Req, State); 61 | handle(Req, {error,Msg}=State)-> 62 | fail(Msg, Req, State); 63 | handle(Req, State)-> 64 | {Method, _} = cowboy_req:method(Req), 65 | handle_method(Method, Req, State). 66 | 67 | fail({error,Msg}, Req, State) -> 68 | fail(Msg, Req, State); 69 | fail(Msg, Req, #kafboy_http{ callback_edit_json = {M,F}} = _State) when is_binary(Msg) -> 70 | M:F({error, 400, Msg}), 71 | {ok,Req1} = cowboy_req:reply(400,?DEFAULT_HEADER, <<"{\"error\":\"",Msg/binary,"\"}">>, Req), 72 | {ok, Req1, undefined}; 73 | fail(_Msg, Req, #kafboy_http{ callback_edit_json = {M,F}} = _State) -> 74 | M:F({error, 400, _Msg}), 75 | Req1 = cowboy_req:reply(400, ?DEFAULT_HEADER, <<"{\"error\":\"unknown\"">>, Req), 76 | {ok, Req1, undefined}. 77 | 78 | info({edit_json_callback,{200,Message}}, Req, _State)-> 79 | reply(Message,Req); 80 | info({edit_json_callback,{error,_}=Error}, Req, _State)-> 81 | fail(Error,Req,_State); 82 | info({edit_json_callback,[]}, Req, _State)-> 83 | fail({error,<<"empty">>},Req, _State); 84 | info({edit_json_callback, Topic, Body}, Req, State)-> 85 | %% Produce to topic 86 | %% See bosky101/ekaf for what happens under the hood 87 | %% connection pooling, batched writes, and so on 88 | case cowboy_req:path(Req) of 89 | {Url,_} -> 90 | info({edit_json_callback, Topic, Body, Url}, Req, State); 91 | _Path -> 92 | ?INFO_MSG("info/3 dont know what to do with ~p",[_Path]), 93 | fail(<<"invalid">>,Req,State) 94 | end; 95 | info({edit_json_callback, Topic, Body, Url}, Req, State)-> 96 | handle_url(Url, Topic, Body, Req, State); 97 | 98 | info(_Message, Req, State) -> 99 | fail({error,<<"unexp">>}, Req, State). 100 | 101 | handle_url(_Url, _Topic, {error,Reason}, Req, State)-> 102 | fail(Reason, Req, State); 103 | handle_url(Url, Topic, Message, Req, State)-> 104 | case Url of 105 | <<"/batch/async/",_/binary>> -> 106 | R = reply(<<"{\"ok\":1}">>,Req), 107 | kafboy_producer:async_batch(Topic, Message, []), 108 | R; 109 | <<"/batch/sync/",_/binary>> -> 110 | kafboy_producer:sync_batch(Topic, Message, []); 111 | <<"/async/",_/binary>> -> 112 | R = reply(<<"{\"ok\":1}">>,Req), 113 | kafboy_producer:async(Topic, Message, []), 114 | R; 115 | <<"/sync/",_/binary>> -> 116 | ProduceResponse = kafboy_producer:sync(Topic, Message, []), 117 | %% in case you want to create your own list, see the below function 118 | ResponseList = ekaf_lib:response_to_proplist(ProduceResponse), 119 | ResponseJson = jsx:encode(ResponseList), 120 | reply(ResponseJson,Req); 121 | _Path -> 122 | ?INFO_MSG("handle_url/5: dont know what to do with ~p",[_Path]), 123 | fail({error,<<"invalid">>},Req, State) 124 | end. 125 | 126 | terminate(_Reason, _Req, _State) -> 127 | ok. 128 | 129 | reply(Json,Req) when is_list(Json)-> 130 | reply(ekaf_utils:atob(Json), Req); 131 | reply(Json,Req)-> 132 | {ok,Req1} = cowboy_req:reply(200,[{<<"content-type">>, <<"application/json; charset=utf-8">>}], Json, Req), 133 | {ok, Req1, undefined}. 134 | -------------------------------------------------------------------------------- /src/kafboy_logger.erl: -------------------------------------------------------------------------------- 1 | -module(kafboy_logger). 2 | 3 | -compile([export_all]). 4 | 5 | %% includes 6 | -include("kafboy_definitions.hrl"). 7 | -ifdef(TEST). 8 | -include_lib("eunit/include/eunit.hrl"). 9 | -endif. 10 | 11 | log(Format,Args)-> 12 | ?INFO_MSG(Format,Args). 13 | 14 | info_msg(_Mod,_Line,Format,Args)-> 15 | %not using lager 16 | %lager:info(Format,Args). 17 | ?INFO_MSG(Format,Args). 18 | 19 | enabled_modules()-> 20 | [ 21 | %add modules you want traced here: eg 22 | %kafboy_http_handler 23 | ]. 24 | 25 | enabled_functions()-> 26 | ['_']. 27 | 28 | enable_trace()-> 29 | dbg:start(), 30 | dbg:tracer(), 31 | dbg:p(all,[c]). 32 | 33 | enable_module(Mod)-> 34 | gen_server:call(kafboy_startup_worker,{log,enable_module,Mod}). 35 | 36 | disable_module(Mod)-> 37 | gen_server:call(kafboy_startup_worker,{log,disable_module,Mod}). 38 | -------------------------------------------------------------------------------- /src/kafboy_producer.erl: -------------------------------------------------------------------------------- 1 | %%%------------------------------------------------------------------- 2 | %%% @doc produce to kafka 3 | %%% @end 4 | %%%------------------------------------------------------------------- 5 | -module(kafboy_producer). 6 | 7 | -include("kafboy_definitions.hrl"). 8 | 9 | -export([sync/3,async/3, 10 | sync_batch/3, async_batch/3]). 11 | 12 | sync(Topic,Data,_Opts) -> 13 | ekaf:produce_sync(Topic, Data). 14 | 15 | sync_batch(Topic,Data,_Opts)-> 16 | ekaf:produce_sync_batched(Topic, Data). 17 | 18 | async(Topic,Data,_Opts) -> 19 | ekaf:produce_async(Topic, Data). 20 | 21 | async_batch(Topic,Data,_Opts)-> 22 | ekaf:produce_async_batched(Topic, Data). 23 | -------------------------------------------------------------------------------- /src/kafboy_startup_worker.erl: -------------------------------------------------------------------------------- 1 | -module(kafboy_startup_worker). 2 | 3 | -behaviour(gen_server). 4 | 5 | %% API 6 | -export([ 7 | get_child_spec/0, get_child_spec/1, read_env/1, 8 | cookie_setup/0, 9 | kickoff/1, 10 | kickoff_master/1,kickoff_master_new_node/1,kickoff_master_restart/1, 11 | kickoff_slave/1, 12 | slave_added/1, 13 | setup_metrics/0, later/0, bootup/0, 14 | log/4, info/0, profile_modules/1, get_bin_path/0 15 | ]). 16 | %% ------------------------------------------------------------------ 17 | %% gen_server Function Exports 18 | %% ------------------------------------------------------------------ 19 | -export([start/0, start_link/1]). 20 | -export([init/1, handle_call/3, handle_cast/2, handle_info/2, 21 | terminate/2, code_change/3]). 22 | 23 | %% includes 24 | -include("kafboy_definitions.hrl"). 25 | -ifdef(TEST). 26 | -include_lib("eunit/include/eunit.hrl"). 27 | -endif. 28 | 29 | 30 | %% Constants 31 | -define(SERVER, ?MODULE). 32 | -define(WORKER_PREFIX,"kafboy_startup_"). 33 | -define(DISABLED,false). 34 | -define(DEBUG,false). 35 | -define(MAX_WORKERS,1). 36 | 37 | %%%=================================================================== 38 | %%% API 39 | %%%=================================================================== 40 | log(Mod,Line,Format, Args)-> 41 | gen_server:cast(?SERVER,{log,Mod,Line,Format,Args}). 42 | 43 | info()-> 44 | gen_server:call(?SERVER,info). 45 | 46 | profile_modules(_Args)-> 47 | application:start(runtime_tools), 48 | 49 | case ?MODULE:read_env(kafboy_profiling_apps) of 50 | {true,ProfilingApps} -> 51 | [ application:start(ProfilingApp) || ProfilingApp <- ProfilingApps ]; 52 | _E -> 53 | ok 54 | end, 55 | 56 | %application:start(statsderl), 57 | 58 | ?MODULE:setup_metrics(), 59 | 60 | case ?MODULE:read_env(kafboy_enable_trace) of 61 | {_,true} -> 62 | kafboy_logger:enable_trace(), 63 | lists:foldl(fun(ModToAdd,Acc)-> 64 | [ 65 | dbg:tpl({ModToAdd,FuncToAdd,'_'},[]) 66 | || FuncToAdd <- kafboy_logger:enabled_functions() 67 | ], 68 | 69 | sets:add_element(ModToAdd,Acc) 70 | end, sets:new(), kafboy_logger:enabled_modules() ); 71 | _ -> 72 | sets:new() 73 | end. 74 | 75 | %%-------------------------------------------------------------------- 76 | %% @doc 77 | %% Starts the server 78 | %% 79 | %% @spec start_link() -> {ok, Pid} | ignore | {error, Error} 80 | %% @end 81 | %%-------------------------------------------------------------------- 82 | start()-> 83 | start_link([]). 84 | 85 | start_link(Args) -> 86 | gen_server:start_link({local,?SERVER}, ?MODULE, Args,[]). 87 | 88 | %% get_child_spec() -> SupervisorTreeSpecifications 89 | %% @doc The supervisor calls each worker's specification from the worker itself 90 | %% This makes it easier to manage different workers, and placing the logic away 91 | %% from the supervisor itself. In this case there is no need to explicitly start 92 | %% a child worker. 93 | get_child_spec()-> 94 | ?MODULE:get_child_spec(?MAX_WORKERS). 95 | 96 | get_child_spec(_NoOfWorkers)-> 97 | Worker = {kafboy_startup_worker, {kafboy_startup_worker, start_link, [[read_env(trace),read_env(debug), read_env(fresh)]] }, temporary, 2000, worker, dynamic}, 98 | [Worker]. 99 | 100 | read_env(Field) -> 101 | Got = application:get_env(kafboy,Field), 102 | case Got of 103 | {ok,Val} -> 104 | {true,Val}; 105 | _E -> 106 | {false,Field} 107 | end. 108 | 109 | %% ------------------------------------------------------------------ 110 | %% gen_server Function Definitions 111 | %% ------------------------------------------------------------------ 112 | init(Args) -> 113 | %% Note: don't call ?INFO_MSG until here 114 | Set = ?MODULE:profile_modules(Args), 115 | 116 | %% your {kafboy,[]} app config can decide whether to trace calls 117 | State = #kafboy_startup{logging=#kafboy_enabled{modules=Set}}, 118 | 119 | ?MODULE:kickoff(Args), 120 | 121 | {ok, State}. 122 | 123 | kickoff(Args)-> 124 | 125 | %% deps 126 | %%A = sha2:start(), 127 | A = ok, 128 | 129 | %% security 130 | B = ?MODULE:cookie_setup(), 131 | 132 | %% master vs slave 133 | C = case ?MODULE:read_env(kafboy_master) of 134 | {_,true} -> 135 | kickoff_master(Args); 136 | _ -> 137 | kickoff_slave(Args) 138 | end, 139 | 140 | {ok,[ 141 | {sha,A}, 142 | {cookie,B}, 143 | {kickoff,C} 144 | ]}. 145 | 146 | kickoff_master(Args)-> 147 | %% Boilerplate for future distributed work. 148 | %% For now just joins cluster 149 | case ?MODULE:read_env(kafboy_fresh) of 150 | {_,true} -> 151 | ?INFO_MSG("asked to kickoff new node since value of new in ~p",[Args]), 152 | ?MODULE:kickoff_master_new_node(Args), 153 | ok; 154 | NotTrue-> 155 | ?INFO_MSG("asked to kickoff restart since value of new in ~p is ~p, ~nknown nodes are ~p",[Args,NotTrue,nodes()]), 156 | ?MODULE:kickoff_master_restart(Args), 157 | ok 158 | end, 159 | 160 | later_master(). 161 | 162 | kickoff_master_new_node(_Args)-> 163 | ?INFO_MSG("done kickoff new node",[]), 164 | ok. 165 | 166 | kickoff_master_restart(_Args)-> 167 | ?INFO_MSG("going to call kafboy_mgr_db:init/0",[]), 168 | ok. 169 | 170 | kickoff_slave(_Args)-> 171 | %% ask master to allow location transparent access 172 | %% of tables to this node 173 | case ?MODULE:read_env(kafboy_load_balancer) of 174 | {true,DiscoUrl} -> 175 | case httpc:request(get, {DiscoUrl, []}, [ 176 | %% TODO: ssl support ? 177 | %% {ssl,[{verify,verify_peer}]} 178 | ], [{sync, true}]) of 179 | {ok,{_Status, _Headers, Result}} -> 180 | MasterNode = list_to_atom(Result), 181 | ping_master(MasterNode), 182 | ok; 183 | _E -> 184 | ?INFO_MSG("Cant ping master since didnt get expected node as result for /disco => ~n~p",[_E]) 185 | end; 186 | 187 | _ -> 188 | ok 189 | end, 190 | later_slave(). 191 | 192 | ping_master(MasterNode)-> 193 | SelfNode = node(), 194 | 195 | A = net_adm:ping(MasterNode), 196 | 197 | ?INFO_MSG("Slave ~p pinging master ~p gave ~p",[SelfNode,MasterNode,A]), 198 | 199 | A. 200 | 201 | 202 | slave_added(_SlaveNode)-> 203 | ok. 204 | 205 | cookie_setup()-> 206 | case ?MODULE:read_env(kafboy_set_cookie) of 207 | {true,CookieVal} -> 208 | erlang:set_cookie(node(),CookieVal); 209 | _ -> 210 | ok 211 | end. 212 | 213 | handle_call({log,enable_module,Mod},_From, State) -> 214 | PresentSet = (State#kafboy_startup.logging)#kafboy_enabled.modules, 215 | NextSet = sets:add_element(Mod,PresentSet), 216 | NextLogging = #kafboy_enabled{modules=NextSet}, 217 | dbg:tpl({Mod,'_','_'},[]), 218 | NextState = State#kafboy_startup{logging=NextLogging}, 219 | {reply, ok, NextState}; 220 | 221 | handle_call({log,disable_module,Mod},_From, State)-> 222 | PresentSet = (State#kafboy_startup.logging)#kafboy_enabled.modules, 223 | NextSet = sets:del_element(Mod,PresentSet), 224 | dbg:ctpl(Mod,'_','_'), 225 | NextLogging = #kafboy_enabled{modules=NextSet}, 226 | NextState = State#kafboy_startup{logging=NextLogging}, 227 | {reply, ok, NextState}; 228 | 229 | handle_call({trace_enable_module,_Mod},_From, State)-> 230 | {reply, false, State}; 231 | 232 | handle_call(_Msg,_From, State)-> 233 | io:format("~p:handle_info/3 unknown guard for ~p",[?MODULE,_Msg]), 234 | {reply, State, State}. 235 | 236 | %% Cast 237 | handle_cast({log,Mod,Line,Format,Args}=_Msg,State)-> 238 | %%io:format("~p:handle_cast/2 log ~p",[?MODULE,_Msg]), 239 | Mods = (State#kafboy_startup.logging)#kafboy_enabled.modules, %%kafboy_logger:enabled_modules() 240 | case sets:is_element(Mod,Mods) of 241 | true -> 242 | kafboy_logger:info_msg(Mod,Line,Format, Args); 243 | _ -> 244 | ok 245 | end, 246 | {noreply,State}; 247 | 248 | handle_cast(_Msg, State) -> 249 | io:format("~p:handle_cast/2 unknown guard for ~p",[?MODULE,_Msg]), 250 | {noreply, State}. 251 | 252 | handle_info(_Info, State) -> 253 | io:format("~p:handle_info/2 unknown guard for ~p",[?MODULE,_Info]), 254 | {noreply, State}. 255 | 256 | terminate(_Reason, _State) -> 257 | ok. 258 | 259 | code_change(_OldVsn, State, _Extra) -> 260 | {ok, State}. 261 | 262 | %% ------------------------------------------------------------------ 263 | %% Internal Function Definitions 264 | %% ------------------------------------------------------------------ 265 | 266 | get_bin_path() -> 267 | case os:getenv("BIN_PATH") of 268 | false -> 269 | case code:priv_dir(kafboy) of 270 | {error, _} -> 271 | "."; 272 | Path -> 273 | filename:join([Path, "bin"]) 274 | end; 275 | Path -> 276 | Path 277 | end. 278 | 279 | later()-> 280 | case ?KAFBOY_AUTOSTART of 281 | true -> 282 | spawn(fun()-> 283 | receive _X -> ok 284 | after 5000 -> 285 | ?MODULE:bootup() 286 | end 287 | end); 288 | _ -> 289 | ?INFO_MSG("not auto-starting previously started acccounts",[]) 290 | end. 291 | 292 | later_master()-> 293 | later(), 294 | ok. 295 | 296 | later_slave()-> 297 | later(), 298 | ok. 299 | 300 | bootup()-> 301 | ok. 302 | 303 | setup_metrics()-> 304 | Metrics = metrics(), 305 | 306 | lists:map(fun(X) when is_binary(X) -> 307 | %folsom_metrics:new_histogram(<<"slide.",X/binary>>,slide_uniform, {60,1028}), 308 | %folsom_metrics:new_counter(<<"counter.",X/binary>>);(X) -> 309 | ok; 310 | (X)-> 311 | ?INFO_MSG("dont know what to do with ~p",[X]) 312 | end, Metrics). 313 | 314 | metrics()-> 315 | []. 316 | -------------------------------------------------------------------------------- /src/kafboy_sup.erl: -------------------------------------------------------------------------------- 1 | -module(kafboy_sup). 2 | 3 | -behaviour(supervisor). 4 | 5 | %% API 6 | -export([start_link/0,start_link/1,start_child/2,kill_child/1]). 7 | %% fault-tolderance/supervisor related 8 | -export([ get_child_spec/1 ]). 9 | %% Supervisor callbacks 10 | -export([init/1,restart_c/1]). 11 | 12 | %% includes 13 | -include("kafboy_definitions.hrl"). 14 | -ifdef(TEST). 15 | -include_lib("eunit/include/eunit.hrl"). 16 | -endif. 17 | 18 | %% Constants 19 | -define(SERVER, ?MODULE). 20 | -define(CHILD(I, Type), {I, {I, start_link, []}, permanent, 5000, Type, [I]}). 21 | -define(WORKER_PREFIX,"kafboy_"). 22 | -define(PG_PREFIX, "kafboy_pg_"). 23 | -define(RECHECK_DELAY, 10). 24 | -define(DISABLED,false). 25 | -define(DEBUG,false). 26 | -define(MAX_WORKERS,2). 27 | %% =================================================================== 28 | %% API functions 29 | %% =================================================================== 30 | -spec start_link() -> {ok, pid()} | any(). 31 | start_link()-> 32 | ?MODULE:start_link([]). 33 | 34 | -spec start_link(State::any()) -> {ok, pid()} | any(). 35 | start_link(_Args) -> 36 | supervisor:start_link({local, ?SERVER}, ?MODULE, _Args). 37 | 38 | %% get_child_spec() -> SupervisorTreeSpecifications 39 | %% @doc The supervisor calls each worker's specification from the worker itself 40 | %% This makes it easier to manage different workers, and placing the logic away 41 | %% from the supervisor itself. In this case there is no need to explicitly start 42 | %% a child worker. 43 | get_child_spec(NoOfWorkers)-> 44 | Workers = lists:map(fun( X )-> 45 | WorkerId = list_to_atom(?WORKER_PREFIX++ integer_to_list(X)), 46 | {WorkerId, {?MODULE, start_link, [{WorkerId}] }, 47 | permanent, 2000, worker, [?MODULE]} 48 | end, lists:seq(1,NoOfWorkers)), 49 | Workers. 50 | 51 | %%==================================================================== 52 | %% Supervisor callbacks 53 | %%==================================================================== 54 | %%-------------------------------------------------------------------- 55 | %% Func: init(Args) -> {ok, {SupFlags, [ChildSpec]}} | 56 | %% ignore | 57 | %% {error, Reason} 58 | %% Description: Whenever a supervisor is started using 59 | %% supervisor:start_link/[2,3], this function is called by the new process 60 | %% to find out about restart strategy, maximum restart frequency and child 61 | %% specifications. 62 | %%-------------------------------------------------------------------- 63 | %% @private 64 | -spec init(list()) -> {ok, _}. 65 | init(InitArgs) -> 66 | Workers = case InitArgs of 67 | _ -> 68 | lists:flatten ( 69 | kafboy_startup_worker:get_child_spec(1) 70 | %++ kafboy_mgr_db:get_child_spec(1) 71 | %++ kafboy_proxy:get_child_spec(1) 72 | %++ kafboy_http_api:get_child_spec(1) 73 | ) 74 | end, 75 | 76 | {ok, {{ one_for_one, 10, 10}, 77 | Workers } 78 | }. 79 | 80 | %%==================================================================== 81 | %% Internal functions 82 | %%==================================================================== 83 | start_child(Module,ChildSpec) when is_tuple(ChildSpec) -> 84 | supervisor:start_child(Module,ChildSpec); 85 | 86 | start_child(Module,InitArgs) -> 87 | case Module:get_child_spec(InitArgs) of 88 | [] -> 89 | ?INFO_MSG("not starting ~p since []",[InitArgs]), 90 | ok; 91 | [ChildSpec] -> 92 | start_child(Module,ChildSpec); 93 | _E -> 94 | error_logger:info_msg("~n ~p unexp when start_child. got ~p",[Module,_E]), 95 | error 96 | end. 97 | 98 | kill_child(SID) -> 99 | GroupName = "kafboy_worker_"++SID, 100 | PID = pg2:get_closest_pid(GroupName), 101 | {ok, {_,ChildSpecs}} = ?MODULE:init(SID), 102 | [ supervisor:terminate_child(PID,ChildSpec) || ChildSpec <- ChildSpecs]. 103 | 104 | restart_c(SID) -> 105 | GroupName = "kafboy_worker_"++SID, 106 | PID = pg2:get_closest_pid(GroupName), 107 | {ok, {_,ChildSpecs}} = ?MODULE:init(SID), 108 | [ supervisor:restart_child(PID,ChildSpec) || ChildSpec <- ChildSpecs]. 109 | --------------------------------------------------------------------------------