├── .gitignore ├── Json VS Protobuf ├── CSharp │ ├── .gitignore │ ├── ICSharpCode.SharpZipLib.dll │ ├── LitJson.dll │ ├── Makefile │ ├── PackBenchmark.cs │ ├── ProtocolDefine.dll │ ├── ProtocolSerializer.dll │ ├── UnpackBenchmark.cs │ └── protobuf-net.dll ├── Erlang │ ├── .gitignore │ ├── benchmark │ ├── include │ │ ├── gpb.hrl │ │ └── protocol.hrl │ ├── rebar.config │ └── src │ │ ├── json_vs_proto.app.src │ │ ├── json_vs_proto.erl │ │ └── protocol.erl ├── Python │ ├── pack.py │ ├── protocol_pb2.py │ └── unpack.py ├── README.md ├── chart.png ├── data.json ├── data.json.gz ├── data.pb └── protocol.proto └── c++ ├── .gitignore ├── README.md ├── context.cpp ├── coro.h ├── coro_basic.cpp ├── coro_echo_server.cpp ├── coro_echo_server.h ├── coro_event_and_queue.cpp └── coroutine_symmetric.cpp /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.swp 3 | *.bak 4 | *.beam 5 | *.exe 6 | -------------------------------------------------------------------------------- /Json VS Protobuf/CSharp/.gitignore: -------------------------------------------------------------------------------- 1 | *.exe 2 | -------------------------------------------------------------------------------- /Json VS Protobuf/CSharp/ICSharpCode.SharpZipLib.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yueyoum/playground/1b90abc14b306f99c7f2f70e629cb969fbebf26a/Json VS Protobuf/CSharp/ICSharpCode.SharpZipLib.dll -------------------------------------------------------------------------------- /Json VS Protobuf/CSharp/LitJson.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yueyoum/playground/1b90abc14b306f99c7f2f70e629cb969fbebf26a/Json VS Protobuf/CSharp/LitJson.dll -------------------------------------------------------------------------------- /Json VS Protobuf/CSharp/Makefile: -------------------------------------------------------------------------------- 1 | pack.exe: PackBenchmark.cs 2 | gmcs -out:pack.exe PackBenchmark.cs \ 3 | -r:LitJson.dll \ 4 | -r:ICSharpCode.SharpZipLib.dll \ 5 | -r:protobuf-net.dll \ 6 | -r:ProtocolDefine.dll \ 7 | -r:ProtocolSerializer.dll 8 | 9 | unpack.exe: UnpackBenchmark.cs 10 | gmcs -out:unpack.exe UnpackBenchmark.cs \ 11 | -r:LitJson.dll \ 12 | -r:ICSharpCode.SharpZipLib.dll \ 13 | -r:protobuf-net.dll \ 14 | -r:ProtocolDefine.dll \ 15 | -r:ProtocolSerializer.dll 16 | 17 | .PHONY: clean 18 | clean: 19 | -rm -f *.exe 20 | -------------------------------------------------------------------------------- /Json VS Protobuf/CSharp/PackBenchmark.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.IO; 3 | using System.Text; 4 | using System.Collections.Generic; 5 | using System.Diagnostics; 6 | 7 | using LitJson; 8 | using ICSharpCode.SharpZipLib.Core; 9 | using ICSharpCode.SharpZipLib.GZip; 10 | 11 | using MyProject.Protocol; 12 | using MyProject.Protocol.Define; 13 | 14 | 15 | class LogEntry 16 | { 17 | public int Id {get; set;} 18 | public string Content {get; set;} 19 | public int Status {get; set;} 20 | public int Times {get; set;} 21 | } 22 | 23 | 24 | 25 | static class Program 26 | { 27 | public static int[] tags = {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19}; 28 | public static List logs; 29 | 30 | public static int logAmount; 31 | public static int benchmarkTimes; 32 | 33 | 34 | public static void Main(string[] args) 35 | { 
36 | try 37 | { 38 | logAmount = Convert.ToInt32(args[0]); 39 | benchmarkTimes = Convert.ToInt32(args[1]); 40 | if (logAmount < 0 || benchmarkTimes <= 0) 41 | { 42 | throw new Exception("wrong arguments"); 43 | } 44 | } 45 | catch 46 | { 47 | Console.WriteLine("usage: ./pack.exe [LOG AMOUNT] [BENCHMARK TIMES]"); 48 | return; 49 | } 50 | 51 | logs = new List(); 52 | for(var i=0; i 0) 93 | { 94 | ms.Write(buffer, 0, count); 95 | } 96 | } 97 | 98 | return ms.ToArray(); 99 | } 100 | } 101 | 102 | 103 | public static void UnPackPb() 104 | { 105 | var data = GetData("data.pb"); 106 | var stream = new MemoryStream(data); 107 | 108 | var ser = new ProtocolSerializer(); 109 | var person = ser.Deserialize(stream, null, typeof(Person)) as Person; 110 | stream.Close(); 111 | 112 | // Console.WriteLine("{0}, {1}, {2}, {3}", person.id, person.name, person.tags.Count, person.logs.Count); 113 | } 114 | 115 | public static void UnPackJson() 116 | { 117 | var data = GetData("data.json"); 118 | var json = Encoding.UTF8.GetString(data); 119 | 120 | ConvertJsonType(json); 121 | } 122 | 123 | public static void UnPackJsonGzip() 124 | { 125 | var data = GetData("data.json.gz"); 126 | var inputStream = new MemoryStream(data); 127 | var outStream = new MemoryStream(); 128 | 129 | var zs = new GZipInputStream(inputStream); 130 | 131 | int count = 0; 132 | var buffer = new byte[2048]; 133 | while((count = zs.Read(buffer, 0, buffer.Length)) > 0) 134 | { 135 | outStream.Write(buffer, 0, count); 136 | } 137 | 138 | var decodedData = outStream.ToArray(); 139 | var json = Encoding.UTF8.GetString(decodedData); 140 | 141 | outStream.Close(); 142 | inputStream.Close(); 143 | 144 | ConvertJsonType(json); 145 | } 146 | 147 | 148 | 149 | public static void ConvertJsonType(string json) 150 | { 151 | JsonData obj = JsonMapper.ToObject(json); 152 | int id = (int)obj["id"]; 153 | string name = (string)obj["name"]; 154 | 155 | // Console.WriteLine("Id: {0}, Name: {1}", id, name); 156 | 157 | // Console.Write("Tags: "); 158 | foreach(JsonData _tag in obj["tags"]) 159 | { 160 | int tag = (int)_tag; 161 | // Console.Write("{0}, ", tag); 162 | } 163 | 164 | // Console.WriteLine(); 165 | 166 | foreach(JsonData _log in obj["logs"]) 167 | { 168 | int logId = (int)_log["id"]; 169 | string content = (string)_log["content"]; 170 | int status = (int)_log["status"]; 171 | int times = (int)_log["times"]; 172 | // Console.WriteLine("{0}, {1}, {2}, {3}", logId, content, status, times); 173 | } 174 | } 175 | } 176 | -------------------------------------------------------------------------------- /Json VS Protobuf/CSharp/protobuf-net.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yueyoum/playground/1b90abc14b306f99c7f2f70e629cb969fbebf26a/Json VS Protobuf/CSharp/protobuf-net.dll -------------------------------------------------------------------------------- /Json VS Protobuf/Erlang/.gitignore: -------------------------------------------------------------------------------- 1 | deps 2 | ebin 3 | .rebar -------------------------------------------------------------------------------- /Json VS Protobuf/Erlang/benchmark: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env escript 2 | %% -*- erlang -*- 3 | 4 | usage() -> 5 | io:format("usage: ./benchmark pack [LOG AMOUNT] [BENCHMARK TIMES]~n"), 6 | io:format(" ./benchmark unpack [BENCHMARK TIMES]~n"), 7 | halt(1). 
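%% setup/0 (next) resolves the escript's own directory, then prepends ./ebin and every deps/*/ebin to the code path via code:add_patha/1, so the json_vs_proto module and the jiffy dependency can be loaded when the benchmark runs.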
8 | 9 | setup() -> 10 | SelfPath = filename:dirname(escript:script_name()), 11 | EbinPath = filename:join([SelfPath, "ebin"]), 12 | DepsEbinPath = filelib:wildcard(filename:join([SelfPath, "deps/*/ebin"])), 13 | 14 | Paths = [EbinPath | DepsEbinPath], 15 | 16 | lists:foreach(fun(P) -> code:add_patha(P) end, Paths). 17 | 18 | 19 | main(["pack", LogAmount, BenchmarkTimes]) -> 20 | [Amount, Times] = 21 | try 22 | [list_to_integer(LogAmount), list_to_integer(BenchmarkTimes)] 23 | catch 24 | _:_ -> 25 | usage(), 26 | [0, 0] 27 | end, 28 | 29 | setup(), 30 | pack_benchmark(Amount, Times); 31 | 32 | 33 | main(["unpack", BenchmarkTimes]) -> 34 | Times = 35 | try 36 | list_to_integer(BenchmarkTimes) 37 | catch 38 | _:_ -> 39 | usage(), 40 | 0 41 | end, 42 | 43 | setup(), 44 | unpack_benchmark(Times); 45 | 46 | 47 | main(_) -> 48 | usage(). 49 | 50 | 51 | pack_benchmark(Amount, Times) -> 52 | json_vs_proto:setup(Amount), 53 | io:format("LogAmount = ~p~n", [Amount]), 54 | io:format("Protobuf Size : ~p~n", [size(json_vs_proto:pack_pb(Amount))]), 55 | io:format("Json Size : ~p~n", [size(json_vs_proto:pack_json(Amount))]), 56 | io:format("Json Gzip Size : ~p~n~n", [size(json_vs_proto:pack_json_gzip(Amount))]), 57 | 58 | io:format("BenchmarkTimes = ~p~n", [Times]), 59 | io:format("Protobuf Seconds : ~p~n", [pack_tc(pack_pb, Amount, Times)]), 60 | io:format("Json Seconds : ~p~n", [pack_tc(pack_json, Amount, Times)]), 61 | io:format("Json Gzip Seconds : ~p~n", [pack_tc(pack_json_gzip, Amount, Times)]). 62 | 63 | 64 | unpack_benchmark(Times) -> 65 | json_vs_proto:setup(), 66 | io:format("BenchmarkTimes = ~p~n", [Times]), 67 | io:format("Protobuf Seconds : ~p~n", [unpack_tc(unpack_pb, Times)]), 68 | io:format("Json Seconds : ~p~n", [unpack_tc(unpack_json, Times)]), 69 | io:format("Json Gzip Seconds : ~p~n", [unpack_tc(unpack_json_gzip, Times)]). 70 | 71 | 72 | pack_tc(Func, Amount, Times) -> 73 | Args = [fun(_) -> json_vs_proto:Func(Amount) end, lists:seq(1, Times)], 74 | {Time, _} = timer:tc(lists, foreach, Args), 75 | Time / 1000000. 76 | 77 | 78 | unpack_tc(Func, Times)-> 79 | Args = [fun(_) -> json_vs_proto:Func() end, lists:seq(1, Times)], 80 | {Time, _} = timer:tc(lists, foreach, Args), 81 | Time / 1000000. 82 | -------------------------------------------------------------------------------- /Json VS Protobuf/Erlang/include/gpb.hrl: -------------------------------------------------------------------------------- 1 | -ifndef(gpb_hrl). 2 | -define(gpb_hrl, true). 3 | 4 | -type gpb_field_type() :: %% Erlang type Comment 5 | 'int32' | 'int64' % integer() variable-length encoded 6 | | 'uint32' | 'uint64' % integer() variable-length encoded 7 | | 'sint32' | 'sint64' % integer() variable-length zig-zag encoded 8 | | 'fixed32' | 'fixed64' % integer() always 4 | 8 bytes on wire 9 | | 'sfixed32' | 'sfixed64' % integer() always 4 | 8 bytes on wire 10 | | 'bool' % true | false 11 | | 'float' | 'double' % float() 12 | | 'string' % string() UTF-8 encoded 13 | | 'bytes' % binary() 14 | | {'enum',atom()} % atom() the enum literal is the atom 15 | | {'msg',atom()}. % record() the msg name is record name 16 | 17 | %% The following two definitions (`gpb_field' and `gpb_rpc') are to 18 | %% avoid clashes with other code, since the `field' and `rpc' are 19 | %% really too general names, they should have been prefixed. 20 | %% 21 | %% Unfortunately, they are already part of the API, so they can't 22 | %% be changed without breaking backwards compatibility. 
23 | %% (They appear as parameters or retrun values for functions in `gpb' 24 | %% in generated code.) 25 | %% 26 | %% In case a clash, it is possible to redefine the name locally. 27 | %% The recommendation is to redefine them with prefix, ie to `gpb_field' 28 | %% and `gpb_rpc', since this is what they will change to in some future. 29 | %% 30 | -ifdef(gpb_field_record_name). 31 | -define(gpb_field, ?gpb_field_record_name). 32 | -else. 33 | -define(gpb_field, field). %% odd definition is due to backwards compatibility 34 | -endif. 35 | 36 | -ifdef(gpb_rpc_record_name). 37 | -define(gpb_rpc, ?gpb_rpc_record_name). 38 | -else. 39 | -define(gpb_rpc, rpc). %% odd definition is due to backwards compatibility 40 | -endif. 41 | 42 | -record(?gpb_field, % NB: record name is (currently) `field' (not `gpb_field')! 43 | {name :: atom(), 44 | fnum :: integer(), 45 | rnum :: pos_integer(), %% field number in the record 46 | type :: gpb_field_type() | 47 | {ref, term()} | %% intermediary, during parsing 48 | {msg, list()} | %% intermediary, during parsing 49 | {enum, list()}, %% intermediary, during parsing 50 | occurrence :: 'required' | 'optional' | 'repeated', 51 | opts = [] :: [term()] 52 | }). 53 | 54 | -record(?gpb_rpc, % NB: record name is (currently) `rpc' (not `gpb_rpc')! 55 | {name :: atom(), 56 | input, 57 | output 58 | }). 59 | 60 | -record(gpb_oneof, 61 | {name :: atom(), 62 | rnum :: pos_integer(), %% field number in the record 63 | fields :: [#?gpb_field{}] %% all fields have the same rnum 64 | }). 65 | 66 | -endif. 67 | -------------------------------------------------------------------------------- /Json VS Protobuf/Erlang/include/protocol.hrl: -------------------------------------------------------------------------------- 1 | %% Automatically generated, do not edit 2 | %% Generated by gpb_compile version 3.17.11 on {{2015,4,25},{17,32,50}} 3 | 4 | -ifndef(protocol). 5 | -define(protocol, true). 6 | 7 | -define(protocol_gpb_version, "3.17.11"). 8 | 9 | -record('Log', 10 | {id, % = 1, int32 11 | content, % = 2, string 12 | status, % = 3, int32 13 | times % = 4, int32 14 | }). 15 | 16 | -record('Person', 17 | {id, % = 1, int32 18 | name, % = 2, string 19 | tags = [], % = 3, [int32] 20 | logs = [] % = 4, [{msg,'Log'}] 21 | }). 22 | 23 | -endif. 24 | -------------------------------------------------------------------------------- /Json VS Protobuf/Erlang/rebar.config: -------------------------------------------------------------------------------- 1 | {deps, [ 2 | {jiffy, ".*", 3 | {git, "https://github.com/davisp/jiffy.git", {tag, "0.13.3"}}} 4 | ]}. 5 | -------------------------------------------------------------------------------- /Json VS Protobuf/Erlang/src/json_vs_proto.app.src: -------------------------------------------------------------------------------- 1 | {application, json_vs_proto, 2 | [ 3 | {description, "An Erlang json_vs_proto library"}, 4 | {vsn, "1"}, 5 | {modules, [ 6 | json_vs_proto 7 | ]}, 8 | {registered, []}, 9 | {applications, [ 10 | kernel, 11 | stdlib 12 | ]}, 13 | {env, []} 14 | ]}. 15 | -------------------------------------------------------------------------------- /Json VS Protobuf/Erlang/src/json_vs_proto.erl: -------------------------------------------------------------------------------- 1 | -module(json_vs_proto). 2 | 3 | %% json_vs_proto: json_vs_proto library's entry point. 4 | 5 | -export([setup/0, 6 | setup/1]). 7 | 8 | -export([pack_pb/1, 9 | pack_json/1, 10 | pack_json_gzip/1, 11 | unpack_pb/0, 12 | unpack_json/0, 13 | unpack_json_gzip/0]). 
14 | 15 | 16 | -define(TAGS, lists:seq(0, 19)). 17 | -define(LOGS(Amount), lists:map( 18 | fun(Id) -> 19 | C = "Log Contents..." ++ integer_to_list(Id), 20 | [Id, list_to_binary(C), Id rem 2, 10000000 + Id] 21 | end, 22 | lists:seq(0, Amount-1) 23 | ) 24 | ). 25 | 26 | 27 | -include("../include/gpb.hrl"). 28 | -include("../include/protocol.hrl"). 29 | 30 | 31 | 32 | %% API 33 | 34 | pack_pb(LogAmount) -> 35 | Person = #'Person'{ 36 | id = 1, 37 | name = "My Playground!!!", 38 | tags = get_tags(), 39 | logs = create_logs_record_list(LogAmount) 40 | }, 41 | 42 | protocol:encode_msg(Person, [{verify, false}]). 43 | 44 | 45 | pack_json(LogAmount) -> 46 | Person = #{ 47 | id => 1, 48 | name => <<"My Playground!!!">>, 49 | tags => get_tags(), 50 | logs => create_logs_maps_list(LogAmount) 51 | }, 52 | 53 | jiffy:encode(Person). 54 | 55 | 56 | pack_json_gzip(LogAmount) -> 57 | Json = pack_json(LogAmount), 58 | zlib:gzip(Json). 59 | 60 | 61 | unpack_pb() -> 62 | Data = get_data(pb), 63 | protocol:decode_msg(Data, 'Person'). 64 | 65 | unpack_json() -> 66 | Data = get_data(json), 67 | jiffy:decode(Data, [return_maps]). 68 | 69 | unpack_json_gzip() -> 70 | Data = zlib:gunzip(get_data(json_gzip)), 71 | jiffy:decode(Data, [return_maps]). 72 | 73 | 74 | 75 | %% Internals 76 | 77 | 78 | create_logs_record_list(Amount) when Amount >= 0 -> 79 | Fun = fun([Id, Content, Status, Times]) -> 80 | #'Log'{ 81 | id=Id, 82 | content=Content, 83 | status=Status, 84 | times=Times 85 | } 86 | end, 87 | 88 | [Fun(Item) || Item <- get_logs()]. 89 | 90 | 91 | create_logs_maps_list(Amount) when Amount >= 0 -> 92 | Fun = fun([Id, Content, Status, Times]) -> 93 | #{ 94 | id => Id, 95 | content => Content, 96 | status => Status, 97 | times => Times 98 | } 99 | end, 100 | 101 | [Fun(Item) || Item <- get_logs()]. 102 | 103 | 104 | get_data(Type) -> 105 | case get(Type) of 106 | undefined -> 107 | {Path, _Options} = filename:find_src(json_vs_proto), 108 | DataPath = get_parent_path(3, Path), 109 | File = get_file(Type, DataPath), 110 | {ok, Data} = file:read_file(File), 111 | put(Type, Data), 112 | Data; 113 | Data -> 114 | Data 115 | end. 116 | 117 | get_file(pb, DataPath) -> 118 | filename:join(DataPath, "data.pb"); 119 | 120 | get_file(json, DataPath) -> 121 | filename:join(DataPath, "data.json"); 122 | 123 | get_file(json_gzip, DataPath) -> 124 | filename:join(DataPath, "data.json.gz"). 125 | 126 | 127 | get_parent_path(0, Path) -> 128 | Path; 129 | 130 | get_parent_path(N, Path) when N > 0 -> 131 | get_parent_path(N-1, filename:dirname(Path)). 132 | 133 | 134 | get_tags() -> 135 | case get(tags) of 136 | undefined -> 137 | Tags = ?TAGS, 138 | put(tags, Tags), 139 | Tags; 140 | Tags -> 141 | Tags 142 | end. 143 | 144 | get_logs() -> 145 | get(logs). 146 | 147 | get_logs(Amount) -> 148 | case get(logs) of 149 | undefined -> 150 | Logs = ?LOGS(Amount), 151 | put(logs, Logs), 152 | Logs; 153 | Logs -> 154 | Logs 155 | end. 156 | 157 | 158 | setup() -> 159 | get_tags(), 160 | get_data(pb), 161 | get_data(json), 162 | get_data(json_gzip), 163 | ok. 164 | 165 | setup(Amount) -> 166 | get_logs(Amount), 167 | setup(). 168 | 169 | 170 | 171 | %% End of Module. 172 | -------------------------------------------------------------------------------- /Json VS Protobuf/Erlang/src/protocol.erl: -------------------------------------------------------------------------------- 1 | %% Automatically generated, do not edit 2 | %% Generated by gpb_compile version 3.17.11 on {{2015,4,25},{17,32,50}} 3 | -module(protocol). 
4 | 5 | -export([encode_msg/1, encode_msg/2]). 6 | -export([decode_msg/2]). 7 | -export([merge_msgs/2]). 8 | -export([verify_msg/1]). 9 | -export([get_msg_defs/0]). 10 | -export([get_msg_names/0]). 11 | -export([get_enum_names/0]). 12 | -export([find_msg_def/1, fetch_msg_def/1]). 13 | -export([find_enum_def/1, fetch_enum_def/1]). 14 | -export([enum_symbol_by_value/2, enum_value_by_symbol/2]). 15 | -export([get_service_names/0]). 16 | -export([get_service_def/1]). 17 | -export([get_rpc_names/1]). 18 | -export([find_rpc_def/2, fetch_rpc_def/2]). 19 | -export([get_package_name/0]). 20 | -export([gpb_version_as_string/0, gpb_version_as_list/0]). 21 | 22 | -include("protocol.hrl"). 23 | -include("gpb.hrl"). 24 | 25 | 26 | encode_msg(Msg) -> encode_msg(Msg, []). 27 | 28 | 29 | encode_msg(Msg, Opts) -> 30 | case proplists:get_bool(verify, Opts) of 31 | true -> verify_msg(Msg); 32 | false -> ok 33 | end, 34 | case Msg of 35 | #'Log'{} -> e_msg_Log(Msg); 36 | #'Person'{} -> e_msg_Person(Msg) 37 | end. 38 | 39 | 40 | e_msg_Log(Msg) -> e_msg_Log(Msg, <<>>). 41 | 42 | 43 | e_msg_Log(#'Log'{id = F1, content = F2, status = F3, 44 | times = F4}, 45 | Bin) -> 46 | B1 = e_type_int32(F1, <>), 47 | B2 = e_type_string(F2, <>), 48 | B3 = e_type_int32(F3, <>), 49 | e_type_int32(F4, <>). 50 | 51 | e_msg_Person(Msg) -> e_msg_Person(Msg, <<>>). 52 | 53 | 54 | e_msg_Person(#'Person'{id = F1, name = F2, tags = F3, 55 | logs = F4}, 56 | Bin) -> 57 | B1 = e_type_int32(F1, <>), 58 | B2 = e_type_string(F2, <>), 59 | B3 = if F3 == [] -> B2; 60 | true -> e_field_Person_tags(F3, B2) 61 | end, 62 | if F4 == [] -> B3; 63 | true -> e_field_Person_logs(F4, B3) 64 | end. 65 | 66 | e_field_Person_tags([Elem | Rest], Bin) -> 67 | Bin2 = <>, 68 | Bin3 = e_type_int32(Elem, Bin2), 69 | e_field_Person_tags(Rest, Bin3); 70 | e_field_Person_tags([], Bin) -> Bin. 71 | 72 | e_mfield_Person_logs(Msg, Bin) -> 73 | SubBin = e_msg_Log(Msg, <<>>), 74 | Bin2 = e_varint(byte_size(SubBin), Bin), 75 | <>. 76 | 77 | e_field_Person_logs([Elem | Rest], Bin) -> 78 | Bin2 = <>, 79 | Bin3 = e_mfield_Person_logs(Elem, Bin2), 80 | e_field_Person_logs(Rest, Bin3); 81 | e_field_Person_logs([], Bin) -> Bin. 82 | 83 | e_type_int32(Value, Bin) 84 | when 0 =< Value, Value =< 127 -> 85 | <>; 86 | e_type_int32(Value, Bin) -> 87 | <> = <>, 88 | e_varint(N, Bin). 89 | 90 | e_type_string(S, Bin) -> 91 | Utf8 = unicode:characters_to_binary(S), 92 | Bin2 = e_varint(byte_size(Utf8), Bin), 93 | <>. 94 | 95 | e_varint(N, Bin) when N =< 127 -> <>; 96 | e_varint(N, Bin) -> 97 | Bin2 = <>, 98 | e_varint(N bsr 7, Bin2). 99 | 100 | 101 | 102 | decode_msg(Bin, MsgName) when is_binary(Bin) -> 103 | case MsgName of 104 | 'Log' -> d_msg_Log(Bin); 105 | 'Person' -> d_msg_Person(Bin) 106 | end. 107 | 108 | 109 | 110 | d_msg_Log(Bin) -> 111 | dfp_read_field_def_Log(Bin, 0, 0, undefined, undefined, 112 | undefined, undefined). 
113 | 114 | dfp_read_field_def_Log(<<8, Rest/binary>>, Z1, Z2, F1, 115 | F2, F3, F4) -> 116 | d_field_Log_id(Rest, Z1, Z2, F1, F2, F3, F4); 117 | dfp_read_field_def_Log(<<18, Rest/binary>>, Z1, Z2, F1, 118 | F2, F3, F4) -> 119 | d_field_Log_content(Rest, Z1, Z2, F1, F2, F3, F4); 120 | dfp_read_field_def_Log(<<24, Rest/binary>>, Z1, Z2, F1, 121 | F2, F3, F4) -> 122 | d_field_Log_status(Rest, Z1, Z2, F1, F2, F3, F4); 123 | dfp_read_field_def_Log(<<32, Rest/binary>>, Z1, Z2, F1, 124 | F2, F3, F4) -> 125 | d_field_Log_times(Rest, Z1, Z2, F1, F2, F3, F4); 126 | dfp_read_field_def_Log(<<>>, 0, 0, F1, F2, F3, F4) -> 127 | #'Log'{id = F1, content = F2, status = F3, times = F4}; 128 | dfp_read_field_def_Log(Other, Z1, Z2, F1, F2, F3, F4) -> 129 | dg_read_field_def_Log(Other, Z1, Z2, F1, F2, F3, F4). 130 | 131 | dg_read_field_def_Log(<<1:1, X:7, Rest/binary>>, N, Acc, 132 | F1, F2, F3, F4) 133 | when N < 32 - 7 -> 134 | dg_read_field_def_Log(Rest, N + 7, X bsl N + Acc, F1, 135 | F2, F3, F4); 136 | dg_read_field_def_Log(<<0:1, X:7, Rest/binary>>, N, Acc, 137 | F1, F2, F3, F4) -> 138 | Key = X bsl N + Acc, 139 | case Key of 140 | 8 -> d_field_Log_id(Rest, 0, 0, F1, F2, F3, F4); 141 | 18 -> d_field_Log_content(Rest, 0, 0, F1, F2, F3, F4); 142 | 24 -> d_field_Log_status(Rest, 0, 0, F1, F2, F3, F4); 143 | 32 -> d_field_Log_times(Rest, 0, 0, F1, F2, F3, F4); 144 | _ -> 145 | case Key band 7 of 146 | 0 -> skip_varint_Log(Rest, 0, 0, F1, F2, F3, F4); 147 | 1 -> skip_64_Log(Rest, 0, 0, F1, F2, F3, F4); 148 | 2 -> 149 | skip_length_delimited_Log(Rest, 0, 0, F1, F2, F3, F4); 150 | 5 -> skip_32_Log(Rest, 0, 0, F1, F2, F3, F4) 151 | end 152 | end; 153 | dg_read_field_def_Log(<<>>, 0, 0, F1, F2, F3, F4) -> 154 | #'Log'{id = F1, content = F2, status = F3, times = F4}. 155 | 156 | d_field_Log_id(<<1:1, X:7, Rest/binary>>, N, Acc, F1, 157 | F2, F3, F4) 158 | when N < 57 -> 159 | d_field_Log_id(Rest, N + 7, X bsl N + Acc, F1, F2, F3, 160 | F4); 161 | d_field_Log_id(<<0:1, X:7, Rest/binary>>, N, Acc, _, F2, 162 | F3, F4) -> 163 | <> = <<(X bsl N + 164 | Acc):32/unsigned-native>>, 165 | dfp_read_field_def_Log(Rest, 0, 0, NewFValue, F2, F3, 166 | F4). 167 | 168 | 169 | d_field_Log_content(<<1:1, X:7, Rest/binary>>, N, Acc, 170 | F1, F2, F3, F4) 171 | when N < 57 -> 172 | d_field_Log_content(Rest, N + 7, X bsl N + Acc, F1, F2, 173 | F3, F4); 174 | d_field_Log_content(<<0:1, X:7, Rest/binary>>, N, Acc, 175 | F1, _, F3, F4) -> 176 | Len = X bsl N + Acc, 177 | <> = Rest, 178 | NewFValue = binary:copy(Bytes), 179 | dfp_read_field_def_Log(Rest2, 0, 0, F1, NewFValue, F3, 180 | F4). 181 | 182 | 183 | d_field_Log_status(<<1:1, X:7, Rest/binary>>, N, Acc, 184 | F1, F2, F3, F4) 185 | when N < 57 -> 186 | d_field_Log_status(Rest, N + 7, X bsl N + Acc, F1, F2, 187 | F3, F4); 188 | d_field_Log_status(<<0:1, X:7, Rest/binary>>, N, Acc, 189 | F1, F2, _, F4) -> 190 | <> = <<(X bsl N + 191 | Acc):32/unsigned-native>>, 192 | dfp_read_field_def_Log(Rest, 0, 0, F1, F2, NewFValue, 193 | F4). 194 | 195 | 196 | d_field_Log_times(<<1:1, X:7, Rest/binary>>, N, Acc, F1, 197 | F2, F3, F4) 198 | when N < 57 -> 199 | d_field_Log_times(Rest, N + 7, X bsl N + Acc, F1, F2, 200 | F3, F4); 201 | d_field_Log_times(<<0:1, X:7, Rest/binary>>, N, Acc, F1, 202 | F2, F3, _) -> 203 | <> = <<(X bsl N + 204 | Acc):32/unsigned-native>>, 205 | dfp_read_field_def_Log(Rest, 0, 0, F1, F2, F3, 206 | NewFValue). 
207 | 208 | 209 | skip_varint_Log(<<1:1, _:7, Rest/binary>>, Z1, Z2, F1, 210 | F2, F3, F4) -> 211 | skip_varint_Log(Rest, Z1, Z2, F1, F2, F3, F4); 212 | skip_varint_Log(<<0:1, _:7, Rest/binary>>, Z1, Z2, F1, 213 | F2, F3, F4) -> 214 | dfp_read_field_def_Log(Rest, Z1, Z2, F1, F2, F3, F4). 215 | 216 | 217 | skip_length_delimited_Log(<<1:1, X:7, Rest/binary>>, N, 218 | Acc, F1, F2, F3, F4) 219 | when N < 57 -> 220 | skip_length_delimited_Log(Rest, N + 7, X bsl N + Acc, 221 | F1, F2, F3, F4); 222 | skip_length_delimited_Log(<<0:1, X:7, Rest/binary>>, N, 223 | Acc, F1, F2, F3, F4) -> 224 | Length = X bsl N + Acc, 225 | <<_:Length/binary, Rest2/binary>> = Rest, 226 | dfp_read_field_def_Log(Rest2, 0, 0, F1, F2, F3, F4). 227 | 228 | 229 | skip_32_Log(<<_:32, Rest/binary>>, Z1, Z2, F1, F2, F3, 230 | F4) -> 231 | dfp_read_field_def_Log(Rest, Z1, Z2, F1, F2, F3, F4). 232 | 233 | 234 | skip_64_Log(<<_:64, Rest/binary>>, Z1, Z2, F1, F2, F3, 235 | F4) -> 236 | dfp_read_field_def_Log(Rest, Z1, Z2, F1, F2, F3, F4). 237 | 238 | 239 | d_msg_Person(Bin) -> 240 | dfp_read_field_def_Person(Bin, 0, 0, undefined, 241 | undefined, [], []). 242 | 243 | dfp_read_field_def_Person(<<8, Rest/binary>>, Z1, Z2, 244 | F1, F2, F3, F4) -> 245 | d_field_Person_id(Rest, Z1, Z2, F1, F2, F3, F4); 246 | dfp_read_field_def_Person(<<18, Rest/binary>>, Z1, Z2, 247 | F1, F2, F3, F4) -> 248 | d_field_Person_name(Rest, Z1, Z2, F1, F2, F3, F4); 249 | dfp_read_field_def_Person(<<24, Rest/binary>>, Z1, Z2, 250 | F1, F2, F3, F4) -> 251 | d_field_Person_tags(Rest, Z1, Z2, F1, F2, F3, F4); 252 | dfp_read_field_def_Person(<<34, Rest/binary>>, Z1, Z2, 253 | F1, F2, F3, F4) -> 254 | d_field_Person_logs(Rest, Z1, Z2, F1, F2, F3, F4); 255 | dfp_read_field_def_Person(<<>>, 0, 0, F1, F2, F3, F4) -> 256 | #'Person'{id = F1, name = F2, tags = lists:reverse(F3), 257 | logs = lists:reverse(F4)}; 258 | dfp_read_field_def_Person(Other, Z1, Z2, F1, F2, F3, 259 | F4) -> 260 | dg_read_field_def_Person(Other, Z1, Z2, F1, F2, F3, F4). 261 | 262 | dg_read_field_def_Person(<<1:1, X:7, Rest/binary>>, N, 263 | Acc, F1, F2, F3, F4) 264 | when N < 32 - 7 -> 265 | dg_read_field_def_Person(Rest, N + 7, X bsl N + Acc, F1, 266 | F2, F3, F4); 267 | dg_read_field_def_Person(<<0:1, X:7, Rest/binary>>, N, 268 | Acc, F1, F2, F3, F4) -> 269 | Key = X bsl N + Acc, 270 | case Key of 271 | 8 -> d_field_Person_id(Rest, 0, 0, F1, F2, F3, F4); 272 | 18 -> d_field_Person_name(Rest, 0, 0, F1, F2, F3, F4); 273 | 24 -> d_field_Person_tags(Rest, 0, 0, F1, F2, F3, F4); 274 | 34 -> d_field_Person_logs(Rest, 0, 0, F1, F2, F3, F4); 275 | _ -> 276 | case Key band 7 of 277 | 0 -> skip_varint_Person(Rest, 0, 0, F1, F2, F3, F4); 278 | 1 -> skip_64_Person(Rest, 0, 0, F1, F2, F3, F4); 279 | 2 -> 280 | skip_length_delimited_Person(Rest, 0, 0, F1, F2, F3, 281 | F4); 282 | 5 -> skip_32_Person(Rest, 0, 0, F1, F2, F3, F4) 283 | end 284 | end; 285 | dg_read_field_def_Person(<<>>, 0, 0, F1, F2, F3, F4) -> 286 | #'Person'{id = F1, name = F2, tags = lists:reverse(F3), 287 | logs = lists:reverse(F4)}. 288 | 289 | d_field_Person_id(<<1:1, X:7, Rest/binary>>, N, Acc, F1, 290 | F2, F3, F4) 291 | when N < 57 -> 292 | d_field_Person_id(Rest, N + 7, X bsl N + Acc, F1, F2, 293 | F3, F4); 294 | d_field_Person_id(<<0:1, X:7, Rest/binary>>, N, Acc, _, 295 | F2, F3, F4) -> 296 | <> = <<(X bsl N + 297 | Acc):32/unsigned-native>>, 298 | dfp_read_field_def_Person(Rest, 0, 0, NewFValue, F2, F3, 299 | F4). 
300 | 301 | 302 | d_field_Person_name(<<1:1, X:7, Rest/binary>>, N, Acc, 303 | F1, F2, F3, F4) 304 | when N < 57 -> 305 | d_field_Person_name(Rest, N + 7, X bsl N + Acc, F1, F2, 306 | F3, F4); 307 | d_field_Person_name(<<0:1, X:7, Rest/binary>>, N, Acc, 308 | F1, _, F3, F4) -> 309 | Len = X bsl N + Acc, 310 | <> = Rest, 311 | NewFValue = binary:copy(Bytes), 312 | dfp_read_field_def_Person(Rest2, 0, 0, F1, NewFValue, 313 | F3, F4). 314 | 315 | 316 | d_field_Person_tags(<<1:1, X:7, Rest/binary>>, N, Acc, 317 | F1, F2, F3, F4) 318 | when N < 57 -> 319 | d_field_Person_tags(Rest, N + 7, X bsl N + Acc, F1, F2, 320 | F3, F4); 321 | d_field_Person_tags(<<0:1, X:7, Rest/binary>>, N, Acc, 322 | F1, F2, F3, F4) -> 323 | <> = <<(X bsl N + 324 | Acc):32/unsigned-native>>, 325 | dfp_read_field_def_Person(Rest, 0, 0, F1, F2, 326 | [NewFValue | F3], F4). 327 | 328 | 329 | d_field_Person_logs(<<1:1, X:7, Rest/binary>>, N, Acc, 330 | F1, F2, F3, F4) 331 | when N < 57 -> 332 | d_field_Person_logs(Rest, N + 7, X bsl N + Acc, F1, F2, 333 | F3, F4); 334 | d_field_Person_logs(<<0:1, X:7, Rest/binary>>, N, Acc, 335 | F1, F2, F3, F4) -> 336 | Len = X bsl N + Acc, 337 | <> = Rest, 338 | NewFValue = decode_msg(Bs, 'Log'), 339 | dfp_read_field_def_Person(Rest2, 0, 0, F1, F2, F3, 340 | [NewFValue | F4]). 341 | 342 | 343 | skip_varint_Person(<<1:1, _:7, Rest/binary>>, Z1, Z2, 344 | F1, F2, F3, F4) -> 345 | skip_varint_Person(Rest, Z1, Z2, F1, F2, F3, F4); 346 | skip_varint_Person(<<0:1, _:7, Rest/binary>>, Z1, Z2, 347 | F1, F2, F3, F4) -> 348 | dfp_read_field_def_Person(Rest, Z1, Z2, F1, F2, F3, F4). 349 | 350 | 351 | skip_length_delimited_Person(<<1:1, X:7, Rest/binary>>, 352 | N, Acc, F1, F2, F3, F4) 353 | when N < 57 -> 354 | skip_length_delimited_Person(Rest, N + 7, X bsl N + Acc, 355 | F1, F2, F3, F4); 356 | skip_length_delimited_Person(<<0:1, X:7, Rest/binary>>, 357 | N, Acc, F1, F2, F3, F4) -> 358 | Length = X bsl N + Acc, 359 | <<_:Length/binary, Rest2/binary>> = Rest, 360 | dfp_read_field_def_Person(Rest2, 0, 0, F1, F2, F3, F4). 361 | 362 | 363 | skip_32_Person(<<_:32, Rest/binary>>, Z1, Z2, F1, F2, 364 | F3, F4) -> 365 | dfp_read_field_def_Person(Rest, Z1, Z2, F1, F2, F3, F4). 366 | 367 | 368 | skip_64_Person(<<_:64, Rest/binary>>, Z1, Z2, F1, F2, 369 | F3, F4) -> 370 | dfp_read_field_def_Person(Rest, Z1, Z2, F1, F2, F3, F4). 371 | 372 | 373 | 374 | 375 | merge_msgs(Prev, New) 376 | when element(1, Prev) =:= element(1, New) -> 377 | case Prev of 378 | #'Log'{} -> merge_msg_Log(Prev, New); 379 | #'Person'{} -> merge_msg_Person(Prev, New) 380 | end. 381 | 382 | merge_msg_Log(#'Log'{id = PFid, content = PFcontent, 383 | status = PFstatus, times = PFtimes}, 384 | #'Log'{id = NFid, content = NFcontent, 385 | status = NFstatus, times = NFtimes}) -> 386 | #'Log'{id = 387 | if NFid =:= undefined -> PFid; 388 | true -> NFid 389 | end, 390 | content = 391 | if NFcontent =:= undefined -> PFcontent; 392 | true -> NFcontent 393 | end, 394 | status = 395 | if NFstatus =:= undefined -> PFstatus; 396 | true -> NFstatus 397 | end, 398 | times = 399 | if NFtimes =:= undefined -> PFtimes; 400 | true -> NFtimes 401 | end}. 
402 | 403 | merge_msg_Person(#'Person'{id = PFid, name = PFname, 404 | tags = PFtags, logs = PFlogs}, 405 | #'Person'{id = NFid, name = NFname, tags = NFtags, 406 | logs = NFlogs}) -> 407 | #'Person'{id = 408 | if NFid =:= undefined -> PFid; 409 | true -> NFid 410 | end, 411 | name = 412 | if NFname =:= undefined -> PFname; 413 | true -> NFname 414 | end, 415 | tags = PFtags ++ NFtags, logs = PFlogs ++ NFlogs}. 416 | 417 | 418 | 419 | verify_msg(Msg) -> 420 | case Msg of 421 | #'Log'{} -> v_msg_Log(Msg, ['Log']); 422 | #'Person'{} -> v_msg_Person(Msg, ['Person']); 423 | _ -> mk_type_error(not_a_known_message, Msg, []) 424 | end. 425 | 426 | 427 | v_msg_Log(#'Log'{id = F1, content = F2, status = F3, 428 | times = F4}, 429 | Path) -> 430 | v_type_int32(F1, [id | Path]), 431 | v_type_string(F2, [content | Path]), 432 | v_type_int32(F3, [status | Path]), 433 | v_type_int32(F4, [times | Path]), 434 | ok; 435 | v_msg_Log(X, Path) -> 436 | mk_type_error({expected_msg, 'Log'}, X, Path). 437 | 438 | v_msg_Person(#'Person'{id = F1, name = F2, tags = F3, 439 | logs = F4}, 440 | Path) -> 441 | v_type_int32(F1, [id | Path]), 442 | v_type_string(F2, [name | Path]), 443 | if is_list(F3) -> 444 | [v_type_int32(Elem, [tags | Path]) || Elem <- F3]; 445 | true -> 446 | mk_type_error({invalid_list_of, int32}, F3, Path) 447 | end, 448 | if is_list(F4) -> 449 | [v_msg_Log(Elem, [logs | Path]) || Elem <- F4]; 450 | true -> 451 | mk_type_error({invalid_list_of, {msg, 'Log'}}, F4, Path) 452 | end, 453 | ok. 454 | 455 | v_type_int32(N, _Path) 456 | when -2147483648 =< N, N =< 2147483647 -> 457 | ok; 458 | v_type_int32(N, Path) when is_integer(N) -> 459 | mk_type_error({value_out_of_range, int32, signed, 32}, 460 | N, Path); 461 | v_type_int32(X, Path) -> 462 | mk_type_error({bad_integer, int32, signed, 32}, X, 463 | Path). 464 | 465 | v_type_string(S, Path) when is_list(S) -> 466 | try unicode:characters_to_binary(S), ok catch 467 | error:badarg -> 468 | mk_type_error(bad_unicode_string, S, Path) 469 | end; 470 | v_type_string(X, Path) -> 471 | mk_type_error(bad_unicode_string, X, Path). 472 | 473 | mk_type_error(Error, ValueSeen, Path) -> 474 | Path2 = prettify_path(Path), 475 | erlang:error({gpb_type_error, 476 | {Error, [{value, ValueSeen}, {path, Path2}]}}). 477 | 478 | 479 | prettify_path([]) -> top_level; 480 | prettify_path(PathR) -> 481 | list_to_atom(string:join(lists:map(fun atom_to_list/1, 482 | lists:reverse(PathR)), 483 | ".")). 484 | 485 | 486 | 487 | get_msg_defs() -> 488 | [{{msg, 'Log'}, 489 | [#field{name = id, fnum = 1, rnum = 2, type = int32, 490 | occurrence = required, opts = []}, 491 | #field{name = content, fnum = 2, rnum = 3, 492 | type = string, occurrence = required, opts = []}, 493 | #field{name = status, fnum = 3, rnum = 4, type = int32, 494 | occurrence = required, opts = []}, 495 | #field{name = times, fnum = 4, rnum = 5, type = int32, 496 | occurrence = required, opts = []}]}, 497 | {{msg, 'Person'}, 498 | [#field{name = id, fnum = 1, rnum = 2, type = int32, 499 | occurrence = required, opts = []}, 500 | #field{name = name, fnum = 2, rnum = 3, type = string, 501 | occurrence = required, opts = []}, 502 | #field{name = tags, fnum = 3, rnum = 4, type = int32, 503 | occurrence = repeated, opts = []}, 504 | #field{name = logs, fnum = 4, rnum = 5, 505 | type = {msg, 'Log'}, occurrence = repeated, 506 | opts = []}]}]. 507 | 508 | 509 | get_msg_names() -> ['Log', 'Person']. 510 | 511 | 512 | get_enum_names() -> []. 
513 | 514 | 515 | fetch_msg_def(MsgName) -> 516 | case find_msg_def(MsgName) of 517 | Fs when is_list(Fs) -> Fs; 518 | error -> erlang:error({no_such_msg, MsgName}) 519 | end. 520 | 521 | 522 | fetch_enum_def(EnumName) -> 523 | erlang:error({no_such_enum, EnumName}). 524 | 525 | 526 | find_msg_def('Log') -> 527 | [#field{name = id, fnum = 1, rnum = 2, type = int32, 528 | occurrence = required, opts = []}, 529 | #field{name = content, fnum = 2, rnum = 3, 530 | type = string, occurrence = required, opts = []}, 531 | #field{name = status, fnum = 3, rnum = 4, type = int32, 532 | occurrence = required, opts = []}, 533 | #field{name = times, fnum = 4, rnum = 5, type = int32, 534 | occurrence = required, opts = []}]; 535 | find_msg_def('Person') -> 536 | [#field{name = id, fnum = 1, rnum = 2, type = int32, 537 | occurrence = required, opts = []}, 538 | #field{name = name, fnum = 2, rnum = 3, type = string, 539 | occurrence = required, opts = []}, 540 | #field{name = tags, fnum = 3, rnum = 4, type = int32, 541 | occurrence = repeated, opts = []}, 542 | #field{name = logs, fnum = 4, rnum = 5, 543 | type = {msg, 'Log'}, occurrence = repeated, opts = []}]; 544 | find_msg_def(_) -> error. 545 | 546 | 547 | find_enum_def(_) -> error. 548 | 549 | 550 | enum_symbol_by_value(E, V) -> 551 | erlang:error({no_enum_defs, E, V}). 552 | 553 | 554 | enum_value_by_symbol(E, V) -> 555 | erlang:error({no_enum_defs, E, V}). 556 | 557 | 558 | 559 | get_service_names() -> []. 560 | 561 | 562 | get_service_def(_) -> error. 563 | 564 | 565 | get_rpc_names(_) -> error. 566 | 567 | 568 | find_rpc_def(_, _) -> error. 569 | 570 | 571 | 572 | fetch_rpc_def(ServiceName, RpcName) -> 573 | erlang:error({no_such_rpc, ServiceName, RpcName}). 574 | 575 | 576 | get_package_name() -> undefined. 577 | 578 | 579 | 580 | gpb_version_as_string() -> 581 | "3.17.11". 582 | 583 | gpb_version_as_list() -> 584 | [3,17,11]. 585 | -------------------------------------------------------------------------------- /Json VS Protobuf/Python/pack.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import json 3 | import gzip 4 | from cStringIO import StringIO 5 | 6 | from protocol_pb2 import Person 7 | 8 | 9 | 10 | class Pack(object): 11 | def __init__(self, log_amount=0): 12 | self.TAGS = self.get_tags() 13 | self.LOGS = self.get_logs(log_amount) 14 | 15 | def get_tags(self): 16 | return range(20) 17 | 18 | def get_logs(self, amount): 19 | return [(i, "Log Contents...%d" %i, i % 2, 10000000 + i) for i in range(amount)] 20 | 21 | 22 | def create_pb(self): 23 | msg = Person() 24 | msg.id = 1 25 | msg.name = "My Playground!!!" 
26 | msg.tags.extend(self.TAGS) 27 | for _id, content, status, times in self.LOGS: 28 | log = msg.logs.add() 29 | log.id = _id 30 | log.content = content 31 | log.status = status 32 | log.times = times 33 | 34 | return msg.SerializeToString() 35 | 36 | 37 | def create_json(self): 38 | data = { 39 | 'id': 1, 40 | 'name': "My Playground!!!", 41 | 'tags': self.TAGS, 42 | 'logs': [] 43 | } 44 | 45 | for _id, content, status, times in self.LOGS: 46 | data['logs'].append({ 47 | 'id': _id, 48 | 'content': content, 49 | 'status': status, 50 | 'times': times 51 | }) 52 | 53 | return json.dumps(data) 54 | 55 | 56 | def compress_with_gzip(self, data): 57 | buf = StringIO() 58 | with gzip.GzipFile(fileobj=buf, mode='w', compresslevel=6) as f: 59 | f.write(data) 60 | 61 | return buf.getvalue() 62 | 63 | 64 | if __name__ == '__main__': 65 | import sys 66 | import timeit 67 | 68 | try: 69 | log_amount = int(sys.argv[1]) 70 | times = int(sys.argv[2]) 71 | except: 72 | print "./pack.py [LOG AMOUNT] [BENCHMARK TIMES]" 73 | sys.exit(1) 74 | 75 | 76 | p = Pack(log_amount) 77 | pb_data = p.create_pb() 78 | json_data = p.create_json() 79 | json_data_gzip = p.compress_with_gzip(json_data) 80 | 81 | print "LogAmount = ", log_amount 82 | print "Protobuf Size :", len(pb_data) 83 | print "Json Size :", len(json_data) 84 | print "Json GZip Size :", len(json_data_gzip) 85 | 86 | 87 | pb_t = timeit.Timer("p.create_pb()", setup="from __main__ import p") 88 | json_t = timeit.Timer("p.create_json()", setup="from __main__ import p") 89 | json_gzip_t = timeit.Timer("p.compress_with_gzip(p.create_json())", setup="from __main__ import p") 90 | 91 | print 92 | print "Benchmark Times =", times 93 | print "Protobuf Seconds :", pb_t.timeit(number=times) 94 | print "Json Seconds :", json_t.timeit(number=times) 95 | print "Json GZip Seconds :", json_gzip_t.timeit(number=times) 96 | -------------------------------------------------------------------------------- /Json VS Protobuf/Python/protocol_pb2.py: -------------------------------------------------------------------------------- 1 | # Generated by the protocol buffer compiler. DO NOT EDIT! 
2 | # source: protocol.proto 3 | 4 | from google.protobuf import descriptor as _descriptor 5 | from google.protobuf import message as _message 6 | from google.protobuf import reflection as _reflection 7 | from google.protobuf import descriptor_pb2 8 | # @@protoc_insertion_point(imports) 9 | 10 | 11 | 12 | 13 | DESCRIPTOR = _descriptor.FileDescriptor( 14 | name='protocol.proto', 15 | package='', 16 | serialized_pb='\n\x0eprotocol.proto\"A\n\x03Log\x12\n\n\x02id\x18\x01 \x02(\x05\x12\x0f\n\x07\x63ontent\x18\x02 \x02(\t\x12\x0e\n\x06status\x18\x03 \x02(\x05\x12\r\n\x05times\x18\x04 \x02(\x05\"D\n\x06Person\x12\n\n\x02id\x18\x01 \x02(\x05\x12\x0c\n\x04name\x18\x02 \x02(\t\x12\x0c\n\x04tags\x18\x03 \x03(\x05\x12\x12\n\x04logs\x18\x04 \x03(\x0b\x32\x04.Log') 17 | 18 | 19 | 20 | 21 | _LOG = _descriptor.Descriptor( 22 | name='Log', 23 | full_name='Log', 24 | filename=None, 25 | file=DESCRIPTOR, 26 | containing_type=None, 27 | fields=[ 28 | _descriptor.FieldDescriptor( 29 | name='id', full_name='Log.id', index=0, 30 | number=1, type=5, cpp_type=1, label=2, 31 | has_default_value=False, default_value=0, 32 | message_type=None, enum_type=None, containing_type=None, 33 | is_extension=False, extension_scope=None, 34 | options=None), 35 | _descriptor.FieldDescriptor( 36 | name='content', full_name='Log.content', index=1, 37 | number=2, type=9, cpp_type=9, label=2, 38 | has_default_value=False, default_value=unicode("", "utf-8"), 39 | message_type=None, enum_type=None, containing_type=None, 40 | is_extension=False, extension_scope=None, 41 | options=None), 42 | _descriptor.FieldDescriptor( 43 | name='status', full_name='Log.status', index=2, 44 | number=3, type=5, cpp_type=1, label=2, 45 | has_default_value=False, default_value=0, 46 | message_type=None, enum_type=None, containing_type=None, 47 | is_extension=False, extension_scope=None, 48 | options=None), 49 | _descriptor.FieldDescriptor( 50 | name='times', full_name='Log.times', index=3, 51 | number=4, type=5, cpp_type=1, label=2, 52 | has_default_value=False, default_value=0, 53 | message_type=None, enum_type=None, containing_type=None, 54 | is_extension=False, extension_scope=None, 55 | options=None), 56 | ], 57 | extensions=[ 58 | ], 59 | nested_types=[], 60 | enum_types=[ 61 | ], 62 | options=None, 63 | is_extendable=False, 64 | extension_ranges=[], 65 | serialized_start=18, 66 | serialized_end=83, 67 | ) 68 | 69 | 70 | _PERSON = _descriptor.Descriptor( 71 | name='Person', 72 | full_name='Person', 73 | filename=None, 74 | file=DESCRIPTOR, 75 | containing_type=None, 76 | fields=[ 77 | _descriptor.FieldDescriptor( 78 | name='id', full_name='Person.id', index=0, 79 | number=1, type=5, cpp_type=1, label=2, 80 | has_default_value=False, default_value=0, 81 | message_type=None, enum_type=None, containing_type=None, 82 | is_extension=False, extension_scope=None, 83 | options=None), 84 | _descriptor.FieldDescriptor( 85 | name='name', full_name='Person.name', index=1, 86 | number=2, type=9, cpp_type=9, label=2, 87 | has_default_value=False, default_value=unicode("", "utf-8"), 88 | message_type=None, enum_type=None, containing_type=None, 89 | is_extension=False, extension_scope=None, 90 | options=None), 91 | _descriptor.FieldDescriptor( 92 | name='tags', full_name='Person.tags', index=2, 93 | number=3, type=5, cpp_type=1, label=3, 94 | has_default_value=False, default_value=[], 95 | message_type=None, enum_type=None, containing_type=None, 96 | is_extension=False, extension_scope=None, 97 | options=None), 98 | _descriptor.FieldDescriptor( 99 | name='logs', 
full_name='Person.logs', index=3,
100 |     number=4, type=11, cpp_type=10, label=3,
101 |     has_default_value=False, default_value=[],
102 |     message_type=None, enum_type=None, containing_type=None,
103 |     is_extension=False, extension_scope=None,
104 |     options=None),
105 |   ],
106 |   extensions=[
107 |   ],
108 |   nested_types=[],
109 |   enum_types=[
110 |   ],
111 |   options=None,
112 |   is_extendable=False,
113 |   extension_ranges=[],
114 |   serialized_start=85,
115 |   serialized_end=153,
116 | )
117 | 
118 | _PERSON.fields_by_name['logs'].message_type = _LOG
119 | DESCRIPTOR.message_types_by_name['Log'] = _LOG
120 | DESCRIPTOR.message_types_by_name['Person'] = _PERSON
121 | 
122 | class Log(_message.Message):
123 |   __metaclass__ = _reflection.GeneratedProtocolMessageType
124 |   DESCRIPTOR = _LOG
125 | 
126 |   # @@protoc_insertion_point(class_scope:Log)
127 | 
128 | class Person(_message.Message):
129 |   __metaclass__ = _reflection.GeneratedProtocolMessageType
130 |   DESCRIPTOR = _PERSON
131 | 
132 |   # @@protoc_insertion_point(class_scope:Person)
133 | 
134 | 
135 | # @@protoc_insertion_point(module_scope)
136 | 
--------------------------------------------------------------------------------
/Json VS Protobuf/Python/unpack.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import os
3 | import json
4 | import gzip
5 | from cStringIO import StringIO
6 | 
7 | from protocol_pb2 import Person
8 | 
9 | 
10 | 
11 | class UnPack(object):
12 |     def __init__(self):
13 |         project_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
14 |         self.data_pb = open(os.path.join(project_path, "data.pb")).read()
15 |         self.data_json = open(os.path.join(project_path, "data.json")).read()
16 | 
17 |         self.data_json_gzip_buf = StringIO(open(os.path.join(project_path, "data.json.gz")).read())
18 | 
19 |     def unpack_pb(self):
20 |         msg = Person()
21 |         msg.ParseFromString(self.data_pb)
22 |         return msg
23 | 
24 | 
25 |     def unpack_json(self):
26 |         return json.loads(self.data_json)
27 | 
28 | 
29 |     def unpack_json_gzip(self):
30 |         self.data_json_gzip_buf.seek(0)
31 |         z = gzip.GzipFile(fileobj=self.data_json_gzip_buf)
32 |         return json.loads(z.read())
33 | 
34 | 
35 | if __name__ == '__main__':
36 |     import sys
37 |     import timeit
38 | 
39 |     try:
40 |         times = int(sys.argv[1])
41 |     except:
42 |         print "./unpack.py [BENCHMARK TIMES]"
43 |         sys.exit(1)
44 | 
45 | 
46 |     p = UnPack()
47 | 
48 |     pb_t = timeit.Timer("p.unpack_pb()", setup="from __main__ import p")
49 |     json_t = timeit.Timer("p.unpack_json()", setup="from __main__ import p")
50 |     json_gzip_t = timeit.Timer("p.unpack_json_gzip()", setup="from __main__ import p")
51 | 
52 |     print "Benchmark Times =", times
53 |     print "Protobuf Seconds :", pb_t.timeit(number=times)
54 |     print "Json Seconds :", json_t.timeit(number=times)
55 |     print "Json GZip Seconds :", json_gzip_t.timeit(number=times)
56 | 
--------------------------------------------------------------------------------
/Json VS Protobuf/README.md:
--------------------------------------------------------------------------------
1 | # Json VS Protobuf
2 | 
3 | ## Test Methods
4 | * Protobuf
5 | * Json
6 | * Json with GZip
7 | 
8 | ## Benchmark Criteria:
9 | * Size of the serialized data
10 | * Serialization speed
11 | * Deserialization speed
12 | 
13 | 
14 | ## Test Results
15 | 
16 | ```
17 | Taking CSharp as an example: how the number of logs affects message size
18 | +----------+-----------+--------------------+--------+----------------+
19 | | Logs     | Protobuf  | Protobuf with GZip | Json   | Json with GZip |
20 | +----------+-----------+--------------------+--------+----------------+
21 | | 0        | 60        | 81                 | 101    | 109            |
22 | +----------+-----------+--------------------+--------+----------------+
23 | | 10       | 350       | 179                | 762    | 222            |
24 | +----------+-----------+--------------------+--------+----------------+
25 | | 50       | 1550      | 438                | 3482   | 500            |
26 | +----------+-----------+--------------------+--------+----------------+
27 | | 100      | 3050      | 729                | 6882   | 843            |
28 | +----------+-----------+--------------------+--------+----------------+
29 | 
30 | 
31 | With 100 logs, time (in seconds) to serialize 5000 times. Smaller is better
32 | +----------+-----------+--------+----------------+
33 | | Language | Protobuf  | Json   | Json with GZip |
34 | +----------+-----------+--------+----------------+
35 | | Python   | 15.13     | 0.88   | 1.75           |
36 | +----------+-----------+--------+----------------+
37 | | Erlang   | 0.79      | 1.13   | 1.61           |
38 | +----------+-----------+--------+----------------+
39 | | CSharp   | 0.23      | 1.80   | 4.30           |
40 | +----------+-----------+--------+----------------+
41 | 
42 | 
43 | With 100 logs, time (in seconds) to deserialize 5000 times. Smaller is better
44 | +----------+-----------+--------+----------------+
45 | | Language | Protobuf  | Json   | Json with GZip |
46 | +----------+-----------+--------+----------------+
47 | | Python   | 8.14      | 1.40   | 1.69           |
48 | +----------+-----------+--------+----------------+
49 | | Erlang   | 0.58      | 0.96   | 1.16           |
50 | +----------+-----------+--------+----------------+
51 | | CSharp   | 0.47      | 4.37   | 5.05           |
52 | +----------+-----------+--------+----------------+
53 | 
54 | ```
55 | 
56 | ![chart](chart.png)
57 | 
58 | 
59 | 
60 | ## Conclusions
61 | 
62 | * Thanks to the high efficiency of the C# language itself, combined with protobuf's pre-generated-code approach,
63 | 
64 |     CSharp + Protobuf is the fastest at serialization/deserialization
65 | * Google's official python protobuf library is unspeakably slow
66 | * Erlang performs very well: it is not much slower than CSharp, and it is even ahead of CSharp at JSON handling
67 | * If you run a Python server and efficiency really matters, find a replacement for the official protobuf library
68 | * For an Erlang server, both Protobuf and Json are good choices.
69 | 
70 | 
71 | ## Notes
72 | 
73 | * `protocol.proto` is the protobuf definition file used for the tests
74 | * `data.pb` is the test data serialized with protobuf
75 | * `data.json` is the test data serialized as json
76 | * `data.json.gz` is the test data serialized as json and then compressed with gzip
77 | * The gzip compression level is 6
78 | * The serialization test takes two arguments
79 |     * LOG AMOUNT: different numbers of logs simulate payloads of different sizes
80 |     * BENCHMARK TIMES: how many times to serialize
81 | * The deserialization test takes one argument
82 |     * BENCHMARK TIMES: how many times to deserialize
83 | * All reported times are in seconds
84 | 
85 | The three data.* files above are exactly the input used by the deserialization tests (100 logs).
86 | 
87 | (The files are read before the benchmark starts, so there is no repeated IO to skew the results.)
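
For reference, the sketch below shows what a single measured operation looks like on the Python side. It is not code from this repository, only a minimal illustration that mirrors what `pack.py` does: the payload layout follows `protocol.proto`, gzip is applied at level 6 as noted above, and the helper names (`build_person`, `pack_json_gzip`) are made up for this example.

```
# Minimal sketch (runs on Python 2.7 or 3): build a Person payload with N logs,
# serialize it as JSON, gzip it at level 6, and time the whole operation.
import gzip, io, json, timeit

def build_person(n):
    return {
        "id": 1,
        "name": "My Playground!!!",
        "tags": list(range(20)),
        "logs": [{"id": i, "content": "Log Contents...%d" % i,
                  "status": i % 2, "times": 10000000 + i} for i in range(n)],
    }

def pack_json_gzip(person):
    buf = io.BytesIO()
    with gzip.GzipFile(fileobj=buf, mode="wb", compresslevel=6) as f:
        f.write(json.dumps(person).encode("utf-8"))
    return buf.getvalue()

person = build_person(100)              # LOG AMOUNT = 100
print(len(json.dumps(person)))          # Json size
print(len(pack_json_gzip(person)))      # Json with GZip size
print(timeit.timeit(lambda: pack_json_gzip(person), number=100))  # seconds for BENCHMARK TIMES = 100
```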
88 | 
89 | 
90 | #### Python
91 | 
92 | * Google's official protoc is used to generate the corresponding python file
93 | * The Python library used is protobuf==2.5.0
94 | * The built-in json library is used
95 | * Serialization test: `./pack.py [LOG AMOUNT] [BENCHMARK TIMES]`
96 | 
97 | 
98 | ```
99 | ./pack.py 100 100
100 | LogAmount = 100
101 | Protobuf Size : 3050
102 | Json Size : 7707
103 | Json GZip Size : 898
104 | 
105 | Benchmark Times = 100
106 | Protobuf Seconds : 0.309223890305
107 | Json Seconds : 0.0185949802399
108 | Json GZip Seconds : 0.0442588329315
109 | ```
110 | 
111 | * Deserialization test: `./unpack.py [BENCHMARK TIMES]`
112 | 
113 | ```
114 | ./unpack.py 100
115 | Benchmark Times = 100
116 | Protobuf Seconds : 0.162999868393
117 | Json Seconds : 0.0300300121307
118 | Json GZip Seconds : 0.0367920398712
119 | 
120 | ```
121 | 
122 | #### Erlang
123 | * [gpb][1] is used as the tool for generating the erlang files from the .proto file
124 | * [jiffy][10] is used as the json library
125 | * Serialization test: `./benchmark pack [LOG AMOUNT] [BENCHMARK TIMES]`
126 | 
127 | ```
128 | ./benchmark pack 100 100
129 | LogAmount = 100
130 | Protobuf Size : 3050
131 | Json Size : 6882
132 | Json Gzip Size : 868
133 | 
134 | BenchmarkTimes = 100
135 | Protobuf Seconds : 0.018345
136 | Json Seconds : 0.023872
137 | Json Gzip Seconds : 0.033556
138 | 
139 | ```
140 | * Deserialization test: `./benchmark unpack [BENCHMARK TIMES]`
141 | 
142 | ```
143 | ./benchmark unpack 100
144 | BenchmarkTimes = 100
145 | Protobuf Seconds : 0.013519
146 | Json Seconds : 0.021982
147 | Json Gzip Seconds : 0.024749
148 | 
149 | ```
150 | 
151 | #### CSharp
152 | * [pg][2] is used to generate the corresponding dlls
153 | 
154 |     [pg][2] is a small tool of mine, based on [ProtoGen][3], which uses the [precompile][4] technique.
155 |     [pg][2] is built on the [mono][5] platform, so it can be used on Windows, Linux and MacOS.
156 | 
157 | * [LitJson][6] is used as the Json library
158 | * The Json code works with `LitJson.JsonData` directly; no classes were written up front for serialization/deserialization. It is not clear how this affects speed.
159 | * [SharpZipLib][7] is used for GZip compression/decompression
160 | * Serialization: `make pack.exe && ./pack.exe [LOG AMOUNT] [BENCHMARK TIMES]`
161 | ```
162 | ./pack.exe 100 100
163 | LogAmount = 100
164 | Protobuf Size : 3050
165 | Json Size : 6882
166 | Json GZip Size : 843
167 | 
168 | Benchmark Times = 100
169 | Protobuf Seconds : 0.004
170 | Json Seconds : 0.04
171 | Json GZip Seconds : 0.088
172 | 
173 | ```
174 | * Deserialization: `make unpack.exe && ./unpack.exe [BENCHMARK TIMES]`
175 | ```
176 | ./unpack.exe 100
177 | Benchmark Times = 100
178 | Protobuf Seconds : 0.025
179 | Json Seconds : 0.104
180 | Json GZip Seconds : 0.107
181 | 
182 | ```
183 | 
184 | 
185 | 
186 | 
187 | 
188 | 
189 | [1]: https://github.com/tomas-abrahamsson/gpb
190 | [2]: https://github.com/yueyoum/pg
191 | [3]: https://code.google.com/p/protobuf-csharp-port/wiki/ProtoGen
192 | [4]: http://game.ceeger.com/forum/read.php?tid=13479
193 | [5]: http://www.mono-project.com/
194 | [6]: https://github.com/lbv/litjson
195 | [7]: https://github.com/icsharpcode/SharpZipLib
196 | [10]: https://github.com/davisp/jiffy
197 | 
198 | 
199 | 
200 | 
--------------------------------------------------------------------------------
/Json VS Protobuf/chart.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/yueyoum/playground/1b90abc14b306f99c7f2f70e629cb969fbebf26a/Json VS Protobuf/chart.png
--------------------------------------------------------------------------------
/Json VS Protobuf/data.json:
--------------------------------------------------------------------------------
1 | {"logs": [{"content": "Log Contents...0", "status": 0, "id": 0, "times": 10000000}, {"content": "Log Contents...1", "status": 1, "id": 1, "times": 10000001}, {"content": "Log Contents...2", "status": 0, "id": 2,
"times": 10000002}, {"content": "Log Contents...3", "status": 1, "id": 3, "times": 10000003}, {"content": "Log Contents...4", "status": 0, "id": 4, "times": 10000004}, {"content": "Log Contents...5", "status": 1, "id": 5, "times": 10000005}, {"content": "Log Contents...6", "status": 0, "id": 6, "times": 10000006}, {"content": "Log Contents...7", "status": 1, "id": 7, "times": 10000007}, {"content": "Log Contents...8", "status": 0, "id": 8, "times": 10000008}, {"content": "Log Contents...9", "status": 1, "id": 9, "times": 10000009}, {"content": "Log Contents...10", "status": 0, "id": 10, "times": 10000010}, {"content": "Log Contents...11", "status": 1, "id": 11, "times": 10000011}, {"content": "Log Contents...12", "status": 0, "id": 12, "times": 10000012}, {"content": "Log Contents...13", "status": 1, "id": 13, "times": 10000013}, {"content": "Log Contents...14", "status": 0, "id": 14, "times": 10000014}, {"content": "Log Contents...15", "status": 1, "id": 15, "times": 10000015}, {"content": "Log Contents...16", "status": 0, "id": 16, "times": 10000016}, {"content": "Log Contents...17", "status": 1, "id": 17, "times": 10000017}, {"content": "Log Contents...18", "status": 0, "id": 18, "times": 10000018}, {"content": "Log Contents...19", "status": 1, "id": 19, "times": 10000019}, {"content": "Log Contents...20", "status": 0, "id": 20, "times": 10000020}, {"content": "Log Contents...21", "status": 1, "id": 21, "times": 10000021}, {"content": "Log Contents...22", "status": 0, "id": 22, "times": 10000022}, {"content": "Log Contents...23", "status": 1, "id": 23, "times": 10000023}, {"content": "Log Contents...24", "status": 0, "id": 24, "times": 10000024}, {"content": "Log Contents...25", "status": 1, "id": 25, "times": 10000025}, {"content": "Log Contents...26", "status": 0, "id": 26, "times": 10000026}, {"content": "Log Contents...27", "status": 1, "id": 27, "times": 10000027}, {"content": "Log Contents...28", "status": 0, "id": 28, "times": 10000028}, {"content": "Log Contents...29", "status": 1, "id": 29, "times": 10000029}, {"content": "Log Contents...30", "status": 0, "id": 30, "times": 10000030}, {"content": "Log Contents...31", "status": 1, "id": 31, "times": 10000031}, {"content": "Log Contents...32", "status": 0, "id": 32, "times": 10000032}, {"content": "Log Contents...33", "status": 1, "id": 33, "times": 10000033}, {"content": "Log Contents...34", "status": 0, "id": 34, "times": 10000034}, {"content": "Log Contents...35", "status": 1, "id": 35, "times": 10000035}, {"content": "Log Contents...36", "status": 0, "id": 36, "times": 10000036}, {"content": "Log Contents...37", "status": 1, "id": 37, "times": 10000037}, {"content": "Log Contents...38", "status": 0, "id": 38, "times": 10000038}, {"content": "Log Contents...39", "status": 1, "id": 39, "times": 10000039}, {"content": "Log Contents...40", "status": 0, "id": 40, "times": 10000040}, {"content": "Log Contents...41", "status": 1, "id": 41, "times": 10000041}, {"content": "Log Contents...42", "status": 0, "id": 42, "times": 10000042}, {"content": "Log Contents...43", "status": 1, "id": 43, "times": 10000043}, {"content": "Log Contents...44", "status": 0, "id": 44, "times": 10000044}, {"content": "Log Contents...45", "status": 1, "id": 45, "times": 10000045}, {"content": "Log Contents...46", "status": 0, "id": 46, "times": 10000046}, {"content": "Log Contents...47", "status": 1, "id": 47, "times": 10000047}, {"content": "Log Contents...48", "status": 0, "id": 48, "times": 10000048}, {"content": "Log Contents...49", "status": 1, "id": 
49, "times": 10000049}, {"content": "Log Contents...50", "status": 0, "id": 50, "times": 10000050}, {"content": "Log Contents...51", "status": 1, "id": 51, "times": 10000051}, {"content": "Log Contents...52", "status": 0, "id": 52, "times": 10000052}, {"content": "Log Contents...53", "status": 1, "id": 53, "times": 10000053}, {"content": "Log Contents...54", "status": 0, "id": 54, "times": 10000054}, {"content": "Log Contents...55", "status": 1, "id": 55, "times": 10000055}, {"content": "Log Contents...56", "status": 0, "id": 56, "times": 10000056}, {"content": "Log Contents...57", "status": 1, "id": 57, "times": 10000057}, {"content": "Log Contents...58", "status": 0, "id": 58, "times": 10000058}, {"content": "Log Contents...59", "status": 1, "id": 59, "times": 10000059}, {"content": "Log Contents...60", "status": 0, "id": 60, "times": 10000060}, {"content": "Log Contents...61", "status": 1, "id": 61, "times": 10000061}, {"content": "Log Contents...62", "status": 0, "id": 62, "times": 10000062}, {"content": "Log Contents...63", "status": 1, "id": 63, "times": 10000063}, {"content": "Log Contents...64", "status": 0, "id": 64, "times": 10000064}, {"content": "Log Contents...65", "status": 1, "id": 65, "times": 10000065}, {"content": "Log Contents...66", "status": 0, "id": 66, "times": 10000066}, {"content": "Log Contents...67", "status": 1, "id": 67, "times": 10000067}, {"content": "Log Contents...68", "status": 0, "id": 68, "times": 10000068}, {"content": "Log Contents...69", "status": 1, "id": 69, "times": 10000069}, {"content": "Log Contents...70", "status": 0, "id": 70, "times": 10000070}, {"content": "Log Contents...71", "status": 1, "id": 71, "times": 10000071}, {"content": "Log Contents...72", "status": 0, "id": 72, "times": 10000072}, {"content": "Log Contents...73", "status": 1, "id": 73, "times": 10000073}, {"content": "Log Contents...74", "status": 0, "id": 74, "times": 10000074}, {"content": "Log Contents...75", "status": 1, "id": 75, "times": 10000075}, {"content": "Log Contents...76", "status": 0, "id": 76, "times": 10000076}, {"content": "Log Contents...77", "status": 1, "id": 77, "times": 10000077}, {"content": "Log Contents...78", "status": 0, "id": 78, "times": 10000078}, {"content": "Log Contents...79", "status": 1, "id": 79, "times": 10000079}, {"content": "Log Contents...80", "status": 0, "id": 80, "times": 10000080}, {"content": "Log Contents...81", "status": 1, "id": 81, "times": 10000081}, {"content": "Log Contents...82", "status": 0, "id": 82, "times": 10000082}, {"content": "Log Contents...83", "status": 1, "id": 83, "times": 10000083}, {"content": "Log Contents...84", "status": 0, "id": 84, "times": 10000084}, {"content": "Log Contents...85", "status": 1, "id": 85, "times": 10000085}, {"content": "Log Contents...86", "status": 0, "id": 86, "times": 10000086}, {"content": "Log Contents...87", "status": 1, "id": 87, "times": 10000087}, {"content": "Log Contents...88", "status": 0, "id": 88, "times": 10000088}, {"content": "Log Contents...89", "status": 1, "id": 89, "times": 10000089}, {"content": "Log Contents...90", "status": 0, "id": 90, "times": 10000090}, {"content": "Log Contents...91", "status": 1, "id": 91, "times": 10000091}, {"content": "Log Contents...92", "status": 0, "id": 92, "times": 10000092}, {"content": "Log Contents...93", "status": 1, "id": 93, "times": 10000093}, {"content": "Log Contents...94", "status": 0, "id": 94, "times": 10000094}, {"content": "Log Contents...95", "status": 1, "id": 95, "times": 10000095}, {"content": "Log Contents...96", 
"status": 0, "id": 96, "times": 10000096}, {"content": "Log Contents...97", "status": 1, "id": 97, "times": 10000097}, {"content": "Log Contents...98", "status": 0, "id": 98, "times": 10000098}, {"content": "Log Contents...99", "status": 1, "id": 99, "times": 10000099}], "tags": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19], "id": 1, "name": "My Playground!!!"} -------------------------------------------------------------------------------- /Json VS Protobuf/data.json.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yueyoum/playground/1b90abc14b306f99c7f2f70e629cb969fbebf26a/Json VS Protobuf/data.json.gz -------------------------------------------------------------------------------- /Json VS Protobuf/data.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yueyoum/playground/1b90abc14b306f99c7f2f70e629cb969fbebf26a/Json VS Protobuf/data.pb -------------------------------------------------------------------------------- /Json VS Protobuf/protocol.proto: -------------------------------------------------------------------------------- 1 | message Log { 2 | required int32 id = 1; 3 | required string content = 2; 4 | required int32 status = 3; 5 | required int32 times = 4; 6 | } 7 | 8 | message Person { 9 | required int32 id = 1; 10 | required string name = 2; 11 | repeated int32 tags = 3; 12 | repeated Log logs = 4; 13 | } 14 | -------------------------------------------------------------------------------- /c++/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !README.md 3 | !.gitignore 4 | !*.h 5 | !*.hpp 6 | !*.c 7 | !*.cpp 8 | -------------------------------------------------------------------------------- /c++/README.md: -------------------------------------------------------------------------------- 1 | # context.cpp 2 | 3 | 演示如何使用 boost.context 这个 stackful context 库 4 | 5 | # coroutine_symmetric.cpp 6 | 7 | 演示了 boost stackful 的对称式协程的 最基本用法 8 | 9 | # coro_basic.cpp 10 | 11 | 演示了 基于 stackful 对称式协程 封装的 coro 使用方法 12 | 13 | # coro_event_and_queue.cpp 14 | 15 | 演示了 Event 同步协程, Queue 在协程之间传递参数 16 | 17 | # coro_echo_server.cpp 18 | 19 | 演示了 基于协程的一个简单的echo server, 用同步的方式写异步程序 20 | -------------------------------------------------------------------------------- /c++/context.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | boost::context::fcontext_t hub; 9 | class Greenlet; 10 | 11 | std::unordered_map greenlets_map; 12 | 13 | class Greenlet 14 | { 15 | public: 16 | Greenlet(void (*fn)(intptr_t), std::string arg) 17 | : fn_(fn), 18 | ctx_(boost::context::make_fcontext(stack_.data()+stack_.size(), stack_.size(), fn_)), 19 | arg_(std::move(arg)), 20 | finish_(false) 21 | { 22 | id_ = reinterpret_cast(this); 23 | greenlets_map[id_] = this; 24 | printf("[CORO %d]<%s> spawned\n", id_, arg_.data()); 25 | } 26 | 27 | ~Greenlet() 28 | { 29 | printf("[CORO %d]<%s> finish\n", id_, arg_.data()); 30 | auto iter = greenlets_map.find(id_); 31 | greenlets_map.erase(iter); 32 | } 33 | 34 | void operator()() 35 | { 36 | boost::context::jump_fcontext(&hub, ctx_, static_cast(id_)); 37 | } 38 | 39 | void yield(bool isfinish = false) 40 | { 41 | finish_ = isfinish; 42 | boost::context::jump_fcontext(&ctx_, hub, 0); 43 | } 44 | 45 | std::string& arg() 46 | { 47 | return arg_; 48 | } 49 
50 |     int& id()
51 |     {
52 |         return id_;
53 |     }
54 | 
55 |     bool& finish()
56 |     {
57 |         return finish_;
58 |     }
59 | 
60 | 
61 | private:
62 |     void (*fn_)(intptr_t);
63 |     std::array<char, 64 * 1024> stack_;  // coroutine stack; the element type and size were stripped, 64 KB is an assumption
64 |     boost::context::fcontext_t ctx_;
65 |     std::string arg_;
66 |     int id_;
67 |     bool finish_;
68 | };
69 | 
70 | 
71 | 
72 | class Scheduler
73 | {
74 | public:
75 |     Scheduler()
76 |     {
77 |     }
78 | 
79 |     void spawn(void (*fn)(intptr_t), std::string arg)
80 |     {
81 |         Greenlet* gl = new Greenlet(fn, std::move(arg));
82 |         greenlets.push_back(gl);
83 |     }
84 | 
85 |     void operator()()
86 |     {
87 |         while(!greenlets.empty())
88 |         {
89 |             Greenlet* gl = greenlets.front();
90 |             greenlets.pop_front();
91 | 
92 |             printf("[MAIN] run coroutine<%s>\n", gl->arg().data());
93 |             (*gl)();
94 | 
95 |             if(gl->finish())
96 |             {
97 |                 delete gl;
98 |             }
99 |             else
100 |             {
101 |                 greenlets.push_back(gl);
102 |             }
103 |         }
104 | 
105 |         printf("[MAIN] no more coroutines, exit\n");
106 |     }
107 | 
108 | private:
109 |     std::deque<Greenlet*> greenlets;
110 | };
111 | 
112 | Scheduler sche;
113 | 
114 | void fxx(intptr_t);
115 | 
116 | void ff(intptr_t arg)
117 | {
118 |     int id = static_cast<int>(arg);
119 |     auto gl = greenlets_map[id];
120 | 
121 |     printf("[CORO %d] <%s> enter ff\n", id, gl->arg().data());
122 |     gl->yield();
123 |     printf("[CORO %d] <%s> re-enter ff\n", id, gl->arg().data());
124 |     sche.spawn(fxx, std::string("three"));
125 |     gl->yield(true);
126 | };
127 | 
128 | 
129 | void fxx(intptr_t arg)
130 | {
131 |     int id = static_cast<int>(arg);
132 |     auto gl = greenlets_map[id];
133 |     printf("[CORO %d] <%s> enter fxx\n", id, gl->arg().data());
134 |     gl->yield();
135 |     printf("[CORO %d] <%s> re-enter fxx\n", id, gl->arg().data());
136 |     sche.spawn(ff, std::string("four"));
137 |     gl->yield(true);
138 | };
139 | 
140 | 
141 | 
142 | int main()
143 | {
144 |     sche.spawn(ff, std::string("one"));
145 |     sche.spawn(fxx, std::string("two"));
146 |     sche();
147 |     printf("[MAIN] done\n");
148 | 
149 |     return 0;
150 | }
151 | 
--------------------------------------------------------------------------------
/c++/coro.h:
--------------------------------------------------------------------------------
1 | #ifndef __CORO_H__
2 | #define __CORO_H__
3 | 
4 | #include <iostream>                 // NOTE: the include targets were lost during extraction;
5 | #include <functional>               // this list is reconstructed from what the header uses
6 | #include <memory>                   // (<string>, <vector> and <cstdlib> may also be needed).
7 | #include <mutex>
8 | #include <queue>
9 | #include <set>
10 | #include <boost/coroutine/all.hpp>
11 | #include <boost/asio.hpp>
12 | 
13 | namespace coro
14 | {
15 | 
16 | typedef boost::coroutines::symmetric_coroutine<void>::call_type call_type;
17 | typedef boost::coroutines::symmetric_coroutine<void>::yield_type yield_type;
18 | 
19 | class Coroutine;
20 | class Event;
21 | class Scheduler;
22 | class Timer;
23 | static Coroutine* spawn(std::function<void()>, std::string);
24 | 
25 | namespace this_coroutine
26 | {
27 |     namespace detail
28 |     {
29 |         Coroutine* current;
30 |         void jump(Coroutine*);
31 |     }
32 | 
33 |     // yield: give up the current execution
34 |     void yield();
35 |     // suspend: wait on a blocking call (e.g. IO, Event, Queue...)
36 |     //          or switch to other coroutines and wait for control to come back.
37 |     //          This coroutine will be resumed when that blocking call returns.
38 |     void suspend();
39 |     std::shared_ptr<Timer> sleep_for(int);
40 | }
41 | 
42 | class Coroutine
43 | {
44 | public:
45 |     call_type* ct;
46 |     yield_type* yt;
47 |     Coroutine* from;
48 |     Coroutine* to;
49 |     std::string name; bool active;
50 | 
51 |     Coroutine(std::string n) :
52 |         ct(NULL), yt(NULL), from(NULL), to(NULL), name(n), active(true)
53 |     {
54 |     }
55 | 
56 |     ~Coroutine()
57 |     {
58 |         delete ct;
59 | 
60 |         if (to)
61 |         {
62 |             to->from = NULL;
63 |         }
64 |         std::cout << "[co] die " << name << std::endl;
65 |     }
66 | 
67 |     void add_link(Coroutine*)
68 |     {
69 |     }
70 | 
71 |     void remove_link(Coroutine*)
72 |     {
73 |     }
74 | 
75 |     void suspend()
76 |     {
77 |         context_switch(from);
78 |     }
79 | 
80 |     void jump(Coroutine* target)
81 |     {
82 |         target->from = this;
83 |         to = target;
84 |         context_switch(target);
85 |     }
86 | 
87 | private:
88 |     void context_switch(Coroutine* target)
89 |     {
90 |         this_coroutine::detail::current = target;
91 |         if (target)
92 |         {
93 |             std::cout << "[co] " << name << " switch to " << target->name
94 |                       << std::endl;
95 |             (*yt)(*(target->ct));
96 |         }
97 |         else
98 |         {
99 |             std::cout << "[co] " << name << " return to main context"
100 |                       << std::endl;
101 |             (*yt)();
102 |         }
103 |     }
104 | 
105 |     std::vector<Coroutine*> links;
106 | };
107 | 
108 | class Event
109 | {
110 | public:
111 |     Event() :
112 |         set_(false), process_set_(false)
113 |     {
114 |         co = coro::spawn(std::bind(&Event::loop, this), std::string("evt"));
115 |     }
116 | 
117 |     void set()
118 |     {
119 |         if (process_set_)
120 |         {
121 |             std::cout << "Event can not be set now!" << std::endl;
122 |             exit(1);
123 |         }
124 |         this_coroutine::detail::jump(co);
125 |     }
126 | 
127 |     void wait()
128 |     {
129 |         auto current = this_coroutine::detail::current;
130 |         if (!current)
131 |         {
132 |             std::cout << "ERROR, can not wait in main context" << std::endl;
133 |             exit(1);
134 |         }
135 | 
136 |         if (!set_)
137 |         {
138 |             wait_queue_.push(current);
139 |             current->suspend();
140 |         }
141 |         set_ = false;
142 |     }
143 | 
144 |     std::size_t size() const
145 |     {
146 |         return wait_queue_.size();
147 |     }
148 | 
149 | private:
150 |     void loop()
151 |     {
152 |         for (;;)
153 |         {
154 |             process_set_ = true;
155 | 
156 |             // Assume coroutines A and B are waiting to be notified.
157 |             // The correct process is:
158 |             //   1) the event jumps to A
159 |             //   2) A calls `this_coroutine::yield()` or `this_coroutine::suspend()`
160 |             //      to yield back to the event
161 |             //   3) because of the while loop, the event then jumps to B
162 |             //   4) just like A, B yields back to the event
163 |             //   5) the event yields out and waits for the next set().
164 |             //
165 |             // Now think about this situation:
166 |             //   1) the event jumps to A
167 |             //   2) A calls `evt.set()`
168 |             //   3) the event loop advances one step,
169 |             //      which means B gets notified
170 |             //   4) B yields back to the event,
171 |             //      so this inner set operation completes
172 |             //      and control returns to step 2)
173 |             //   5) A continues to execute; if A now calls yield,
174 |             //      the event re-enters its loop,
175 |             //      which has the same effect as another `evt.set()` call.
176 |             //
177 |             // That behavior is simply wrong.
178 |             // So the `process_set_` flag is used to prevent it.
179 |             //
180 |             //
181 |             // A separate process_queue_ is needed, because:
182 |             // if there were no process_queue_ and the loop walked wait_queue_ directly,
183 |             // then when it jumps to coroutine A and A calls `evt.wait()` again,
184 |             // the loop would pop A once more and jump straight back to it,
185 |             // which leads to an infinite loop.
186 | 
187 |             while (!wait_queue_.empty())
188 |             {
189 |                 process_queue_.push(wait_queue_.front());
190 |                 wait_queue_.pop();
191 |             }
192 | 
193 |             while (!process_queue_.empty())
194 |             {
195 |                 auto w = process_queue_.front();
196 |                 process_queue_.pop();
197 |                 this_coroutine::detail::jump(w);
198 |             }
199 |             process_set_ = false;
200 |             set_ = true;
201 | 
202 |             this_coroutine::suspend();
203 |         }
204 |     }
205 | 
206 |     bool set_; bool process_set_;
207 |     std::queue<Coroutine*> wait_queue_;
208 |     std::queue<Coroutine*> process_queue_;
209 |     Coroutine* co;
210 | };
211 | 
212 | template <typename T>
213 | class Queue
214 | {
215 | public:
216 |     Queue() :
217 |         co_get_(NULL)
218 |     {
219 |     }
220 | 
221 |     std::size_t size() const
222 |     {
223 |         return q_.size();
224 |     }
225 | 
226 |     void put(T& value)
227 |     {
228 |         // std::cout << "[queue] put" << std::endl;
229 |         q_.push(value);
230 |         if (co_get_)
231 |         {
232 |             this_coroutine::detail::jump(co_get_);
233 |         }
234 |     }
235 | 
236 |     T get()  // returns by value: the original returned a reference to the local below, which would dangle
237 |     {
238 |         auto current = this_coroutine::detail::current;
239 |         if (!current)
240 |         {
241 |             std::cout << "ERROR, can not get queue in main context"
242 |                       << std::endl;
243 |             exit(1);
244 |         }
245 | 
246 |         if (co_get_)
247 |         {
248 |             std::cout << "ERROR, another coroutine is already getting from the queue"
249 |                       << std::endl;
250 |             exit(1);
251 |         }
252 | 
253 |         // std::cout << "[queue] co " << current->name << " start get" << std::endl;
254 |         if (q_.empty())
255 |         {
256 |             co_get_ = current;
257 |             current->suspend();
258 |         }
259 | 
260 |         // std::cout << "[queue] co " << current->name << " end get" << std::endl;
261 | 
262 |         auto value = q_.front();
263 |         q_.pop();
264 |         co_get_ = NULL;
265 |         return value;
266 |     }
267 | 
268 | private:
269 |     std::queue<T> q_;
270 |     Coroutine* co_get_;
271 | };
272 | 
273 | class Scheduler
274 | {
275 | public:
276 |     static Scheduler* create(boost::asio::io_service& io)
277 |     {
278 |         if (!instance_)
279 |         {
280 |             std::lock_guard<std::mutex> lock(mutex_);
281 |             if (!instance_)
282 |             {
283 |                 instance_ = new Scheduler(io);
284 |             }
285 |         }
286 | 
287 |         return instance_;
288 |     }
289 | 
290 |     static Scheduler* get()
291 |     {
292 |         return instance_;
293 |     }
294 | 
295 |     boost::asio::io_service& io_service()
296 |     {
297 |         return io_;
298 |     }
299 | 
300 |     void run()
301 |     {
302 |         co_loop_ = coro::spawn(std::bind(&Scheduler::loop, this),
303 |                                std::string("loop"));
304 |         this_coroutine::detail::jump(co_loop_);
305 |         coroutines.insert(co_loop_);
306 |     }
307 | 
308 |     void spawn(std::function<void()> func, std::string name)
309 |     {
310 |         auto co = coro::spawn(func, name);
311 |         coroutines.insert(co);
312 |         queue_.put(co);
313 |     }
314 | 
315 |     void kill(Coroutine* co)
316 |     {
317 |         coroutines.erase(co);
318 |         // delete co;
319 |     }
320 | 
321 |     std::size_t size() const
322 |     {
323 |         return coroutines.size();
324 |     }
325 | 
326 | private:
327 |     Scheduler(boost::asio::io_service& io) :
328 |         io_(io), queue_(), co_loop_(NULL)
329 |     {
330 |     }
331 | 
332 |     void loop()
333 |     {
334 |         for (;;)
335 |         {
336 |             // std::cout << "[loop] start get" << std::endl;
337 |             auto co = queue_.get();
338 |             // std::cout << "[loop] got: " << co->name << std::endl;
339 | 
340 |             // std::cout << "[loop] start to run " << co->name << std::endl;
341 |             this_coroutine::detail::jump(co);
342 |             // std::cout << "[loop] back from " << co->name << std::endl;
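            // Control returns to this point when the coroutine that was just resumed
            // yields, suspends on a blocking call (Timer, Event, Queue, IO), or runs to
            // completion; only finished coroutines (active == false) are removed here.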
343 | 
344 |             if (!co->active)
345 |             {
346 |                 kill(co);
347 |             }
348 |         }
349 |     }
350 | 
351 |     static std::mutex mutex_;
352 |     static Scheduler* instance_;
353 | 
354 |     boost::asio::io_service& io_;
355 |     std::set<Coroutine*> coroutines;
356 |     Queue<Coroutine*> queue_;
357 |     Coroutine* co_loop_;
358 | };
359 | 
360 | class Timer
361 | {
362 | public:
363 |     Timer(int seconds) :
364 |         t_(Scheduler::get()->io_service())
365 |     {
366 |         t_.expires_from_now(boost::posix_time::seconds(seconds));
367 |         co = this_coroutine::detail::current;
368 | 
369 |         t_.async_wait(std::bind(&Timer::handler, this, std::placeholders::_1));
370 |         co->suspend();
371 |     }
372 | 
373 |     void cancel()
374 |     {
375 |     }
376 | 
377 | private:
378 |     void handler(const boost::system::error_code& error)
379 |     {
380 |         if (error)
381 |         {
382 |             std::cout << "Timer error: " << error << std::endl;
383 |         }
384 |         else
385 |         {
386 |             this_coroutine::detail::jump(co);
387 |         }
388 |     }
389 | 
390 |     boost::asio::deadline_timer t_;
391 |     Coroutine* co;
392 | };
393 | 
394 | static Coroutine* spawn(std::function<void()> func, std::string name)
395 | {
396 |     Coroutine* co = new Coroutine(name);
397 | 
398 |     auto func_wrapper = [co](yield_type& yield, std::function<void()> co_func)
399 |     {
400 |         co->yt = &yield;
401 |         co->suspend();
402 | 
403 |         // enter func
404 |         co_func();
405 | 
406 |         co->active = false;
407 |         auto from = co->from;
408 | 
409 |         Scheduler::get()->kill(co);
410 |         this_coroutine::detail::current = NULL;
411 |         if(from)
412 |         {
413 |             this_coroutine::detail::jump(from);
414 |         }
415 |     };
416 | 
417 |     call_type* ct = new call_type(
418 |         std::bind(func_wrapper, std::placeholders::_1, func));
419 |     co->ct = ct;
420 | 
421 |     this_coroutine::detail::jump(co);
422 |     return co;
423 | }
424 | 
425 | void this_coroutine::detail::jump(Coroutine* other)
426 | {
427 |     if (this_coroutine::detail::current)
428 |     {
429 |         // call in a coroutine
430 |         this_coroutine::detail::current->jump(other);
431 |     }
432 |     else
433 |     {
434 |         // call in main context
435 |         // std::cout << "[main] jump from main context to " << other->name << std::endl;
436 |         other->from = NULL;
437 |         this_coroutine::detail::current = other;
438 |         (*(other->ct))();
439 |     }
440 | }
441 | 
442 | void this_coroutine::yield()
443 | {
444 |     if (!this_coroutine::detail::current)
445 |     {
446 |         std::cout << "ERROR can not yield from main context" << std::endl;
447 |         exit(1);
448 |     }
449 | 
450 |     std::make_shared<Timer>(0);  // zero-delay Timer: suspend and let the io_service resume us (<Timer> is a reconstruction; the template argument was stripped)
451 | }
452 | 
453 | void this_coroutine::suspend()
454 | {
455 |     if (!this_coroutine::detail::current)
456 |     {
457 |         std::cout << "ERROR can not suspend in main context" << std::endl;
458 |         exit(1);
459 |     }
460 | 
461 |     this_coroutine::detail::current->suspend();
462 | }
463 | 
464 | std::shared_ptr<Timer> this_coroutine::sleep_for(int seconds)
465 | {
466 |     if (!this_coroutine::detail::current)
467 |     {
468 |         std::cout << "ERROR can not sleep_for in main context" << std::endl;
469 |         exit(1);
470 |     }
471 | 
472 |     return std::make_shared<Timer>(seconds);
473 | }
474 | 
475 | std::mutex Scheduler::mutex_;
476 | Scheduler* Scheduler::instance_ = NULL;
477 | 
478 | }
479 | 
480 | #endif
481 | 
--------------------------------------------------------------------------------
/c++/coro_basic.cpp:
--------------------------------------------------------------------------------
1 | #include <iostream>        // NOTE: include targets reconstructed from usage (stripped in extraction)
2 | #include <string>
3 | #include <boost/asio.hpp>
4 | 
5 | #include "coro.h"
6 | 
7 | 
8 | void one()
9 | {
10 |     std::cout << "[one] 1" << std::endl;
11 |     coro::this_coroutine::yield();
12 |     std::cout << "[one] 2" << std::endl;
13 | }
14 | 
15 | void two()
16 | {
17 |     std::cout << "[two] 1" << std::endl;
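    // yield() hands control back to the scheduler's loop coroutine (in this
    // reconstruction it does so through a zero-delay asio Timer), so the next
    // line only runs on a later pass of the io_service.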
18 |     coro::this_coroutine::yield();
19 |     std::cout << "[two] 2" << std::endl;
20 | }
21 | 
22 | void three()
23 | {
24 |     std::cout << "[three] 1" << std::endl;
25 |     coro::this_coroutine::yield();
26 |     std::cout << "[three] 2" << std::endl;
27 | }
28 | 
29 | void four()
30 | {
31 |     std::cout << "[four] 1" << std::endl;
32 |     coro::this_coroutine::yield();
33 |     std::cout << "[four] 2" << std::endl;
34 |     coro::this_coroutine::yield();
35 |     std::cout << "[four] 3" << std::endl;
36 | }
37 | 
38 | int main()
39 | {
40 |     std::cout << "start" << std::endl;
41 |     boost::asio::io_service io;
42 |     boost::asio::io_service::work w(io);
43 | 
44 |     auto sche = coro::Scheduler::create(io);
45 | 
46 |     sche->spawn(one, std::string("one"));
47 | 
48 |     boost::asio::deadline_timer timer(io, boost::posix_time::seconds(3));
49 |     timer.async_wait(
50 |         [&sche](const boost::system::error_code&)
51 |         {
52 |             sche->spawn(two, std::string("two"));
53 |             sche->spawn(three, std::string("three"));
54 |         }
55 |     );
56 | 
57 |     boost::asio::deadline_timer timer1(io, boost::posix_time::seconds(6));
58 |     timer1.async_wait(
59 |         [&sche](const boost::system::error_code&)
60 |         {
61 |             sche->spawn(four, std::string("four"));
62 |         }
63 |     );
64 | 
65 |     sche->run();
66 | 
67 |     io.run();
68 | 
69 |     delete sche;
70 |     std::cout << "end" << std::endl;
71 | }
72 | 
73 | 
--------------------------------------------------------------------------------
/c++/coro_echo_server.cpp:
--------------------------------------------------------------------------------
1 | #include <iostream>        // NOTE: include targets reconstructed from usage (stripped in extraction)
2 | #include <string>
3 | #include <boost/asio.hpp>
4 | 
5 | #include "coro_echo_server.h"
6 | 
7 | 
8 | void connection_handler(Client client)
9 | {
10 |     std::cout << "new client" << std::endl;
11 | 
12 |     // Client remote = Endpoint::connect(client->io_service(), std::string("127.0.0.1"), 8008);
13 |     for(;;)
14 |     {
15 |         std::string data = client->recv(1024);
16 |         if(data.empty())
17 |         {
18 |             std::cout << "client connection lost" << std::endl;
19 |             break;
20 |         }
21 | 
22 |         std::cout << "client got: " << data << std::endl;
23 | 
24 |         // remote->send(data);
25 |         //
26 |         // data = remote->recv(1024);
27 |         // if(data.empty())
28 |         // {
29 |         //     std::cout << "remote connection lost" << std::endl;
30 |         //     break;
31 |         // }
32 |         //
33 |         // std::cout << "remote got: " << data << std::endl;
34 | 
35 |         client->send(data);
36 |     }
37 | }
38 | 
39 | 
40 | int main()
41 | {
42 |     boost::asio::io_service io;
43 |     boost::asio::io_service::work w(io);
44 | 
45 |     Server s(io, 9090, connection_handler);
46 |     s.run();
47 | 
48 |     return 0;
49 | }
50 | 
--------------------------------------------------------------------------------
/c++/coro_echo_server.h:
--------------------------------------------------------------------------------
1 | #ifndef __CORO_ECHO_SERVER_H__
2 | #define __CORO_ECHO_SERVER_H__
3 | 
4 | #include <boost/asio.hpp>   // NOTE: include targets reconstructed from usage (stripped in extraction)
5 | #include <array>
6 | #include <memory>
7 | #include <functional>
8 | 
9 | #include "coro.h"
10 | 
11 | using boost::asio::ip::tcp;
12 | 
13 | class Connection: public std::enable_shared_from_this<Connection>
14 | {
15 | public:
16 |     Connection(tcp::socket&& socket):
17 |         socket_(std::move(socket))
18 |     {}
19 | 
20 |     boost::asio::io_service& io_service()
21 |     {
22 |         return socket_.get_io_service();
23 |     }
24 | 
25 |     std::string recv(const std::size_t& size)
26 |     {
27 |         std::array<char, 1024> buf = {};  // zero-filled so buf.data() below is NUL-terminated; the size is an assumption (the original was stripped), callers pass 1024
28 | 
29 |         auto current = coro::this_coroutine::detail::current;
30 |         bool has_error = false;
31 | 
32 |         socket_.async_read_some(
33 |             boost::asio::buffer(buf),
34 |             [current, &has_error](const boost::system::error_code& error, std::size_t)
35 |             {
36 |                 if(error)
37 |                 {
38 |                     std::cout << "recv error: " << error << std::endl;
39 |                     has_error = true;
40 |                 }
41 |                 coro::this_coroutine::detail::jump(current);
42 |             }
43 |         );
44 | 
45 |         coro::this_coroutine::suspend();
46 | 
47 |         if(has_error) return std::string();
48 |         return std::string(buf.data());
49 |     }
50 | 
51 |     void send(std::string data)
52 |     {
53 |         auto current = coro::this_coroutine::detail::current;
54 |         boost::asio::async_write(
55 |             socket_,
56 |             boost::asio::buffer(data),
57 |             [current](const boost::system::error_code& error, std::size_t)
58 |             {
59 |                 if(error)
60 |                 {
61 |                     std::cout << "send error: " << error << std::endl;
62 |                 }
63 |                 else
64 |                 {
65 |                     coro::this_coroutine::detail::jump(current);
66 |                 }
67 |             }
68 |         );
69 | 
70 |         coro::this_coroutine::suspend();
71 |     }
72 | 
73 | 
74 | private:
75 |     tcp::socket socket_;
76 | };
77 | 
78 | 
79 | typedef std::shared_ptr<Connection> Client;
80 | 
81 | 
82 | class Endpoint : public Connection
83 | {
84 | public:
85 |     static Client connect(boost::asio::io_service& io, std::string ip, int port)
86 |     {
87 |         tcp::socket socket(io);
88 |         tcp::endpoint endpoint(
89 |             boost::asio::ip::address::from_string(ip),
90 |             port
91 |         );
92 | 
93 |         auto current = coro::this_coroutine::detail::current;
94 | 
95 |         socket.async_connect(
96 |             endpoint,
97 |             [current](const boost::system::error_code& error)
98 |             {
99 |                 if(error)
100 |                 {
101 |                     std::cout << "connect error: " << error << std::endl;
102 |                 }
103 |                 else
104 |                 {
105 |                     coro::this_coroutine::detail::jump(current);
106 |                 }
107 |             }
108 |         );
109 | 
110 |         coro::this_coroutine::suspend();
111 | 
112 |         return std::make_shared<Connection>(std::move(socket));
113 |     }
114 | };
115 | 
116 | 
117 | class Server
118 | {
119 | public:
120 |     Server(boost::asio::io_service& io, int port, std::function<void(Client)> callback)
121 |         : io_(io),
122 |           acceptor_(io, tcp::endpoint(tcp::v4(), port)),
123 |           accept_callback_(callback)
124 |     {
125 |         sche_ = coro::Scheduler::create(io_);
126 |     }
127 | 
128 |     void run()
129 |     {
130 |         sche_->spawn(std::bind(&Server::accept_loop, this), std::string("accept_loop"));
131 | 
132 |         sche_->run();
133 |         io_.run();
134 |     }
135 | 
136 | private:
137 |     void accept_loop()
138 |     {
139 |         auto current = coro::this_coroutine::detail::current;
140 |         for(;;)
141 |         {
142 |             tcp::socket socket(io_);
143 |             acceptor_.async_accept(
144 |                 socket,
145 |                 [current](const boost::system::error_code& error)
146 |                 {
147 |                     if(error)
148 |                     {
149 |                         std::cout << "accept error: " << error << std::endl;
150 |                         exit(1);
151 |                     }
152 |                     else
153 |                     {
154 |                         coro::this_coroutine::detail::jump(current);
155 |                     }
156 |                 }
157 |             );
158 | 
159 |             coro::this_coroutine::suspend();
160 | 
161 |             Client client = std::make_shared<Connection>(std::move(socket));
162 |             std::function<void()> handler = std::bind(accept_callback_, client);
163 |             sche_->spawn(handler, std::string("client"));
164 |         }
165 |     }
166 | 
167 |     boost::asio::io_service& io_;
168 |     tcp::acceptor acceptor_;
169 |     std::function<void(Client)> accept_callback_;
170 |     coro::Scheduler* sche_;
171 | };
172 | 
173 | 
174 | #endif // __CORO_ECHO_SERVER_H__
175 | 
176 | 
--------------------------------------------------------------------------------
/c++/coro_event_and_queue.cpp:
--------------------------------------------------------------------------------
1 | #include <iostream>        // NOTE: include targets reconstructed from usage (stripped in extraction)
2 | #include <functional>
3 | #include <boost/asio.hpp>
4 | 
5 | #include "coro.h"
6 | 
7 | void one(coro::Event& evt)
8 | {
9 |     std::cout << "one start" << std::endl;
10 |     evt.wait();
11 |     std::cout << "one end" << std::endl;
12 | }
13 | 
14 | void two(coro::Event& evt)
15 | {
16 |     std::cout << "two start" << std::endl;
17 |     evt.wait();
18 |     std::cout << "two end" << std::endl;
19 | }
20 | 
// one() and two() park themselves in evt.wait(); they are resumed only after three()
// has slept five times and called evt.set(), which makes the Event coroutine jump to
// each waiter in turn.
21 | void three(coro::Event& evt)
22 | {
23 |     std::cout << "three start" << std::endl;
24 |     for(int i=0; i<5; i++)
25 |     {
26 |         coro::this_coroutine::sleep_for(1);
27 |         std::cout << "three: " << i << std::endl;
28 |     }
29 | 
30 |     evt.set();
31 | 
32 |     std::cout << "three end" << std::endl;
33 | }
34 | 
35 | 
36 | int main()
37 | {
38 |     boost::asio::io_service io;
39 |     boost::asio::io_service::work w(io);
40 | 
41 |     coro::Event evt;
42 |     auto sche = coro::Scheduler::create(io);
43 | 
44 |     sche->spawn(std::bind(one, std::ref(evt)), std::string("one"));
45 |     sche->spawn(std::bind(two, std::ref(evt)), std::string("two"));
46 |     sche->spawn(std::bind(three, std::ref(evt)), std::string("three"));
47 | 
48 | 
49 |     coro::Queue<int> queue;
50 |     auto consumer = [&queue]()
51 |     {
52 |         for(;;)
53 |         {
54 |             auto value = queue.get();
55 |             std::cout << "get value: " << value << std::endl;
56 | 
57 |             if (value == 5)
58 |             {
59 |                 break;
60 |             }
61 |         }
62 |     };
63 | 
64 |     auto producer = [&queue]()
65 |     {
66 |         int data[] = {1, 2, 3, 4, 5};
67 |         for(auto i: data)
68 |         {
69 |             queue.put(i);
70 |             coro::this_coroutine::sleep_for(1);
71 |         }
72 |     };
73 | 
74 |     boost::asio::deadline_timer timer(io, boost::posix_time::seconds(6));
75 |     timer.async_wait(
76 |         [sche, consumer, producer](const boost::system::error_code&)
77 |         {
78 |             sche->spawn(consumer, std::string("consumer"));
79 |             sche->spawn(producer, std::string("producer"));
80 |         }
81 |     );
82 | 
83 | 
84 |     sche->run();
85 |     io.run();
86 | 
87 |     delete sche;
88 |     return 0;
89 | }
90 | 
91 | 
92 | 
--------------------------------------------------------------------------------
/c++/coroutine_symmetric.cpp:
--------------------------------------------------------------------------------
1 | #include <iostream>
2 | #include <boost/coroutine/all.hpp>  // NOTE: include targets reconstructed from usage (stripped in extraction)
3 | 
4 | typedef boost::coroutines::symmetric_coroutine<void> coro_t;
5 | 
6 | coro_t::call_type* a, * b, *z;
7 | 
8 | void xx(coro_t::yield_type&);
9 | void yy(coro_t::yield_type&);
10 | void zz(coro_t::yield_type&);
11 | 
12 | 
13 | void xx(coro_t::yield_type& yield)
14 | {
15 |     std::cout << "xx 1" << std::endl;
16 |     b = new coro_t::call_type(yy);
17 |     (*b)();
18 |     std::cout << "xx 2" << std::endl;
19 |     yield(*b);
20 |     std::cout << "xx 3" << std::endl;
21 |     yield();
22 | }
23 | 
24 | void yy(coro_t::yield_type& yield)
25 | {
26 |     std::cout << "yy 1" << std::endl;
27 |     yield();
28 |     std::cout << "yy 2" << std::endl;
29 |     z = new coro_t::call_type(zz);
30 |     yield(*z);
31 |     std::cout << "yy 3" << std::endl;
32 |     yield();
33 | }
34 | 
35 | void zz(coro_t::yield_type& yield)
36 | {
37 |     std::cout << "zz 1" << std::endl;
38 |     yield(*b);
39 |     std::cout << "zz 2" << std::endl;
40 |     yield();
41 | }
42 | 
43 | 
44 | int main()
45 | {
46 |     std::cout << "main 1" << std::endl;
47 |     a = new coro_t::call_type(xx);
48 |     (*a)();
49 |     std::cout << "main 2" << std::endl;
50 |     (*a)();
51 |     std::cout << "main 3" << std::endl;
52 | }
53 | 
54 | 
55 | // output
56 | // main 1
57 | // xx 1
58 | // yy 1
59 | // xx 2
60 | // yy 2
61 | // zz 1
62 | // yy 3
63 | // main 2
64 | // xx 3
65 | // main 3
66 | 
--------------------------------------------------------------------------------