├── README.md ├── gen_annoy_index └── main.go ├── gen_wordvector_leveldb └── main.go ├── main.go └── util └── util.go /README.md: -------------------------------------------------------------------------------- 1 | # wordvector_be 2 | 3 | 这个项目用 go 语言实现了一个 HTTP 服务,使用腾讯 800 万词的 [word vector 模型](https://ai.tencent.com/ailab/zh/news/detial?id=22) 得到相似关键词和关键词的cosine similarity。索引使用了 spotify 的 [annoy](https://github.com/spotify/annoy) 引擎。 4 | 5 | ## 安装 6 | 7 | 一、首先安装 annoy 的 golang 包,参照 [这个文档](https://github.com/spotify/annoy/blob/master/README_GO.rst),不需要执行所有步骤,只要执行下面命令 8 | 9 | ``` 10 | swig -go -intgosize 64 -cgo -c++ src/annoygomodule.i 11 | mkdir -p $GOPATH/src/annoyindex 12 | cp src/annoygomodule_wrap.cxx src/annoyindex.go \ 13 | src/annoygomodule.h src/annoylib.h src/kissrandom.h test/annoy_test.go $GOPATH/src/annoyindex 14 | ``` 15 | 16 | 二、然后下载腾讯的模型文件,建议使用 aria2c 17 | 18 | ``` 19 | go get github.com/huichen/wordvector_be 20 | cd $GOPATH/src/github.com/huichen/wordvector_be 21 | mkdir data 22 | cd data/ 23 | aria2c -c https://ai.tencent.com/ailab/nlp/data/Tencent_AILab_ChineseEmbedding.tar.gz 24 | tar zxvf https://ai.tencent.com/ailab/nlp/data/Tencent_AILab_ChineseEmbedding.tar.gz 25 | ``` 26 | 27 | 三、将腾讯的 txt 模型文件导出为 leveldb 格式的数据库,进入 gen_wordvector_leveldb 后执行 28 | 29 | ``` 30 | go run main.go 31 | ``` 32 | 33 | 生成的数据库在 data/tencent_embedding_wordvector.db 目录下 34 | 35 | 四、创建 annoy 索引文件和 metadata 数据库,进入 gen_annoy_index 目录,执行 36 | 37 | ``` 38 | go run main.go 39 | ``` 40 | 41 | 你的电脑要有 10G 左右内存。不到 30 分钟后,索引文件生成在 data/tencent_embedding.ann。annoy 索引的 key 是整数 id,不包括关键词和 id 之间的映射关系,这个关系放在了 data/tencent_embedding_index_to_keyword.db 和 data/tencent_embedding_keyword_to_index.db 两个 leveldb 数据库备用。 42 | 43 | ## 使用 44 | 45 | 所有包和数据文件准备好之后,就可以启动服务了: 46 | 47 | ``` 48 | go build 49 | ./wordvector_be 50 | ``` 51 | 52 | 在浏览器打开 http://localhost:3721/get.similar.keywords/?keyword=美白&num=20 ,返回如下,word 字段是关键词,similarity 是关键词词向量之间的 consine similarity,约接近 1 越相似。 53 | 54 | ``` 55 | { 56 
| "keywords": [ 57 | { 58 | "word": "美白", 59 | "similarity": 1 60 | }, 61 | { 62 | "word": "淡斑", 63 | "similarity": 0.8916605 64 | }, 65 | { 66 | "word": "美白产品", 67 | "similarity": 0.8722978 68 | }, 69 | { 70 | "word": "美白效果", 71 | "similarity": 0.8654123 72 | }, 73 | { 74 | "word": "想美白", 75 | "similarity": 0.86464494 76 | }, 77 | ... 78 | ``` 79 | 80 | 更多函数见 main.go 代码中的注释。 81 | 82 | ## 参数调优 83 | 84 | 你可能发现了,这个程序返回的相似词和腾讯官方的例子略有不同,因为我们用的是相似紧邻算法,不保证 100% 的召回率。主要有以下参数可以调整 85 | 86 | * numTrees: [gen_annoy_index/main.go](https://github.com/huichen/wordvector_be/blob/master/gen_annoy_index/main.go),近似最近邻计算需要的随机森林中树的个数,树越多召回率越高,但也意味更久的建树时间(一次性)和请求延迟 87 | * kSearch: [main.go](https://github.com/huichen/wordvector_be/blob/master/main.go),搜索栈长度,这个值越大则请求耗时越长,但召回率越高 88 | 89 | 在程序中默认使用 numTrees = 10 和 kSearch = 10000 两个参数,可以得到不错的召回率,压测 100 并发 http 请求的情况下,延迟平均 76 毫秒方差 65 毫秒。如果你有更充足的时间,可以增加 numTrees 延长建树的时间;如果你对服务的并发和延迟有更高要求,可以适当降低 kSearch,不过这样做也会降低召回率。请根据业务需求做适当的权衡。 90 | -------------------------------------------------------------------------------- /gen_annoy_index/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "annoyindex" 5 | "log" 6 | 7 | "github.com/huichen/wordvector_be/util" 8 | 9 | "github.com/syndtr/goleveldb/leveldb" 10 | ) 11 | 12 | const ( 13 | dimVec = 200 14 | numTrees = 10 15 | ) 16 | 17 | func main() { 18 | log.SetFlags(log.Ldate | log.Ltime | log.Lmicroseconds | log.Llongfile) 19 | 20 | // 打开 leveldb,用于读取 wordvector 数据 21 | dbWordVector, err := leveldb.OpenFile("../data/tencent_embedding_wordvector.db", nil) 22 | if err != nil { 23 | log.Panic(err) 24 | } 25 | defer dbWordVector.Close() 26 | 27 | // 用于保存keyword->index 28 | dbIndexToKeywordToIndex, err := leveldb.OpenFile("../data/tencent_embedding_keyword_to_index.db", nil) 29 | if err != nil { 30 | log.Panic(err) 31 | } 32 | defer dbIndexToKeywordToIndex.Close() 33 | 34 | // 用于保存index->keyword 35 | dbIndexToKeyword, err := 
leveldb.OpenFile("../data/tencent_embedding_index_to_keyword.db", nil) 36 | if err != nil { 37 | log.Panic(err) 38 | } 39 | defer dbIndexToKeyword.Close() 40 | 41 | log.Printf("start loading data") 42 | count := 0 43 | t := annoyindex.NewAnnoyIndexAngular(dimVec) 44 | iter := dbWordVector.NewIterator(nil, nil) 45 | for iter.Next() { 46 | value := iter.Value() 47 | if count%1000000 == 0 { 48 | log.Printf("#records loaded = %d", count) 49 | } 50 | v := []float32{} 51 | for i := 0; i < dimVec; i++ { 52 | e := util.Float32frombytes(value[i*4 : (i+1)*4]) 53 | v = append(v, e) 54 | } 55 | t.AddItem(count, v) 56 | err = dbIndexToKeywordToIndex.Put(iter.Key(), util.Uint32bytes(uint32(count)), nil) 57 | if err != nil { 58 | log.Panic(err) 59 | } 60 | 61 | err = dbIndexToKeyword.Put(util.Uint32bytes(uint32(count)), iter.Key(), nil) 62 | if err != nil { 63 | log.Panic(err) 64 | } 65 | count++ 66 | } 67 | iter.Release() 68 | err = iter.Error() 69 | if err != nil { 70 | log.Panic(err) 71 | } 72 | log.Printf("finished loading data") 73 | 74 | log.Printf("start building") 75 | t.Build(numTrees) // 更多的树意味着更高精度,但建造时间也更长 76 | log.Printf("finished building") 77 | 78 | t.Save("../data/tencent_embedding.ann") 79 | log.Printf("finished saving") 80 | } 81 | -------------------------------------------------------------------------------- /gen_wordvector_leveldb/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "log" 6 | "os" 7 | "strconv" 8 | "strings" 9 | 10 | "github.com/huichen/wordvector_be/util" 11 | "github.com/syndtr/goleveldb/leveldb" 12 | ) 13 | 14 | func main() { 15 | db, err := leveldb.OpenFile("../data/tencent_embedding_wordvector.db", nil) 16 | defer db.Close() 17 | 18 | file, err := os.Open("../data/Tencent_AILab_ChineseEmbedding.txt") 19 | if err != nil { 20 | log.Fatal(err) 21 | } 22 | defer file.Close() 23 | 24 | scanner := bufio.NewScanner(file) 25 | lineCount := 0 26 | var total 
int64 27 | var dim int 28 | for scanner.Scan() { 29 | lineCount++ 30 | if lineCount == 1 { 31 | fields := strings.Split(scanner.Text(), " ") 32 | total, err = strconv.ParseInt(fields[0], 10, 0) 33 | if err != nil { 34 | log.Fatal(err) 35 | } 36 | l, err := strconv.ParseInt(fields[1], 10, 0) 37 | if err != nil { 38 | log.Fatal(err) 39 | } 40 | log.Printf("vector length = %d", l) 41 | dim = int(l) 42 | continue 43 | } 44 | 45 | fields := strings.Split(scanner.Text(), " ") 46 | if len(fields) != 1+dim { 47 | log.Fatal("向量长度不匹配 %d != %d", len(fields), 1+dim) 48 | } 49 | 50 | vec := make([]byte, 4*dim) 51 | for i := 1; i <= dim; i++ { 52 | value, err := strconv.ParseFloat(fields[i], 32) 53 | if err != nil { 54 | log.Fatal(err) 55 | } 56 | bs := util.Float32bytes(float32(value)) 57 | vec[4*(i-1)] = bs[0] 58 | vec[4*(i-1)+1] = bs[1] 59 | vec[4*(i-1)+2] = bs[2] 60 | vec[4*(i-1)+3] = bs[3] 61 | } 62 | 63 | err = db.Put([]byte(fields[0]), vec, nil) 64 | if err != nil { 65 | log.Panic(err) 66 | } 67 | 68 | if lineCount%1000 == 0 { 69 | log.Printf("已写入 %d 条记录,共 %d 条", lineCount, total) 70 | } 71 | } 72 | 73 | if err := scanner.Err(); err != nil { 74 | log.Fatal(err) 75 | } 76 | log.Printf("载入 wordvector 完毕") 77 | 78 | } 79 | -------------------------------------------------------------------------------- /main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "annoyindex" 5 | "encoding/json" 6 | "flag" 7 | "fmt" 8 | "log" 9 | "math" 10 | "net/http" 11 | "os" 12 | "os/signal" 13 | "strconv" 14 | "syscall" 15 | 16 | "github.com/huichen/sego" 17 | "github.com/huichen/wordvector_be/util" 18 | "github.com/syndtr/goleveldb/leveldb" 19 | ) 20 | 21 | const ( 22 | vecDim = 200 23 | kSearch = 10000 24 | 25 | defaultNumReturnKeywords = 10 26 | maxNumReturnKeywords = 100 27 | ) 28 | 29 | var ( 30 | port = flag.String("port", ":3721", "http 服务端口") 31 | httpPathPrefix = flag.String("http_path_prefix", "", "") 32 | dict 
= flag.String("dict", "", "sego 词典,从 github.com/huichen/sego/data/dictionary.txt 下载") 33 | 34 | dbIndexToKeyword *leveldb.DB 35 | dbKeywordToIndex *leveldb.DB 36 | annoyIndex annoyindex.AnnoyIndex 37 | segmenter sego.Segmenter 38 | ) 39 | 40 | func main() { 41 | flag.Parse() 42 | log.SetFlags(log.Ldate | log.Ltime | log.Lmicroseconds | log.Llongfile) 43 | if *dict != "" { 44 | segmenter.LoadDictionary(*dict) 45 | } 46 | 47 | var err error 48 | dbIndexToKeyword, err = leveldb.OpenFile("data/tencent_embedding_index_to_keyword.db", nil) 49 | if err != nil { 50 | log.Panic(err) 51 | } 52 | defer dbIndexToKeyword.Close() 53 | 54 | dbKeywordToIndex, err = leveldb.OpenFile("data/tencent_embedding_keyword_to_index.db", nil) 55 | if err != nil { 56 | log.Panic(err) 57 | } 58 | defer dbKeywordToIndex.Close() 59 | 60 | annoyIndex = annoyindex.NewAnnoyIndexAngular(vecDim) 61 | annoyIndex.Load("data/tencent_embedding.ann") 62 | 63 | http.HandleFunc(fmt.Sprintf("%s/get.similar.keywords/", *httpPathPrefix), getSimilarKeyword) 64 | http.HandleFunc(fmt.Sprintf("%s/get.similar.keywords.from.vector/", *httpPathPrefix), getSimilarKeywordFromVector) 65 | http.HandleFunc(fmt.Sprintf("%s/get.word.vector/", *httpPathPrefix), getWordVector) 66 | http.HandleFunc(fmt.Sprintf("%s/get.similarity.score/", *httpPathPrefix), getSimilarityScore) 67 | go func() { 68 | if err := http.ListenAndServe(*port, nil); err != nil { 69 | panic(err) 70 | } 71 | }() 72 | 73 | errc := make(chan error, 2) 74 | go func() { 75 | c := make(chan os.Signal) 76 | signal.Notify(c, syscall.SIGINT) 77 | errc <- fmt.Errorf("%s", <-c) 78 | }() 79 | log.Println("terminated ", <-errc) 80 | } 81 | 82 | /* 83 | 从一个或者多个关键词找相似词 84 | HTTP 请求参数 85 | /get.similar.keywords/?keyword=xxx&num=yyy 86 | 支持多个 keyword 参数(词向量之和),num不指定的话默认10个,比如 87 | /get.similar.keywords/?keyword=xxx&keyword=yyy&keyword=zzz 88 | 89 | 特殊用法:当 keyword 只有一个且是词向量表中没有的短语时,并且 --dict 参数载入了一个 90 | 词典,则将改 keyword 分词之后求多个分词的词向量之和的相似词 91 | 92 | */ 93 | 94 | type 
SimilarKeywordResponse struct { 95 | Keywords []Keyword `json:"keywords"` 96 | } 97 | 98 | type Keyword struct { 99 | Word string `json:"word"` 100 | Similarity float32 `json:"similarity"` 101 | } 102 | 103 | func getSimilarKeyword(w http.ResponseWriter, r *http.Request) { 104 | key, ok := r.URL.Query()["keyword"] 105 | if !ok || len(key) == 0 { 106 | http.Error(w, "必须输入 keyword", http.StatusInternalServerError) 107 | return 108 | } 109 | num, ok := r.URL.Query()["num"] 110 | var numKeywords int 111 | if !ok || len(num) != 1 { 112 | numKeywords = defaultNumReturnKeywords 113 | } else { 114 | var err error 115 | numKeywords, err = strconv.Atoi(num[0]) 116 | if err != nil { 117 | log.Printf("%s", err) 118 | http.Error(w, err.Error(), http.StatusInternalServerError) 119 | return 120 | } 121 | } 122 | if numKeywords <= 0 { 123 | numKeywords = defaultNumReturnKeywords 124 | } else if numKeywords > maxNumReturnKeywords { 125 | numKeywords = maxNumReturnKeywords 126 | } 127 | 128 | wordVec := make([]float32, vecDim) 129 | _, err := dbKeywordToIndex.Get([]byte(key[0]), nil) 130 | if err != nil { 131 | if len(key) == 1 && *dict != "" { 132 | // 只有一个关键词,且不出现在向量词表的特殊情况 133 | segments := segmenter.Segment([]byte(key[0])) 134 | key = sego.SegmentsToSlice(segments, false) 135 | } else { 136 | http.Error(w, err.Error(), http.StatusInternalServerError) 137 | return 138 | } 139 | } 140 | 141 | validKeywords := 0 142 | for _, k := range key { 143 | id, err := dbKeywordToIndex.Get([]byte(k), nil) 144 | if err != nil { 145 | continue 146 | } 147 | validKeywords++ 148 | index := util.Uint32frombytes(id) 149 | var wv []float32 150 | annoyIndex.GetItem(int(index), &wv) 151 | for i, v := range wv { 152 | wordVec[i] = wordVec[i] + v 153 | } 154 | } 155 | if validKeywords == 0 { 156 | http.Error(w, "没有找到匹配关键词", http.StatusInternalServerError) 157 | return 158 | } 159 | 160 | var result []int 161 | annoyIndex.GetNnsByVector(wordVec, numKeywords, kSearch, &result) 162 | var sim 
SimilarKeywordResponse 163 | for _, k := range result { 164 | keyword, err := dbIndexToKeyword.Get(util.Uint32bytes(uint32(k)), nil) 165 | if err != nil { 166 | log.Printf("%s", err) 167 | http.Error(w, err.Error(), http.StatusInternalServerError) 168 | return 169 | } 170 | similarityScore := getCosineSimilarityByVector(wordVec, k) 171 | sim.Keywords = append(sim.Keywords, Keyword{ 172 | Word: string(keyword), 173 | Similarity: similarityScore, 174 | }) 175 | } 176 | 177 | data, err := json.Marshal(sim) 178 | if err != nil { 179 | log.Printf("%s", err) 180 | http.Error(w, err.Error(), http.StatusInternalServerError) 181 | return 182 | } 183 | w.Header().Set("Content-Type", "application/json") 184 | w.Header().Set("Access-Control-Allow-Origin", "*") 185 | w.Write(data) 186 | } 187 | 188 | /* 189 | 从一个或者多个关键词找相似词 190 | HTTP POST 请求参数 191 | /get.similar.keywords.from.vector/ 192 | body 是 SimilarKeywordFromVectorRequest 结构体的 json 193 | */ 194 | 195 | type SimilarKeywordFromVectorRequest struct { 196 | NumKeywords int `json:"numKeywords"` 197 | Vector []float32 `json:"vector"` 198 | } 199 | 200 | func getSimilarKeywordFromVector(w http.ResponseWriter, r *http.Request) { 201 | var req SimilarKeywordFromVectorRequest 202 | if r.Body == nil { 203 | http.Error(w, "Please send a request body", 400) 204 | return 205 | } 206 | err := json.NewDecoder(r.Body).Decode(&req) 207 | if err != nil { 208 | http.Error(w, err.Error(), 400) 209 | return 210 | } 211 | 212 | wordVec := req.Vector 213 | if len(wordVec) != vecDim { 214 | http.Error(w, "vector 维度不匹配", http.StatusInternalServerError) 215 | return 216 | } 217 | 218 | numKeywords := req.NumKeywords 219 | if numKeywords <= 0 { 220 | numKeywords = defaultNumReturnKeywords 221 | } else if numKeywords > maxNumReturnKeywords { 222 | numKeywords = maxNumReturnKeywords 223 | } 224 | 225 | var result []int 226 | annoyIndex.GetNnsByVector(wordVec, numKeywords, kSearch, &result) 227 | var sim SimilarKeywordResponse 228 | for _, k := range 
result { 229 | keyword, err := dbIndexToKeyword.Get(util.Uint32bytes(uint32(k)), nil) 230 | if err != nil { 231 | log.Printf("%s", err) 232 | http.Error(w, err.Error(), http.StatusInternalServerError) 233 | return 234 | } 235 | similarityScore := getCosineSimilarityByVector(wordVec, k) 236 | sim.Keywords = append(sim.Keywords, Keyword{ 237 | Word: string(keyword), 238 | Similarity: similarityScore, 239 | }) 240 | } 241 | 242 | data, err := json.Marshal(sim) 243 | if err != nil { 244 | log.Printf("%s", err) 245 | http.Error(w, err.Error(), http.StatusInternalServerError) 246 | return 247 | } 248 | w.Header().Set("Content-Type", "application/json") 249 | w.Header().Set("Access-Control-Allow-Origin", "*") 250 | w.Write(data) 251 | } 252 | 253 | /* 254 | 返回一个或者多个关键词的词向量 255 | HTTP 请求参数 256 | /get.word.vector/?keyword=xxx 257 | 支持多个 keyword 参数(词向量之和),比如 258 | /get.similar.keywords/?keyword=xxx&keyword=yyy&keyword=zzz 259 | 260 | */ 261 | 262 | type WordVectorResponse struct { 263 | Vector []float32 `json:"vector"` 264 | } 265 | 266 | func getWordVector(w http.ResponseWriter, r *http.Request) { 267 | key, ok := r.URL.Query()["keyword"] 268 | if !ok || len(key) == 0 { 269 | http.Error(w, "必须输入 keyword", http.StatusInternalServerError) 270 | return 271 | } 272 | wordVec := make([]float32, vecDim) 273 | for _, k := range key { 274 | id, err := dbKeywordToIndex.Get([]byte(k), nil) 275 | if err != nil { 276 | log.Printf("%s", err) 277 | http.Error(w, err.Error(), http.StatusInternalServerError) 278 | return 279 | } 280 | index := util.Uint32frombytes(id) 281 | var wv []float32 282 | annoyIndex.GetItem(int(index), &wv) 283 | for i, v := range wv { 284 | wordVec[i] = wordVec[i] + v 285 | } 286 | } 287 | 288 | var resp WordVectorResponse 289 | resp.Vector = wordVec 290 | data, err := json.Marshal(resp) 291 | if err != nil { 292 | log.Printf("%s", err) 293 | http.Error(w, err.Error(), http.StatusInternalServerError) 294 | return 295 | } 296 | 
w.Header().Set("Access-Control-Allow-Origin", "*") 297 | w.Header().Set("Content-Type", "application/json") 298 | w.Write(data) 299 | } 300 | 301 | /* 302 | 计算两个词的相似度 303 | HTTP 请求参数 304 | /get.similarity.score/?keyword1=xxx&keyword2=yyy 305 | */ 306 | 307 | type SimilarityScoreResponse struct { 308 | Score float32 `json:"score"` 309 | } 310 | 311 | func getSimilarityScore(w http.ResponseWriter, r *http.Request) { 312 | key1, ok := r.URL.Query()["keyword1"] 313 | if !ok || len(key1) != 1 { 314 | http.Error(w, "必须输入 keyword", http.StatusInternalServerError) 315 | return 316 | } 317 | id1, err := dbKeywordToIndex.Get([]byte(key1[0]), nil) 318 | if err != nil { 319 | log.Printf("%s", err) 320 | http.Error(w, err.Error(), http.StatusInternalServerError) 321 | return 322 | } 323 | index1 := util.Uint32frombytes(id1) 324 | 325 | key2, ok := r.URL.Query()["keyword2"] 326 | if !ok || len(key2) != 1 { 327 | http.Error(w, "必须输入 keyword", http.StatusInternalServerError) 328 | return 329 | } 330 | id2, err := dbKeywordToIndex.Get([]byte(key2[0]), nil) 331 | if err != nil { 332 | log.Printf("%s", err) 333 | http.Error(w, err.Error(), http.StatusInternalServerError) 334 | return 335 | } 336 | index2 := util.Uint32frombytes(id2) 337 | 338 | var resp SimilarityScoreResponse 339 | resp.Score = getCosineSimilarity(int(index1), int(index2)) 340 | data, err := json.Marshal(resp) 341 | if err != nil { 342 | log.Printf("%s", err) 343 | http.Error(w, err.Error(), http.StatusInternalServerError) 344 | return 345 | } 346 | w.Header().Set("Content-Type", "application/json") 347 | w.Header().Set("Access-Control-Allow-Origin", "*") 348 | w.Write(data) 349 | } 350 | 351 | func getCosineSimilarity(i, j int) float32 { 352 | var vec []float32 353 | annoyIndex.GetItem(i, &vec) 354 | 355 | return getCosineSimilarityByVector(vec, j) 356 | } 357 | 358 | func getCosineSimilarityByVector(vec []float32, j int) float32 { 359 | var vec2 []float32 360 | annoyIndex.GetItem(j, &vec2) 361 | 362 | var a, b, c 
// Float32frombytes decodes the first 4 bytes of b as a little-endian float32.
func Float32frombytes(bytes []byte) float32 {
	bits := binary.LittleEndian.Uint32(bytes)
	return math.Float32frombits(bits)
}

// Float32bytes encodes f as its 4-byte little-endian IEEE-754 representation.
func Float32bytes(float float32) []byte {
	bits := math.Float32bits(float)
	// Bug fix: the slice was previously allocated with 8 bytes, so every
	// encoded float32 carried 4 trailing zero bytes; a float32 is exactly 4.
	bytes := make([]byte, 4)
	binary.LittleEndian.PutUint32(bytes, bits)
	return bytes
}

// Uint32frombytes decodes the first 4 bytes of b as a little-endian uint32.
func Uint32frombytes(bytes []byte) uint32 {
	return binary.LittleEndian.Uint32(bytes)
}

// Uint32bytes encodes value as its 4-byte little-endian representation.
func Uint32bytes(value uint32) []byte {
	bs := make([]byte, 4)
	binary.LittleEndian.PutUint32(bs, value)
	return bs
}