├── .gitignore ├── LICENSE ├── Makefile ├── README.md ├── backends └── backends.go ├── clients ├── client.go ├── client.py └── client.sh ├── detailer └── detailer.go ├── frontend.go ├── images ├── architecture.jpg ├── prometheus.png ├── service-map.png ├── stackdriver-metrics.png ├── stackdriver-request.png ├── webui.jpg └── xray-request.png ├── prometheus.yml ├── rpc ├── defs.pb.go ├── defs.proto ├── search.go └── uuid.go └── static ├── app.js ├── index.html └── styles.css /.gitignore: -------------------------------------------------------------------------------- 1 | # Binaries for programs and plugins 2 | *.exe 3 | *.dll 4 | *.so 5 | *.dylib 6 | 7 | # Test binary, build with `go test -c` 8 | *.test 9 | 10 | # Output of the go coverage tool, specifically when used with LiteIDE 11 | *.out 12 | 13 | # Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736 14 | .glide/ 15 | 16 | # DB related files and directories 17 | data/ 18 | 19 | # Generated binaries 20 | bin/ 21 | 22 | # Miscellaneous files 23 | .DS_Store 24 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | all: install protoc binaries 2 | 3 | install: 4 | go get ./... 
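
# The protoc target below regenerates rpc/defs.pb.go from rpc/defs.proto; it
# assumes protoc and the protoc-gen-go plugin are available on your PATH.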
5 | 
6 | protoc: 
7 | 	protoc -I rpc rpc/defs.proto --go_out=plugins=grpc:rpc 
8 | 
9 | binaries: backends_bin frontend_bin detailer_bin 
10 | 
11 | run-microservices: binaries 
12 | 	./bin/detailer_mu & 
13 | 	./bin/backends_mu & 
14 | 	./bin/frontend_mu & 
15 | 
16 | kill-microservices: 
17 | 	-pkill 'detailer_mu|backends_mu|frontend_mu' 
18 | 
19 | detailer_bin: 
20 | 	go build -o ./bin/detailer_mu ./detailer 
21 | 
22 | backends_bin: 
23 | 	go build -o bin/backends_mu ./backends 
24 | 
25 | frontend_bin: 
26 | 	go build -o bin/frontend_mu . 
27 | 
28 | build-microservices: 
29 | 	CGO_ENABLED=0 GOOS=linux go build -o ./bin/detailer_mu_linux ./detailer 
30 | 	CGO_ENABLED=0 GOOS=linux go build -o ./bin/backends_mu_linux ./backends 
31 | 	CGO_ENABLED=0 GOOS=linux go build -o ./bin/frontend_mu_linux . 
32 | 
33 | clean: 
34 | 	rm -rf bin/ 
35 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## media-search 
2 | 
3 | media-search is a service for searching for media, such as YouTube videos, by query. It is accessible both from the 
4 | command line and from a web user interface, via HTTP requests. It is instrumented with OpenCensus, which gives us 
5 | distributed tracing and monitoring for visibility into the entire system as a call propagates through the various microservices. 
6 | 
7 | ![](./images/architecture.jpg) 
8 | 
9 | ### Structure 
10 | 
11 | It consists of: 
12 | 
13 | * A frontend service (OFE) that is accessible via HTTP requests 
14 | * Clients in Python3, Go, shell, and JavaScript+HTML (accessible via the web page) 
15 | * A database to store information about already seen queries (caching) 
16 | * A backend for searching for content on YouTube's servers 
17 | * A backend for generating IDs 
18 | * A backend for asynchronous retrieval of details for searched media, so that later media inspections can quickly 
19 | pull up cached information on them 
20 | * An instrumented Mongo Go driver from https://github.com/orijtech/mongo-go-driver 
21 | 
22 | ### How it works 
23 | 
24 | Normal access is by hitting the frontend service (OFE) with HTTP GET or POST requests to /search. 
25 | OFE then makes a gRPC call to the search backend (SB), which searches for content on YouTube. 
26 | Because searching YouTube incurs API quota costs as well as the latency of fetching from YouTube's servers, 
27 | it helps to cache results so that repeated queries return cached content in very little time. 
28 | When results come back on a cache miss, they are cached in MongoDB, and an asynchronous call is then made 
29 | to the detailer service, which fetches metadata for each individual video and caches that in MongoDB as well. 
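
To make that flow concrete, here is a minimal Go sketch of a client, modeled on the full clients under `clients/`. It assumes the frontend is running locally on its default address `:9778`, and it only picks the `videoId` and `title` fields out of the response:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	// The frontend accepts the search keywords as a JSON body on /search.
	body, err := json.Marshal(map[string]string{"keywords": "opencensus"})
	if err != nil {
		log.Fatalf("marshal request: %v", err)
	}
	res, err := http.Post("http://localhost:9778/search", "application/json", bytes.NewReader(body))
	if err != nil {
		log.Fatalf("POST /search: %v", err)
	}
	defer res.Body.Close()

	// On a cache hit the frontend answers straight from MongoDB; on a miss it
	// calls the search backend over gRPC first. Either way the body is a JSON
	// array of search pages, each carrying a list of result items.
	var pages []struct {
		Items []struct {
			Id struct {
				VideoId string `json:"videoId"`
			} `json:"id"`
			Snippet struct {
				Title string `json:"title"`
			} `json:"snippet"`
		} `json:"items"`
	}
	if err := json.NewDecoder(res.Body).Decode(&pages); err != nil {
		log.Fatalf("decode response: %v", err)
	}
	for _, page := range pages {
		for _, item := range page.Items {
			fmt.Printf("https://youtu.be/%s  %s\n", item.Id.VideoId, item.Snippet.Title)
		}
	}
}
```

A plain GET such as `curl 'http://localhost:9778/search?keywords=census'` works too, which is what `clients/client.sh` does.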
30 | 31 | The architectural diagram looks something like this: 32 | ![](./images/architecture-diagram.png) 33 | 34 | After tracing through requests, the service map might look like this: 35 | ![](./images/service-map.png) 36 | 37 | ### Requirements 38 | 39 | Name|Installation resource|Notes 40 | ---|---|--- 41 | Go1.9+|https://golang.org/doc/install 42 | Prometheus|https://prometheus.io/docs/introduction/first\_steps| 43 | AWS Credentials|https://docs.aws.amazon.com/sdk-for-go/v1/developer-guide/configuring-sdk.html| 44 | Google Cloud Platform credentials file|https://cloud.google.com/docs/authentication/getting-started| 45 | MongoDB instance or credentials|https://docs.mongodb.com/getting-started/shell/installation/|You can easily install a local MongoDB instance if you do not have access to a cloud hosted one by installing `mongod` 46 | Stackdriver Trace|https://console.cloud.google.com/apis/library/cloudtrace.googleapis.com/?q=stackdriver|Enable the API for your GCP project by also visiting https://console.cloud.google.com/apis/library and searching for the API "Stackdriver" 47 | Stackdriver Monitoring|https://console.cloud.google.com/apis/library/monitoring.googleapis.com/?q=stackdriver|Enable the API for your GCP project by also visiting https://console.cloud.google.com/apis/library and searching for the API "Stackdriver" 48 | AWS X-Ray|https://docs.aws.amazon.com/xray/latest/devguide/xray-services-lambda.html| 49 | 50 | ### Installing the source code 51 | ```shell 52 | go get -u -v github.com/orijtech/media-search/... 53 | ``` 54 | 55 | ### Running the project 56 | 57 | Assuming you already set the credentials in [Requirements](#Requirements), run 58 | ```shell 59 | make run-microservices 60 | ``` 61 | 62 | Also don't forget to run Prometheus like this 63 | ```shell 64 | prometheus --config.file=prometheus.yml 65 | ``` 66 | 67 | If you'd like to terminate all the running binaries/microservices, and have `pkill` in your shell, you can run: 68 | ```shell 69 | make kill-microservices 70 | ``` 71 | 72 | ### Clients 73 | 74 | Client|Language|Running it 75 | ---|---|--- 76 | Web UI|Javascript+HTML|Visit http://localhost:9778 77 | clients/client.go|Go|`go run clients/client.go` 78 | clients/client.py|Python3|`python3 clients/client.py` 79 | clients/client.sh|Shell|`./clients/client.sh` 80 | 81 | 82 | The WebUI looks something like this 83 | ![](./images/webui.jpg) 84 | 85 | ### Inspecting traces and metrics 86 | You can examine traces and metrics by visiting 87 | * AWS X-Ray 88 | * Stackdriver 89 | 90 | ### Screenshots 91 | The clients' HTTP requests propagate their 92 | traces through to the server and back, and then to the exporters yielding 93 | insights such as: 94 | 95 | #### HTTP requests 96 | ![](./images/stackdriver-http-request.png) 97 | ![](./images/x-ray-http-request.png) 98 | 99 | #### DB operations 100 | ![](./images/stackdriver-request.png) 101 | ![](./images/xray-request.png) 102 | ![](./images/prometheus.png) 103 | ![](./images/stackdriver-metrics.png) 104 | ![](./images/service-map.png) 105 | -------------------------------------------------------------------------------- /backends/backends.go: -------------------------------------------------------------------------------- 1 | // Copyright 2018, OpenCensus Authors 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | package main 16 | 17 | import ( 18 | "flag" 19 | "fmt" 20 | "log" 21 | "net" 22 | "net/http" 23 | "strings" 24 | "time" 25 | 26 | "google.golang.org/grpc" 27 | 28 | "contrib.go.opencensus.io/exporter/stackdriver" 29 | xray "github.com/census-instrumentation/opencensus-go-exporter-aws" 30 | "go.opencensus.io/exporter/prometheus" 31 | "go.opencensus.io/plugin/ocgrpc" 32 | "go.opencensus.io/plugin/ochttp" 33 | "go.opencensus.io/stats/view" 34 | "go.opencensus.io/trace" 35 | 36 | "github.com/orijtech/media-search/rpc" 37 | "github.com/orijtech/otils" 38 | ) 39 | 40 | func init() { 41 | xe, err := xray.NewExporter(xray.WithVersion("latest")) 42 | if err != nil { 43 | log.Fatalf("X-Ray newExporter: %v", err) 44 | } 45 | se, err := stackdriver.NewExporter(stackdriver.Options{ProjectID: otils.EnvOrAlternates("OPENCENSUS_GCP_PROJECTID", "census-demos")}) 46 | if err != nil { 47 | log.Fatalf("Stackdriver newExporter: %v", err) 48 | } 49 | pe, err := prometheus.NewExporter(prometheus.Options{Namespace: "mediasearch"}) 50 | if err != nil { 51 | log.Fatalf("Prometheus newExporter: %v", err) 52 | } 53 | 54 | // Register the trace exporters 55 | trace.RegisterExporter(se) 56 | trace.RegisterExporter(xe) 57 | 58 | // Register the metrics exporters 59 | view.RegisterExporter(pe) 60 | view.RegisterExporter(se) 61 | 62 | // Serve the Prometheus metrics 63 | go func() { 64 | mux := http.NewServeMux() 65 | mux.Handle("/metrics", pe) 66 | log.Fatal(http.ListenAndServe(":9988", mux)) 67 | }() 68 | 69 | view.SetReportingPeriod(10 * time.Second) 70 | } 71 | 72 | func main() { 73 | var onHTTP bool 74 | var port int 75 | flag.BoolVar(&onHTTP, "http", false, "if set true, run it as an HTTP server instead of as a gRPC server") 76 | flag.IntVar(&port, "port", 8899, "the port on which to run the server") 77 | flag.Parse() 78 | 79 | addr := fmt.Sprintf(":%d", port) 80 | 81 | // searchAPI handles both gRPC and HTTP transports. 82 | key := "YOUTUBE_API_KEY" 83 | envAPIKey := strings.TrimSpace(otils.EnvOrAlternates(key, "AIzaSyCokXpH0NP3MGqaoEFSshet8YGbsOP0lFE")) 84 | if envAPIKey == "" { 85 | log.Fatalf("Failed to retrieve %q from environment", key) 86 | } 87 | if err := view.Register(ochttp.DefaultClientViews...); err != nil { 88 | log.Fatalf("Failed to register DefaultClientViews for YouTube client API's sake: %v", err) 89 | } 90 | 91 | searchAPI, err := rpc.NewSearch(rpc.WithYouTubeAPIKey(envAPIKey)) 92 | if err != nil { 93 | log.Fatalf("Failed to create SearchAPI, error: %v", err) 94 | } 95 | genIDAPI, err := rpc.NewGenID() 96 | if err != nil { 97 | log.Fatalf("Failed to create GenIDAPI, error: %v", err) 98 | } 99 | 100 | switch onHTTP { 101 | case true: 102 | allViews := append(ochttp.DefaultServerViews, ochttp.DefaultClientViews...) 
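// In HTTP mode this process both serves /search and /id and makes outgoing HTTP
// calls to the YouTube API, so server and client views are registered together.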
103 | if err := view.Register(allViews...); err != nil { 104 | log.Fatalf("Failed to register all HTTP views, error: %v", err) 105 | } 106 | mux := http.NewServeMux() 107 | mux.Handle("/search", searchAPI) 108 | mux.Handle("/id", genIDAPI) 109 | h := &ochttp.Handler{Handler: mux} 110 | 111 | if err := http.ListenAndServe(addr, h); err != nil { 112 | log.Fatalf("HTTP server ListenAndServe error: %v", err) 113 | } 114 | 115 | default: 116 | allViews := append(ocgrpc.DefaultServerViews, ocgrpc.DefaultClientViews...) 117 | if err := view.Register(allViews...); err != nil { 118 | log.Fatalf("Failed to register all gRPC views, error: %v", err) 119 | } 120 | 121 | ln, err := net.Listen("tcp", addr) 122 | if err != nil { 123 | log.Fatalf("Failed to listen on address %q error: %v", addr, err) 124 | } 125 | log.Printf("Serving as gRPC server at %q", addr) 126 | srv := grpc.NewServer(grpc.StatsHandler(&ocgrpc.ServerHandler{})) 127 | rpc.RegisterSearchServer(srv, searchAPI) 128 | rpc.RegisterGenIDServer(srv, genIDAPI) 129 | if err := srv.Serve(ln); err != nil { 130 | log.Fatalf("gRPC server Serve error: %v", err) 131 | } 132 | } 133 | } 134 | -------------------------------------------------------------------------------- /clients/client.go: -------------------------------------------------------------------------------- 1 | // Copyright 2018, OpenCensus Authors 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
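
// client.go is a simple interactive command-line client: it reads keywords from
// stdin, POSTs them as JSON to the frontend's /search endpoint on localhost:9778,
// and prints the YouTube video or channel URL, title and description of each result.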
14 | 15 | package main 16 | 17 | import ( 18 | "bufio" 19 | "bytes" 20 | "encoding/json" 21 | "fmt" 22 | "io/ioutil" 23 | "log" 24 | "net/http" 25 | "os" 26 | 27 | "github.com/orijtech/otils" 28 | "github.com/orijtech/youtube" 29 | ) 30 | 31 | func main() { 32 | client := &http.Client{} 33 | br := bufio.NewReader(os.Stdin) 34 | for { 35 | fmt.Printf("Content to search$ ") 36 | input, _, err := br.ReadLine() 37 | if err != nil { 38 | log.Fatalf("Failed to read input: %v", err) 39 | } 40 | inBlob, err := json.Marshal(map[string]string{ 41 | "keywords": string(input), 42 | }) 43 | if err != nil { 44 | log.Fatalf("Failed to json.Marshal input blob: %v", err) 45 | } 46 | req, err := http.NewRequest("POST", "http://localhost:9778/search", bytes.NewReader(inBlob)) 47 | if err != nil { 48 | log.Fatalf("Failed to build POST request: %v", err) 49 | } 50 | res, err := client.Do(req) 51 | if err != nil { 52 | log.Fatalf("Failed to POST: %v", err) 53 | } 54 | outBlob, err := ioutil.ReadAll(res.Body) 55 | _ = res.Body.Close() 56 | if !otils.StatusOK(res.StatusCode) { 57 | log.Printf("Error encountered: statusCode: %d message: %s", res.StatusCode, outBlob) 58 | continue 59 | } 60 | if err != nil { 61 | log.Fatalf("Failed to read res.Body: %v", err) 62 | } 63 | var pages []*youtube.SearchPage 64 | if err := json.Unmarshal(outBlob, &pages); err != nil { 65 | log.Fatalf("Unmarshaling responses: %v", err) 66 | } 67 | for _, page := range pages { 68 | for _, video := range page.Items { 69 | if video == nil { 70 | continue 71 | } 72 | snippet := video.Snippet 73 | if video.Id.VideoId != "" { 74 | fmt.Printf("URL: https://youtu.be/%s\n", video.Id.VideoId) 75 | } else if video.Id.ChannelId != "" { 76 | fmt.Printf("ChannelURL: https://www.youtube.com/channel/%s\n", 77 | video.Id.ChannelId) 78 | } 79 | fmt.Printf("Title: %s\nDescription: %s\n\n\n", snippet.Title, snippet.Description) 80 | } 81 | } 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /clients/client.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """ 4 | Copyright 2018, OpenCensus Authors 5 | 6 | Licensed under the Apache License, Version 2.0 (the "License"); 7 | you may not use this file except in compliance with the License. 8 | u may obtain a copy of the License at 9 | 10 | http://www.apache.org/licenses/LICENSE-2.0 11 | 12 | Unless required by applicable law or agreed to in writing, software 13 | distributed under the License is distributed on an "AS IS" BASIS, 14 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | See the License for the specific language governing permissions and 16 | limitations under the License. 
17 | """ 18 | 19 | import requests 20 | 21 | def main(): 22 | while True: 23 | query = input('Content to search$ ') 24 | doSearch(query) 25 | 26 | def doSearch(query): 27 | res = requests.post('http://localhost:9778/search', json={'keywords': query}) 28 | pages = res.json() 29 | for page in pages: 30 | items = page['items'] 31 | for i, item in enumerate(items): 32 | id = item['id'] 33 | if 'videoId' in id: 34 | print('URL: https://youtu.be/{videoId}'.format(**item['id'])) 35 | elif 'channelId' in id: 36 | print('ChannelURL: https://www.youtube.com/channel/{channelId}'.format(**item['id'])) 37 | 38 | snippet = item['snippet'] 39 | snippet.setdefault('description', 'Unknown') 40 | print('Title: {title}\nDescription: {description}\n\n'.format(**snippet)) 41 | 42 | if __name__ == '__main__': 43 | main() 44 | -------------------------------------------------------------------------------- /clients/client.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # Copyright 2018, OpenCensus Authors 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http:#www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | 18 | while [ 1 ] 19 | do 20 | printf "Content to search$ " 21 | read F 22 | curl "http://localhost:9778/search?keywords=$F" 23 | printf "\n\n" 24 | done 25 | -------------------------------------------------------------------------------- /detailer/detailer.go: -------------------------------------------------------------------------------- 1 | // Copyright 2018, OpenCensus Authors 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | package main 16 | 17 | import ( 18 | "context" 19 | "encoding/json" 20 | "errors" 21 | "flag" 22 | "fmt" 23 | "log" 24 | "net/http" 25 | "time" 26 | 27 | gat "google.golang.org/api/googleapi/transport" 28 | "google.golang.org/api/youtube/v3" 29 | 30 | "contrib.go.opencensus.io/exporter/stackdriver" 31 | xray "github.com/census-instrumentation/opencensus-go-exporter-aws" 32 | "go.opencensus.io/exporter/prometheus" 33 | "go.opencensus.io/plugin/ochttp" 34 | "go.opencensus.io/stats/view" 35 | "go.opencensus.io/trace" 36 | 37 | "github.com/mongodb/mongo-go-driver/bson" 38 | "github.com/mongodb/mongo-go-driver/mongo" 39 | 40 | "github.com/orijtech/otils" 41 | yt "github.com/orijtech/youtube" 42 | ) 43 | 44 | var ytDetailsCollection *mongo.Collection 45 | var yc *yt.Client 46 | 47 | func init() { 48 | // Log into MongoDB 49 | mongoServerURI := otils.EnvOrAlternates("MEDIA_SEARCH_MONGO_SERVER_URI", "localhost:27017") 50 | mongoClient, err := mongo.NewClient("mongodb://" + mongoServerURI) 51 | log.Printf("mongoServerURI: %q\n", mongoServerURI) 52 | if err != nil { 53 | log.Fatalf("Failed to log into Mongo error: %v", err) 54 | } 55 | // Create or get the details collection. 56 | ytDetailsCollection = mongoClient.Database("media-searches").Collection("youtube_details") 57 | 58 | envAPIKey := otils.EnvOrAlternates("YOUTUBE_API_KEY", "AIzaSyCokXpH0NP3MGqaoEFSshet8YGbsOP0lFE") 59 | yc, err = yt.NewWithHTTPClient(&http.Client{ 60 | Transport: &ochttp.Transport{Base: &gat.APIKey{Key: envAPIKey}}, 61 | }) 62 | if err != nil { 63 | log.Fatalf("Creating YouTube client error: %v", err) 64 | } 65 | 66 | xe, err := xray.NewExporter(xray.WithVersion("latest")) 67 | if err != nil { 68 | log.Fatalf("X-Ray newExporter: %v", err) 69 | } 70 | 71 | se, err := stackdriver.NewExporter(stackdriver.Options{ProjectID: otils.EnvOrAlternates("OPENCENSUS_GCP_PROJECTID", "census-demos")}) 72 | if err != nil { 73 | log.Fatalf("Stackdriver newExporter: %v", err) 74 | } 75 | pe, err := prometheus.NewExporter(prometheus.Options{Namespace: "mediasearch"}) 76 | if err != nil { 77 | log.Fatalf("Prometheus newExporter: %v", err) 78 | } 79 | 80 | // Now register the exporters 81 | trace.RegisterExporter(xe) 82 | trace.RegisterExporter(se) 83 | view.RegisterExporter(se) 84 | view.RegisterExporter(pe) 85 | 86 | // Configure the tracer 87 | trace.ApplyConfig(trace.Config{DefaultSampler: trace.AlwaysSample()}) 88 | view.SetReportingPeriod(10 * time.Second) 89 | 90 | // Serve the Prometheus metrics 91 | go func() { 92 | mux := http.NewServeMux() 93 | mux.Handle("/metrics", pe) 94 | log.Fatal(http.ListenAndServe(":9989", mux)) 95 | }() 96 | 97 | // And then set the trace config with the default sampler. 
98 | view.SetReportingPeriod(15 * time.Second) 99 | } 100 | 101 | func main() { 102 | var port int 103 | flag.IntVar(&port, "port", 9944, "the port to run the server on") 104 | flag.Parse() 105 | 106 | addr := fmt.Sprintf(":%d", port) 107 | mux := http.NewServeMux() 108 | mux.HandleFunc("/", handleDetailing) 109 | h := &ochttp.Handler{Handler: mux} 110 | 111 | log.Printf("Serving on %q", addr) 112 | if err := http.ListenAndServe(addr, h); err != nil { 113 | log.Fatalf("Failed to serve the detailing server: %v", err) 114 | } 115 | } 116 | 117 | func handleDetailing(w http.ResponseWriter, r *http.Request) { 118 | _, span := trace.StartSpan(r.Context(), "/youtube-detailing") 119 | defer span.End() 120 | 121 | if r.Method != "POST" { 122 | http.Error(w, fmt.Sprintf(`only accepting "POST" not %q`, r.Method), http.StatusMethodNotAllowed) 123 | return 124 | } 125 | 126 | // Detailing looks up YouTube IDs by ID and then updates MongoDB 127 | // with the entry to ensure that later information lookup e.g. on Mobile 128 | // is fast and seamless for scrolling and search indexing. 129 | var idList []string 130 | dec := json.NewDecoder(r.Body) 131 | defer r.Body.Close() 132 | 133 | if err := dec.Decode(&idList); err != nil { 134 | http.Error(w, err.Error(), http.StatusInternalServerError) 135 | return 136 | } 137 | 138 | // We don't care too much about the result, we 139 | // just need to fire off this callback so that 140 | // whenever videos can be detailed in the background, 141 | // then they will be detailed. 142 | go performDetailing(idList) 143 | } 144 | 145 | func performDetailing(idList []string) { 146 | ctx, span := trace.StartSpan(context.Background(), "detailing") 147 | defer span.End() 148 | 149 | videos, err := lookupAndSetYouTubeDetails(ctx, idList) 150 | if err != nil { 151 | log.Printf("Detailing error: %v idList=%#v", err, idList) 152 | return 153 | } 154 | 155 | for _, video := range videos { 156 | if video == nil { 157 | continue 158 | } 159 | filter := bson.NewDocument(bson.EC.String("yt_id", video.Id)) 160 | _, _ = ytDetailsCollection.UpdateOne(ctx, filter, video) 161 | } 162 | } 163 | 164 | var errNotFound = errors.New("no details found for video") 165 | 166 | func lookupAndSetYouTubeDetails(ctx context.Context, youtubeIDs []string) ([]*youtube.Video, error) { 167 | log.Printf("Got requests: %#v\n", youtubeIDs) 168 | ctx, span := trace.StartSpan(ctx, "lookup-and-set-details") 169 | defer span.End() 170 | 171 | videoPages, err := yc.ById(ctx, youtubeIDs...) 172 | if err != nil { 173 | return nil, err 174 | } 175 | 176 | var detailsList []*youtube.Video 177 | for page := range videoPages { 178 | if page.Err != nil { 179 | continue 180 | } 181 | 182 | for _, item := range page.Items { 183 | if item != nil { 184 | detailsList = append(detailsList, item) 185 | } 186 | } 187 | } 188 | 189 | if len(detailsList) == 0 { 190 | return nil, errNotFound 191 | } 192 | 193 | return detailsList, nil 194 | } 195 | -------------------------------------------------------------------------------- /frontend.go: -------------------------------------------------------------------------------- 1 | // Copyright 2018, OpenCensus Authors 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | package main 16 | 17 | import ( 18 | "context" 19 | "encoding/json" 20 | "log" 21 | "net/http" 22 | "reflect" 23 | "time" 24 | 25 | "google.golang.org/grpc" 26 | 27 | "github.com/mongodb/mongo-go-driver/bson" 28 | "github.com/mongodb/mongo-go-driver/mongo" 29 | 30 | "contrib.go.opencensus.io/exporter/stackdriver" 31 | xray "github.com/census-instrumentation/opencensus-go-exporter-aws" 32 | "go.opencensus.io/exporter/prometheus" 33 | "go.opencensus.io/plugin/ocgrpc" 34 | "go.opencensus.io/plugin/ochttp" 35 | "go.opencensus.io/stats" 36 | "go.opencensus.io/stats/view" 37 | "go.opencensus.io/tag" 38 | "go.opencensus.io/trace" 39 | "go.opencensus.io/zpages" 40 | 41 | "github.com/orijtech/media-search/rpc" 42 | "github.com/orijtech/otils" 43 | ) 44 | 45 | var ytSearchesCollection *mongo.Collection 46 | var genIDClient rpc.GenIDClient 47 | var searchClient rpc.SearchClient 48 | 49 | func init() { 50 | se, err := stackdriver.NewExporter(stackdriver.Options{ 51 | MetricPrefix: "mediasearch", 52 | ProjectID: otils.EnvOrAlternates("OPENCENSUS_GCP_PROJECTID", "census-demos"), 53 | }) 54 | if err != nil { 55 | log.Fatalf("Stackdriver newExporter error: %v", err) 56 | } 57 | xe, err := xray.NewExporter(xray.WithVersion("latest")) 58 | if err != nil { 59 | log.Fatalf("AWS X-Ray newExporter error: %v", err) 60 | } 61 | pe, err := prometheus.NewExporter(prometheus.Options{Namespace: "mediasearch"}) 62 | if err != nil { 63 | log.Fatalf("Prometheus newExporter error: %v", err) 64 | } 65 | 66 | // Now register the exporters 67 | trace.RegisterExporter(se) 68 | trace.RegisterExporter(xe) 69 | view.RegisterExporter(se) 70 | view.RegisterExporter(pe) 71 | 72 | // Always sample for demo purposes 73 | trace.ApplyConfig(trace.Config{DefaultSampler: trace.AlwaysSample()}) 74 | 75 | // Serve the Prometheus metrics 76 | go func() { 77 | mux := http.NewServeMux() 78 | mux.Handle("/metrics", pe) 79 | log.Fatal(http.ListenAndServe(":9888", mux)) 80 | }() 81 | 82 | go func() { 83 | mux := http.NewServeMux() 84 | zpages.Handle(mux, "/debug") 85 | log.Fatal(http.ListenAndServe(":7788", mux)) 86 | }() 87 | 88 | view.SetReportingPeriod(10 * time.Second) 89 | 90 | // Register the views from MongoDB's Go driver 91 | if err := view.Register(mongo.AllViews...); err != nil { 92 | log.Fatalf("Failed to register MongoDB views: %v", err) 93 | } 94 | 95 | // And then for the custom views 96 | err = view.Register([]*view.View{ 97 | {Name: "cache_hits", Description: "cache hits", Measure: cacheHits, Aggregation: view.Count()}, 98 | {Name: "cache_misses", Description: "cache misses", Measure: cacheMisses, Aggregation: view.Count()}, 99 | { 100 | Name: "cache_insertion_errors", Description: "cache insertion errors", 101 | Measure: cacheInsertionErrors, Aggregation: view.Count(), TagKeys: []tag.Key{keyCacheType}, 102 | }, { 103 | 104 | Name: "youtube_api_errors", Description: "youtube errors", 105 | Measure: youtubeAPIErrors, Aggregation: view.Count(), 106 | }, { 107 | Name: "mongo_errors", Description: "MongoDB errors", 108 | Measure: mongoErrors, Aggregation: view.Count(), 109 | 
}, 110 | }...) 111 | if err != nil { 112 | log.Fatalf("Failed to register custom views: %v", err) 113 | } 114 | 115 | log.Printf("Successfully finished exporter and view registration") 116 | 117 | // Log into MongoDB 118 | mongoServerURI := otils.EnvOrAlternates("MEDIA_SEARCH_MONGO_SERVER_URI", "localhost:27017") 119 | mongoClient, err := mongo.NewClient("mongodb://" + mongoServerURI) 120 | log.Printf("mongoServerURI: %q\n", mongoServerURI) 121 | if err != nil { 122 | log.Fatalf("Failed to log into Mongo error: %v", err) 123 | } 124 | // Connect to the server 125 | if err := mongoClient.Connect(context.Background()); err != nil { 126 | log.Fatalf("Failed to connect to the MongoDB server: %v", err) 127 | } 128 | // Create or get the searches collection. 129 | ytSearchesCollection = mongoClient.Database("media-searches").Collection("youtube_searches") 130 | } 131 | 132 | func main() { 133 | // Firstly dial to the search service 134 | searchAddr := ":8899" 135 | conn, err := grpc.Dial(searchAddr, grpc.WithInsecure(), grpc.WithStatsHandler(&ocgrpc.ClientHandler{})) 136 | if err != nil { 137 | log.Fatalf("Failed to dial to gRPC server: %v", err) 138 | } 139 | searchClient = rpc.NewSearchClient(conn) 140 | genIDClient = rpc.NewGenIDClient(conn) 141 | log.Printf("Successfully dialed to the gRPC {id, search} services at %q", searchAddr) 142 | 143 | // Subscribe to every view available since the service is a mix of gRPC and HTTP, client and server services. 144 | allViews := append(ochttp.DefaultClientViews, ochttp.DefaultServerViews...) 145 | allViews = append(allViews, ocgrpc.DefaultClientViews...) 146 | allViews = append(allViews, ocgrpc.DefaultServerViews...) 147 | if err := view.Register(allViews...); err != nil { 148 | log.Fatalf("Failed to register all the default {ocgrpc, ochttp} views: %v", err) 149 | } 150 | 151 | addr := ":9778" 152 | mux := http.NewServeMux() 153 | mux.HandleFunc("/search", search) 154 | mux.Handle("/", http.FileServer(http.Dir("./static"))) 155 | 156 | h := &ochttp.Handler{ 157 | // Wrap the handler with CORS 158 | Handler: otils.CORSMiddlewareAllInclusive(mux), 159 | } 160 | log.Printf("Serving on %q", addr) 161 | if err := http.ListenAndServe(addr, h); err != nil { 162 | log.Fatalf("ListenAndServe err: %v", err) 163 | } 164 | } 165 | 166 | type dbCacheKV struct { 167 | CacheID string `json:"cache_id" bson:"cache_id,omitempty"` 168 | Key string `json:"key" bson:"key,omitempty"` 169 | Value []byte `json:"value" bson:"value,omitempty"` 170 | CacheTime time.Time `json:"ct" bson:"ct,omitempty"` 171 | } 172 | 173 | var rpcNothing = new(rpc.Nothing) 174 | 175 | func search(w http.ResponseWriter, r *http.Request) { 176 | sc := trace.FromContext(r.Context()).SpanContext() 177 | log.Printf("search here: %+v\n", sc) 178 | ctx, span := trace.StartSpan(r.Context(), "/search") 179 | defer span.End() 180 | 181 | if r.Method == "OPTIONS" { 182 | return 183 | } 184 | 185 | q, err := rpc.ExtractQuery(ctx, r) 186 | if err != nil { 187 | http.Error(w, err.Error(), http.StatusBadRequest) 188 | return 189 | } 190 | 191 | keywords := q.Keywords 192 | if keywords == "" { 193 | http.Error(w, "Expecting keywords", http.StatusBadRequest) 194 | return 195 | } 196 | filter := bson.NewDocument(bson.EC.String("key", q.Keywords)) 197 | 198 | span.Annotate([]trace.Attribute{ 199 | trace.StringAttribute("db", "mongodb"), 200 | trace.StringAttribute("driver", "go"), 201 | }, "Checking cache if the query is present") 202 | 203 | dbRes := ytSearchesCollection.FindOne(ctx, filter) 204 | // 1. 
Firstly check if this has been cached before 205 | cachedKV := new(dbCacheKV) 206 | 207 | switch err := dbRes.Decode(cachedKV); err { 208 | case nil: // Cache hit! 209 | if !reflect.DeepEqual(cachedKV, blankDBKV) { 210 | span.Annotate([]trace.Attribute{ 211 | trace.BoolAttribute("hit", true), 212 | trace.StringAttribute("db", "mongodb"), 213 | trace.StringAttribute("driver", "go"), 214 | }, "Cache hit") 215 | stats.Record(ctx, cacheHits.M(1)) 216 | w.Write(cachedKV.Value) 217 | return 218 | } 219 | 220 | // Otherwise this is false cache hit! 221 | 222 | case bson.ErrElementNotFound, mongo.ErrNoDocuments: 223 | // Cache miss, now retrieve the results below 224 | 225 | default: 226 | stats.Record(ctx, mongoErrors.M(1)) 227 | // We've failed to decode but oh well, that was just a cache miss 228 | // the user should still get their result back! Thus continue below 229 | // to fetch the results from the search API. 230 | } 231 | 232 | // 2. Otherwise that was a cache-miss, now retrieve it then save it 233 | stats.Record(ctx, cacheMisses.M(1)) 234 | 235 | // 3. Get the global CacheID 236 | cacheID, err := genIDClient.NewID(ctx, rpcNothing) 237 | if err != nil { 238 | http.Error(w, err.Error(), http.StatusInternalServerError) 239 | return 240 | } 241 | 242 | span.Annotate([]trace.Attribute{ 243 | trace.BoolAttribute("hit", false), 244 | trace.StringAttribute("db", "mongodb"), 245 | trace.StringAttribute("driver", "go"), 246 | }, "Cache miss, hence YouTube API search") 247 | 248 | results, err := searchClient.SearchIt(ctx, q) 249 | if err != nil { 250 | stats.Record(ctx, youtubeAPIErrors.M(1)) 251 | span.Annotate([]trace.Attribute{ 252 | trace.StringAttribute("api_error", err.Error()), 253 | trace.StringAttribute("db", "mongodb"), 254 | trace.StringAttribute("driver", "go"), 255 | }, "YouTube API search error") 256 | http.Error(w, err.Error(), http.StatusInternalServerError) 257 | return 258 | } 259 | 260 | outBlob, err := json.Marshal(results.Results) 261 | if err != nil { 262 | http.Error(w, err.Error(), http.StatusInternalServerError) 263 | return 264 | } 265 | 266 | // 4. Now cache it so that next time it'll be a hit. 
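	// The raw keywords string is the cache key and the marshaled JSON response is
	// the cached value, so an identical query can later be answered without another
	// YouTube API call.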
267 | insertKV := &dbCacheKV{ 268 | CacheID: cacheID.Value, 269 | Key: keywords, 270 | Value: outBlob, 271 | CacheTime: time.Now(), 272 | } 273 | 274 | if _, err := ytSearchesCollection.InsertOne(ctx, insertKV); err != nil { 275 | ctx, _ = tag.New(ctx, tag.Upsert(keyCacheType, "mongo")) 276 | stats.Record(ctx, cacheInsertionErrors.M(1)) 277 | } 278 | 279 | _, _ = w.Write(outBlob) 280 | } 281 | 282 | var ( 283 | cacheHits = stats.Int64("cache_hits", "the number of cache hits", stats.UnitNone) 284 | cacheMisses = stats.Int64("cache_misses", "the number of cache misses", stats.UnitNone) 285 | 286 | cacheInsertionErrors = stats.Int64("cache_insertion_errors", "the number of cache insertion errors", stats.UnitNone) 287 | 288 | youtubeAPIErrors = stats.Int64("youtube_api_errors", "the number of youtube API lookup errors", stats.UnitNone) 289 | mongoErrors = stats.Int64("mongo_errors", "the number of MongoDB errors", stats.UnitNone) 290 | 291 | blankDBKV = new(dbCacheKV) 292 | keyCacheType = mustKey("cache_type") 293 | ) 294 | 295 | func mustKey(sk string) tag.Key { 296 | k, err := tag.NewKey(sk) 297 | if err != nil { 298 | log.Fatalf("Creating new key %q error: %v", sk, err) 299 | } 300 | return k 301 | } 302 | -------------------------------------------------------------------------------- /images/architecture.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/orijtech/media-search/57ee466cc324b8c8994245b0d2c595d3df92fef6/images/architecture.jpg -------------------------------------------------------------------------------- /images/prometheus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/orijtech/media-search/57ee466cc324b8c8994245b0d2c595d3df92fef6/images/prometheus.png -------------------------------------------------------------------------------- /images/service-map.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/orijtech/media-search/57ee466cc324b8c8994245b0d2c595d3df92fef6/images/service-map.png -------------------------------------------------------------------------------- /images/stackdriver-metrics.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/orijtech/media-search/57ee466cc324b8c8994245b0d2c595d3df92fef6/images/stackdriver-metrics.png -------------------------------------------------------------------------------- /images/stackdriver-request.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/orijtech/media-search/57ee466cc324b8c8994245b0d2c595d3df92fef6/images/stackdriver-request.png -------------------------------------------------------------------------------- /images/webui.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/orijtech/media-search/57ee466cc324b8c8994245b0d2c595d3df92fef6/images/webui.jpg -------------------------------------------------------------------------------- /images/xray-request.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/orijtech/media-search/57ee466cc324b8c8994245b0d2c595d3df92fef6/images/xray-request.png -------------------------------------------------------------------------------- /prometheus.yml: 
-------------------------------------------------------------------------------- 1 | global: 2 | scrape_interval: 10s 3 | 4 | external_labels: 5 | monitor: 'media_search' 6 | 7 | scrape_configs: 8 | - job_name: 'media_search' 9 | 10 | scrape_interval: 10s 11 | 12 | static_configs: 13 | - targets: ['localhost:9888', 'localhost:9988', 'localhost:9989'] 14 | -------------------------------------------------------------------------------- /rpc/defs.pb.go: -------------------------------------------------------------------------------- 1 | // Code generated by protoc-gen-go. DO NOT EDIT. 2 | // source: defs.proto 3 | 4 | /* 5 | Package rpc is a generated protocol buffer package. 6 | 7 | It is generated from these files: 8 | defs.proto 9 | 10 | It has these top-level messages: 11 | ID 12 | Nothing 13 | Query 14 | SearchResult 15 | YouTubeResult 16 | YouTubeSnippet 17 | Thumbnail 18 | YouTubeID 19 | SearchResults 20 | */ 21 | package rpc 22 | 23 | import proto "github.com/golang/protobuf/proto" 24 | import fmt "fmt" 25 | import math "math" 26 | 27 | import ( 28 | context "golang.org/x/net/context" 29 | grpc "google.golang.org/grpc" 30 | ) 31 | 32 | // Reference imports to suppress errors if they are not otherwise used. 33 | var _ = proto.Marshal 34 | var _ = fmt.Errorf 35 | var _ = math.Inf 36 | 37 | // This is a compile-time assertion to ensure that this generated file 38 | // is compatible with the proto package it is being compiled against. 39 | // A compilation error at this line likely means your copy of the 40 | // proto package needs to be updated. 41 | const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package 42 | 43 | type ID struct { 44 | Value string `protobuf:"bytes,1,opt,name=value" json:"value,omitempty"` 45 | } 46 | 47 | func (m *ID) Reset() { *m = ID{} } 48 | func (m *ID) String() string { return proto.CompactTextString(m) } 49 | func (*ID) ProtoMessage() {} 50 | func (*ID) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } 51 | 52 | func (m *ID) GetValue() string { 53 | if m != nil { 54 | return m.Value 55 | } 56 | return "" 57 | } 58 | 59 | type Nothing struct { 60 | } 61 | 62 | func (m *Nothing) Reset() { *m = Nothing{} } 63 | func (m *Nothing) String() string { return proto.CompactTextString(m) } 64 | func (*Nothing) ProtoMessage() {} 65 | func (*Nothing) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } 66 | 67 | type Query struct { 68 | Keywords string `protobuf:"bytes,1,opt,name=keywords" json:"keywords,omitempty"` 69 | MaxPages int32 `protobuf:"varint,2,opt,name=maxPages" json:"maxPages,omitempty"` 70 | MaxResultsPerPage int32 `protobuf:"varint,3,opt,name=maxResultsPerPage" json:"maxResultsPerPage,omitempty"` 71 | } 72 | 73 | func (m *Query) Reset() { *m = Query{} } 74 | func (m *Query) String() string { return proto.CompactTextString(m) } 75 | func (*Query) ProtoMessage() {} 76 | func (*Query) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } 77 | 78 | func (m *Query) GetKeywords() string { 79 | if m != nil { 80 | return m.Keywords 81 | } 82 | return "" 83 | } 84 | 85 | func (m *Query) GetMaxPages() int32 { 86 | if m != nil { 87 | return m.MaxPages 88 | } 89 | return 0 90 | } 91 | 92 | func (m *Query) GetMaxResultsPerPage() int32 { 93 | if m != nil { 94 | return m.MaxResultsPerPage 95 | } 96 | return 0 97 | } 98 | 99 | type SearchResult struct { 100 | Index uint64 `protobuf:"varint,1,opt,name=index" json:"index,omitempty"` 101 | Items []*YouTubeResult `protobuf:"bytes,2,rep,name=items" json:"items,omitempty"` 
102 | Err string `protobuf:"bytes,3,opt,name=err" json:"err,omitempty"` 103 | } 104 | 105 | func (m *SearchResult) Reset() { *m = SearchResult{} } 106 | func (m *SearchResult) String() string { return proto.CompactTextString(m) } 107 | func (*SearchResult) ProtoMessage() {} 108 | func (*SearchResult) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } 109 | 110 | func (m *SearchResult) GetIndex() uint64 { 111 | if m != nil { 112 | return m.Index 113 | } 114 | return 0 115 | } 116 | 117 | func (m *SearchResult) GetItems() []*YouTubeResult { 118 | if m != nil { 119 | return m.Items 120 | } 121 | return nil 122 | } 123 | 124 | func (m *SearchResult) GetErr() string { 125 | if m != nil { 126 | return m.Err 127 | } 128 | return "" 129 | } 130 | 131 | type YouTubeResult struct { 132 | ItemId *ID `protobuf:"bytes,1,opt,name=itemId" json:"itemId,omitempty"` 133 | Etag string `protobuf:"bytes,2,opt,name=etag" json:"etag,omitempty"` 134 | Id *YouTubeID `protobuf:"bytes,3,opt,name=id" json:"id,omitempty"` 135 | Kind string `protobuf:"bytes,4,opt,name=kind" json:"kind,omitempty"` 136 | Snippet *YouTubeSnippet `protobuf:"bytes,5,opt,name=snippet" json:"snippet,omitempty"` 137 | } 138 | 139 | func (m *YouTubeResult) Reset() { *m = YouTubeResult{} } 140 | func (m *YouTubeResult) String() string { return proto.CompactTextString(m) } 141 | func (*YouTubeResult) ProtoMessage() {} 142 | func (*YouTubeResult) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } 143 | 144 | func (m *YouTubeResult) GetItemId() *ID { 145 | if m != nil { 146 | return m.ItemId 147 | } 148 | return nil 149 | } 150 | 151 | func (m *YouTubeResult) GetEtag() string { 152 | if m != nil { 153 | return m.Etag 154 | } 155 | return "" 156 | } 157 | 158 | func (m *YouTubeResult) GetId() *YouTubeID { 159 | if m != nil { 160 | return m.Id 161 | } 162 | return nil 163 | } 164 | 165 | func (m *YouTubeResult) GetKind() string { 166 | if m != nil { 167 | return m.Kind 168 | } 169 | return "" 170 | } 171 | 172 | func (m *YouTubeResult) GetSnippet() *YouTubeSnippet { 173 | if m != nil { 174 | return m.Snippet 175 | } 176 | return nil 177 | } 178 | 179 | type YouTubeSnippet struct { 180 | ChannelId string `protobuf:"bytes,1,opt,name=channelId" json:"channelId,omitempty"` 181 | ChannelTitle string `protobuf:"bytes,2,opt,name=channelTitle" json:"channelTitle,omitempty"` 182 | Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"` 183 | PublishedAt string `protobuf:"bytes,4,opt,name=publishedAt" json:"publishedAt,omitempty"` 184 | Thumbnails map[string]*Thumbnail `protobuf:"bytes,5,rep,name=thumbnails" json:"thumbnails,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` 185 | Title string `protobuf:"bytes,6,opt,name=title" json:"title,omitempty"` 186 | } 187 | 188 | func (m *YouTubeSnippet) Reset() { *m = YouTubeSnippet{} } 189 | func (m *YouTubeSnippet) String() string { return proto.CompactTextString(m) } 190 | func (*YouTubeSnippet) ProtoMessage() {} 191 | func (*YouTubeSnippet) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } 192 | 193 | func (m *YouTubeSnippet) GetChannelId() string { 194 | if m != nil { 195 | return m.ChannelId 196 | } 197 | return "" 198 | } 199 | 200 | func (m *YouTubeSnippet) GetChannelTitle() string { 201 | if m != nil { 202 | return m.ChannelTitle 203 | } 204 | return "" 205 | } 206 | 207 | func (m *YouTubeSnippet) GetDescription() string { 208 | if m != nil { 209 | return m.Description 210 | } 211 | return "" 212 
| } 213 | 214 | func (m *YouTubeSnippet) GetPublishedAt() string { 215 | if m != nil { 216 | return m.PublishedAt 217 | } 218 | return "" 219 | } 220 | 221 | func (m *YouTubeSnippet) GetThumbnails() map[string]*Thumbnail { 222 | if m != nil { 223 | return m.Thumbnails 224 | } 225 | return nil 226 | } 227 | 228 | func (m *YouTubeSnippet) GetTitle() string { 229 | if m != nil { 230 | return m.Title 231 | } 232 | return "" 233 | } 234 | 235 | type Thumbnail struct { 236 | Height int64 `protobuf:"varint,1,opt,name=height" json:"height,omitempty"` 237 | Width int64 `protobuf:"varint,2,opt,name=width" json:"width,omitempty"` 238 | Url string `protobuf:"bytes,3,opt,name=url" json:"url,omitempty"` 239 | } 240 | 241 | func (m *Thumbnail) Reset() { *m = Thumbnail{} } 242 | func (m *Thumbnail) String() string { return proto.CompactTextString(m) } 243 | func (*Thumbnail) ProtoMessage() {} 244 | func (*Thumbnail) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } 245 | 246 | func (m *Thumbnail) GetHeight() int64 { 247 | if m != nil { 248 | return m.Height 249 | } 250 | return 0 251 | } 252 | 253 | func (m *Thumbnail) GetWidth() int64 { 254 | if m != nil { 255 | return m.Width 256 | } 257 | return 0 258 | } 259 | 260 | func (m *Thumbnail) GetUrl() string { 261 | if m != nil { 262 | return m.Url 263 | } 264 | return "" 265 | } 266 | 267 | type YouTubeID struct { 268 | Kind string `protobuf:"bytes,1,opt,name=kind" json:"kind,omitempty"` 269 | VideoId string `protobuf:"bytes,2,opt,name=videoId" json:"videoId,omitempty"` 270 | PlaylistId string `protobuf:"bytes,3,opt,name=playlistId" json:"playlistId,omitempty"` 271 | } 272 | 273 | func (m *YouTubeID) Reset() { *m = YouTubeID{} } 274 | func (m *YouTubeID) String() string { return proto.CompactTextString(m) } 275 | func (*YouTubeID) ProtoMessage() {} 276 | func (*YouTubeID) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } 277 | 278 | func (m *YouTubeID) GetKind() string { 279 | if m != nil { 280 | return m.Kind 281 | } 282 | return "" 283 | } 284 | 285 | func (m *YouTubeID) GetVideoId() string { 286 | if m != nil { 287 | return m.VideoId 288 | } 289 | return "" 290 | } 291 | 292 | func (m *YouTubeID) GetPlaylistId() string { 293 | if m != nil { 294 | return m.PlaylistId 295 | } 296 | return "" 297 | } 298 | 299 | type SearchResults struct { 300 | Results []*SearchResult `protobuf:"bytes,1,rep,name=Results" json:"Results,omitempty"` 301 | } 302 | 303 | func (m *SearchResults) Reset() { *m = SearchResults{} } 304 | func (m *SearchResults) String() string { return proto.CompactTextString(m) } 305 | func (*SearchResults) ProtoMessage() {} 306 | func (*SearchResults) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } 307 | 308 | func (m *SearchResults) GetResults() []*SearchResult { 309 | if m != nil { 310 | return m.Results 311 | } 312 | return nil 313 | } 314 | 315 | func init() { 316 | proto.RegisterType((*ID)(nil), "rpc.ID") 317 | proto.RegisterType((*Nothing)(nil), "rpc.Nothing") 318 | proto.RegisterType((*Query)(nil), "rpc.Query") 319 | proto.RegisterType((*SearchResult)(nil), "rpc.SearchResult") 320 | proto.RegisterType((*YouTubeResult)(nil), "rpc.YouTubeResult") 321 | proto.RegisterType((*YouTubeSnippet)(nil), "rpc.YouTubeSnippet") 322 | proto.RegisterType((*Thumbnail)(nil), "rpc.thumbnail") 323 | proto.RegisterType((*YouTubeID)(nil), "rpc.YouTubeID") 324 | proto.RegisterType((*SearchResults)(nil), "rpc.SearchResults") 325 | } 326 | 327 | // Reference imports to suppress errors if they are not otherwise used. 
328 | var _ context.Context 329 | var _ grpc.ClientConn 330 | 331 | // This is a compile-time assertion to ensure that this generated file 332 | // is compatible with the grpc package it is being compiled against. 333 | const _ = grpc.SupportPackageIsVersion4 334 | 335 | // Client API for GenID service 336 | 337 | type GenIDClient interface { 338 | NewID(ctx context.Context, in *Nothing, opts ...grpc.CallOption) (*ID, error) 339 | } 340 | 341 | type genIDClient struct { 342 | cc *grpc.ClientConn 343 | } 344 | 345 | func NewGenIDClient(cc *grpc.ClientConn) GenIDClient { 346 | return &genIDClient{cc} 347 | } 348 | 349 | func (c *genIDClient) NewID(ctx context.Context, in *Nothing, opts ...grpc.CallOption) (*ID, error) { 350 | out := new(ID) 351 | err := grpc.Invoke(ctx, "/rpc.GenID/NewID", in, out, c.cc, opts...) 352 | if err != nil { 353 | return nil, err 354 | } 355 | return out, nil 356 | } 357 | 358 | // Server API for GenID service 359 | 360 | type GenIDServer interface { 361 | NewID(context.Context, *Nothing) (*ID, error) 362 | } 363 | 364 | func RegisterGenIDServer(s *grpc.Server, srv GenIDServer) { 365 | s.RegisterService(&_GenID_serviceDesc, srv) 366 | } 367 | 368 | func _GenID_NewID_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { 369 | in := new(Nothing) 370 | if err := dec(in); err != nil { 371 | return nil, err 372 | } 373 | if interceptor == nil { 374 | return srv.(GenIDServer).NewID(ctx, in) 375 | } 376 | info := &grpc.UnaryServerInfo{ 377 | Server: srv, 378 | FullMethod: "/rpc.GenID/NewID", 379 | } 380 | handler := func(ctx context.Context, req interface{}) (interface{}, error) { 381 | return srv.(GenIDServer).NewID(ctx, req.(*Nothing)) 382 | } 383 | return interceptor(ctx, in, info, handler) 384 | } 385 | 386 | var _GenID_serviceDesc = grpc.ServiceDesc{ 387 | ServiceName: "rpc.GenID", 388 | HandlerType: (*GenIDServer)(nil), 389 | Methods: []grpc.MethodDesc{ 390 | { 391 | MethodName: "NewID", 392 | Handler: _GenID_NewID_Handler, 393 | }, 394 | }, 395 | Streams: []grpc.StreamDesc{}, 396 | Metadata: "defs.proto", 397 | } 398 | 399 | // Client API for Search service 400 | 401 | type SearchClient interface { 402 | SearchIt(ctx context.Context, in *Query, opts ...grpc.CallOption) (*SearchResults, error) 403 | } 404 | 405 | type searchClient struct { 406 | cc *grpc.ClientConn 407 | } 408 | 409 | func NewSearchClient(cc *grpc.ClientConn) SearchClient { 410 | return &searchClient{cc} 411 | } 412 | 413 | func (c *searchClient) SearchIt(ctx context.Context, in *Query, opts ...grpc.CallOption) (*SearchResults, error) { 414 | out := new(SearchResults) 415 | err := grpc.Invoke(ctx, "/rpc.Search/SearchIt", in, out, c.cc, opts...) 
416 | if err != nil { 417 | return nil, err 418 | } 419 | return out, nil 420 | } 421 | 422 | // Server API for Search service 423 | 424 | type SearchServer interface { 425 | SearchIt(context.Context, *Query) (*SearchResults, error) 426 | } 427 | 428 | func RegisterSearchServer(s *grpc.Server, srv SearchServer) { 429 | s.RegisterService(&_Search_serviceDesc, srv) 430 | } 431 | 432 | func _Search_SearchIt_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { 433 | in := new(Query) 434 | if err := dec(in); err != nil { 435 | return nil, err 436 | } 437 | if interceptor == nil { 438 | return srv.(SearchServer).SearchIt(ctx, in) 439 | } 440 | info := &grpc.UnaryServerInfo{ 441 | Server: srv, 442 | FullMethod: "/rpc.Search/SearchIt", 443 | } 444 | handler := func(ctx context.Context, req interface{}) (interface{}, error) { 445 | return srv.(SearchServer).SearchIt(ctx, req.(*Query)) 446 | } 447 | return interceptor(ctx, in, info, handler) 448 | } 449 | 450 | var _Search_serviceDesc = grpc.ServiceDesc{ 451 | ServiceName: "rpc.Search", 452 | HandlerType: (*SearchServer)(nil), 453 | Methods: []grpc.MethodDesc{ 454 | { 455 | MethodName: "SearchIt", 456 | Handler: _Search_SearchIt_Handler, 457 | }, 458 | }, 459 | Streams: []grpc.StreamDesc{}, 460 | Metadata: "defs.proto", 461 | } 462 | 463 | func init() { proto.RegisterFile("defs.proto", fileDescriptor0) } 464 | 465 | var fileDescriptor0 = []byte{ 466 | // 570 bytes of a gzipped FileDescriptorProto 467 | 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x54, 0xdd, 0x6e, 0xd3, 0x30, 468 | 0x14, 0x5e, 0x92, 0xa5, 0x5d, 0x4e, 0xb7, 0xc1, 0x0e, 0x08, 0x45, 0x15, 0x1a, 0x95, 0xe1, 0xa2, 469 | 0x88, 0xd1, 0x8b, 0x22, 0x21, 0x84, 0xb8, 0x41, 0x14, 0xa1, 0x08, 0x31, 0x0d, 0x6f, 0x37, 0xbb, 470 | 0x23, 0xad, 0x4d, 0x63, 0x2d, 0x4d, 0x22, 0xdb, 0xd9, 0xd6, 0x27, 0xe2, 0x35, 0x78, 0x34, 0x64, 471 | 0xc7, 0x09, 0x19, 0xe3, 0xee, 0x7c, 0x9f, 0xbf, 0xf3, 0x7f, 0x12, 0x00, 0xc6, 0x7f, 0xaa, 0x59, 472 | 0x25, 0x4b, 0x5d, 0x62, 0x20, 0xab, 0x15, 0x19, 0x83, 0x9f, 0x2c, 0xf0, 0x31, 0x84, 0xd7, 0x69, 473 | 0x5e, 0xf3, 0xd8, 0x9b, 0x78, 0xd3, 0x88, 0x36, 0x80, 0x44, 0x30, 0x3c, 0x2d, 0x75, 0x26, 0x8a, 474 | 0x35, 0xd9, 0x40, 0xf8, 0xbd, 0xe6, 0x72, 0x8b, 0x63, 0xd8, 0xbb, 0xe2, 0xdb, 0x9b, 0x52, 0x32, 475 | 0xe5, 0xc4, 0x1d, 0x36, 0x6f, 0x9b, 0xf4, 0xf6, 0x2c, 0x5d, 0x73, 0x15, 0xfb, 0x13, 0x6f, 0x1a, 476 | 0xd2, 0x0e, 0xe3, 0x09, 0x1c, 0x6d, 0xd2, 0x5b, 0xca, 0x55, 0x9d, 0x6b, 0x75, 0xc6, 0xa5, 0x61, 477 | 0xe3, 0xc0, 0x8a, 0xee, 0x3f, 0x90, 0x1f, 0xb0, 0x7f, 0xce, 0x53, 0xb9, 0xca, 0x1a, 0xde, 0xd4, 478 | 0x27, 0x0a, 0xc6, 0x6f, 0x6d, 0xca, 0x5d, 0xda, 0x00, 0x9c, 0x42, 0x28, 0x34, 0xdf, 0x98, 0x64, 479 | 0xc1, 0x74, 0x34, 0xc7, 0x99, 0xac, 0x56, 0xb3, 0xcb, 0xb2, 0xbe, 0xa8, 0x97, 0xbc, 0x71, 0xa4, 480 | 0x8d, 0x00, 0x1f, 0x42, 0xc0, 0xa5, 0xb4, 0xf9, 0x22, 0x6a, 0x4c, 0xf2, 0xcb, 0x83, 0x83, 0x3b, 481 | 0x52, 0x7c, 0x06, 0x03, 0x23, 0x4e, 0x98, 0x4d, 0x32, 0x9a, 0x0f, 0x6d, 0xb8, 0x64, 0x41, 0x1d, 482 | 0x8d, 0x08, 0xbb, 0x5c, 0xa7, 0x6b, 0xdb, 0x5a, 0x44, 0xad, 0x8d, 0xc7, 0xe0, 0x0b, 0x66, 0xe3, 483 | 0x8e, 0xe6, 0x87, 0xfd, 0xfc, 0xc9, 0x82, 0xfa, 0xc2, 0xfa, 0x5c, 0x89, 0x82, 0xc5, 0xbb, 0x8d, 484 | 0x8f, 0xb1, 0xf1, 0x35, 0x0c, 0x55, 0x21, 0xaa, 0x8a, 0xeb, 0x38, 0xb4, 0x8e, 0x8f, 0xfa, 0x8e, 485 | 0xe7, 0xcd, 0x13, 0x6d, 0x35, 0xe4, 0xb7, 0x0f, 0x87, 0x77, 0xdf, 0xf0, 0x29, 0x44, 0xab, 0x2c, 486 | 0x2d, 0x0a, 0x9e, 0xbb, 0x6a, 0x23, 0xfa, 0x97, 0x40, 0x02, 
0xfb, 0x0e, 0x5c, 0x08, 0x9d, 0x73, 487 | 0x57, 0xef, 0x1d, 0x0e, 0x27, 0x30, 0x62, 0x5c, 0xad, 0xa4, 0xa8, 0xb4, 0x28, 0x0b, 0x37, 0x98, 488 | 0x3e, 0x65, 0x14, 0x55, 0xbd, 0xcc, 0x85, 0xca, 0x38, 0xfb, 0xa8, 0x5d, 0x03, 0x7d, 0x0a, 0x3f, 489 | 0x01, 0xe8, 0xac, 0xde, 0x2c, 0x8b, 0x54, 0xe4, 0x2a, 0x0e, 0xed, 0x0e, 0x9e, 0xff, 0xa7, 0x95, 490 | 0xd9, 0x45, 0xa7, 0xfa, 0x5c, 0x68, 0xb9, 0xa5, 0x3d, 0x37, 0xb3, 0x59, 0x6d, 0xab, 0x1c, 0x34, 491 | 0x97, 0x67, 0xc1, 0xf8, 0x1b, 0x3c, 0xf8, 0xc7, 0xc9, 0xac, 0xf0, 0x8a, 0x6f, 0x5d, 0xb7, 0xc6, 492 | 0xc4, 0x17, 0xed, 0xd1, 0xfa, 0xbd, 0xf1, 0x77, 0xa1, 0xdd, 0x11, 0xbf, 0xf7, 0xdf, 0x79, 0xe4, 493 | 0x2b, 0x44, 0x1d, 0x8f, 0x4f, 0x60, 0x90, 0x71, 0xb1, 0xce, 0xb4, 0x8d, 0x15, 0x50, 0x87, 0x4c, 494 | 0x25, 0x37, 0x82, 0xe9, 0xcc, 0x86, 0x0b, 0x68, 0x03, 0x4c, 0xda, 0x5a, 0xe6, 0xed, 0xe5, 0xd4, 495 | 0x32, 0x27, 0x97, 0x10, 0x75, 0x3b, 0xee, 0xf6, 0xeb, 0xf5, 0xf6, 0x1b, 0xc3, 0xf0, 0x5a, 0x30, 496 | 0x5e, 0x26, 0xcc, 0x8d, 0xbe, 0x85, 0x78, 0x0c, 0x50, 0xe5, 0xe9, 0x36, 0x17, 0x4a, 0x27, 0xcc, 497 | 0xc5, 0xec, 0x31, 0xe4, 0x03, 0x1c, 0xf4, 0xcf, 0x5e, 0xe1, 0x2b, 0x18, 0x3a, 0x33, 0xf6, 0xec, 498 | 0x7c, 0x8f, 0x6c, 0x93, 0x7d, 0x11, 0x6d, 0x15, 0xf3, 0x97, 0x10, 0x7e, 0xe1, 0x45, 0xb2, 0xc0, 499 | 0x09, 0x84, 0xa7, 0xfc, 0x26, 0x59, 0xe0, 0xbe, 0x55, 0xbb, 0x6f, 0x78, 0xdc, 0x1e, 0x34, 0xd9, 500 | 0x99, 0xbf, 0x85, 0x41, 0x13, 0x03, 0x4f, 0x60, 0xaf, 0xb1, 0x12, 0x8d, 0x60, 0x05, 0xf6, 0x3b, 501 | 0x1f, 0xe3, 0xbd, 0x44, 0x8a, 0xec, 0x2c, 0x07, 0xf6, 0xcf, 0xf1, 0xe6, 0x4f, 0x00, 0x00, 0x00, 502 | 0xff, 0xff, 0xe9, 0x54, 0x6d, 0x23, 0x47, 0x04, 0x00, 0x00, 503 | } 504 | -------------------------------------------------------------------------------- /rpc/defs.proto: -------------------------------------------------------------------------------- 1 | // Copyright 2018, OpenCensus Authors 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
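// defs.proto declares the message types shared by the demo services, plus two
// RPC services: GenID (issues request IDs) and Search (keyword search that
// returns SearchResults). This summary comment reflects the definitions below.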
14 | 15 | syntax = "proto3"; 16 | 17 | package rpc; 18 | 19 | message ID { 20 | string value = 1; 21 | } 22 | 23 | message Nothing { 24 | } 25 | 26 | message Query { 27 | string keywords = 1; 28 | int32 maxPages = 2; 29 | int32 maxResultsPerPage = 3; 30 | } 31 | 32 | message SearchResult { 33 | uint64 index = 1; 34 | repeated YouTubeResult items = 2; 35 | string err = 3; 36 | } 37 | 38 | message YouTubeResult { 39 | ID itemId = 1; 40 | string etag = 2; 41 | YouTubeID id = 3; 42 | string kind = 4; 43 | YouTubeSnippet snippet = 5; 44 | } 45 | 46 | message YouTubeSnippet { 47 | string channelId = 1; 48 | string channelTitle = 2; 49 | string description = 3; 50 | string publishedAt = 4; 51 | map<string, thumbnail> thumbnails = 5; 52 | string title = 6; 53 | } 54 | 55 | message thumbnail { 56 | int64 height = 1; 57 | int64 width = 2; 58 | string url = 3; 59 | } 60 | 61 | message YouTubeID { 62 | string kind = 1; 63 | string videoId = 2; 64 | string playlistId = 3; 65 | } 66 | 67 | service GenID { 68 | rpc NewID(Nothing) returns (ID) {} 69 | } 70 | 71 | message SearchResults { 72 | repeated SearchResult Results = 1; 73 | } 74 | 75 | service Search { 76 | rpc SearchIt(Query) returns (SearchResults) {} 77 | } 78 | -------------------------------------------------------------------------------- /rpc/search.go: -------------------------------------------------------------------------------- 1 | // Copyright 2018, OpenCensus Authors 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
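// search.go implements the SearchServer interface: it runs keyword searches
// against the YouTube Data API through the orijtech/youtube client, records
// OpenCensus stats and tags, traces each request, and maps the API results
// onto the rpc message types generated from defs.proto.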
14 | 15 | package rpc 16 | 17 | import ( 18 | "bytes" 19 | "context" 20 | "encoding/json" 21 | "fmt" 22 | "io" 23 | "io/ioutil" 24 | "log" 25 | "net/http" 26 | 27 | gat "google.golang.org/api/googleapi/transport" 28 | "google.golang.org/api/youtube/v3" 29 | 30 | "go.opencensus.io/plugin/ochttp" 31 | "go.opencensus.io/stats" 32 | "go.opencensus.io/tag" 33 | "go.opencensus.io/trace" 34 | 35 | "github.com/orijtech/callback" 36 | "github.com/orijtech/otils" 37 | yt "github.com/orijtech/youtube" 38 | ) 39 | 40 | type Search struct { 41 | client *yt.Client 42 | } 43 | 44 | type SearchInitOption interface { 45 | init(*Search) 46 | } 47 | 48 | type withClient struct { 49 | yc *yt.Client 50 | } 51 | 52 | var _ SearchInitOption = (*withClient)(nil) 53 | 54 | func (wf *withClient) init(ss *Search) { 55 | ss.client = wf.yc 56 | } 57 | 58 | func WithYouTubeAPIKey(apiKey string) SearchInitOption { 59 | yc, err := yt.NewWithHTTPClient(&http.Client{ 60 | Transport: &ochttp.Transport{Base: &gat.APIKey{Key: apiKey}}, 61 | }) 62 | if err != nil { 63 | log.Fatalf("WithYouTubeAPIKey: failed to create client, error: %v", err) 64 | } 65 | return &withClient{yc: yc} 66 | } 67 | 68 | var videoDetailingHTTPServerURL = otils.EnvOrAlternates("YOUTUBE_DETAILS_HTTP_SERVER_URL", "http://localhost:9944") 69 | 70 | func NewSearch(opts ...SearchInitOption) (*Search, error) { 71 | ss := new(Search) 72 | for _, opt := range opts { 73 | opt.init(ss) 74 | } 75 | return ss, nil 76 | } 77 | 78 | var _ SearchServer = (*Search)(nil) 79 | 80 | var youtubeSearches = stats.Int64("youtube_searches", "The number of YouTube searches", "1") 81 | var youtubeAPIErrors = stats.Int64("youtube_api_errors", "The number of YouTube API errors", "1") 82 | 83 | func (ss *Search) SearchIt(ctx context.Context, q *Query) (*SearchResults, error) { 84 | ctx, span := trace.StartSpan(ctx, "searchIt") 85 | defer span.End() 86 | 87 | // If blank or unset, ensure they are set 88 | q.setDefaultLimits() 89 | log.Printf("q: %v\n", q) 90 | 91 | ctx, err := tag.New(ctx, tag.Insert(tagKey("service"), "youtube-search")) 92 | if err != nil { 93 | return nil, err 94 | } 95 | stats.Record(ctx, youtubeSearches.M(1)) 96 | 97 | pagesChan, err := ss.client.Search(ctx, &yt.SearchParam{ 98 | Query: q.Keywords, 99 | MaxPage: uint64(q.MaxPages), 100 | MaxResultsPerPage: uint64(q.MaxResultsPerPage), 101 | }) 102 | if err != nil { 103 | stats.Record(ctx, youtubeAPIErrors.M(1)) 104 | span.Annotate([]trace.Attribute{ 105 | trace.StringAttribute("api_error", err.Error()), 106 | }, "YouTube API search error") 107 | return nil, err 108 | } 109 | 110 | var srl []*SearchResult 111 | i := uint64(0) 112 | idListForDetails := make([]string, 0, 10) 113 | for page := range pagesChan { 114 | if len(page.Items) == 0 { 115 | continue 116 | } 117 | 118 | items := make([]*YouTubeResult, 0, len(page.Items)) 119 | for _, item := range page.Items { 120 | if item != nil { 121 | sr := toSearchResult(item) 122 | items = append(items, sr) 123 | if sr.Id.VideoId != "" { 124 | idListForDetails = append(idListForDetails, sr.Id.VideoId) 125 | } 126 | } 127 | } 128 | 129 | if len(items) > 0 { 130 | i += 1 131 | srl = append(srl, &SearchResult{ 132 | Items: items, 133 | Index: i, 134 | }) 135 | } 136 | } 137 | 138 | if len(idListForDetails) > 0 && false { 139 | log.Printf("Firing off callback for %#v\n", idListForDetails) 140 | // Then fire off the callback to enable background 141 | // retrieval of detailed information of found videos. 
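// NOTE: the `&& false` in the guard above makes this branch unreachable, so
// the background-detailing callback below is never dispatched as written;
// removing that `&& false` would re-enable it.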
142 | cb := callback.Callback{ 143 | URL: videoDetailingHTTPServerURL, 144 | Payload: idListForDetails, 145 | } 146 | cb.Do(ctx) 147 | } 148 | 149 | return &SearchResults{Results: srl}, nil 150 | } 151 | 152 | func tagKey(key string) tag.Key { 153 | k, _ := tag.NewKey(key) 154 | return k 155 | } 156 | 157 | func toSearchResult(yi *youtube.SearchResult) *YouTubeResult { 158 | return &YouTubeResult{ 159 | Etag: yi.Etag, 160 | Kind: yi.Kind, 161 | Id: &YouTubeID{ 162 | Kind: yi.Id.Kind, 163 | VideoId: yi.Id.VideoId, 164 | PlaylistId: yi.Id.PlaylistId, 165 | }, 166 | Snippet: toSearchSnippet(yi.Snippet), 167 | } 168 | } 169 | 170 | func toSearchSnippet(snip *youtube.SearchResultSnippet) *YouTubeSnippet { 171 | return &YouTubeSnippet{ 172 | ChannelTitle: snip.ChannelTitle, 173 | ChannelId: snip.ChannelId, 174 | Description: snip.Description, 175 | PublishedAt: snip.PublishedAt, 176 | Title: snip.Title, 177 | Thumbnails: map[string]*Thumbnail{ 178 | "default": toThumbnail(snip.Thumbnails.Default), 179 | "high": toThumbnail(snip.Thumbnails.High), 180 | "maxres": toThumbnail(snip.Thumbnails.Maxres), 181 | "medium": toThumbnail(snip.Thumbnails.Medium), 182 | "standard": toThumbnail(snip.Thumbnails.Standard), 183 | }, 184 | } 185 | } 186 | 187 | func toThumbnail(th *youtube.Thumbnail) *Thumbnail { 188 | if th == nil { 189 | return nil 190 | } 191 | return &Thumbnail{ 192 | Height: th.Height, 193 | Width: th.Width, 194 | Url: th.Url, 195 | } 196 | } 197 | 198 | // And for HTTP based RPCs 199 | func (ss *Search) ServeHTTP(w http.ResponseWriter, r *http.Request) { 200 | ctx, span := trace.StartSpan(r.Context(), "/search") 201 | defer span.End() 202 | 203 | q, err := ExtractQuery(ctx, r) 204 | if err != nil { 205 | http.Error(w, err.Error(), http.StatusBadRequest) 206 | return 207 | } 208 | 209 | results, err := ss.SearchIt(ctx, q) 210 | if err != nil { 211 | http.Error(w, err.Error(), http.StatusInternalServerError) 212 | return 213 | } 214 | enc := json.NewEncoder(w) 215 | _ = enc.Encode(results) 216 | } 217 | 218 | func ExtractQuery(ctx context.Context, r *http.Request) (*Query, error) { 219 | ctx, span := trace.StartSpan(ctx, "/extract-query") 220 | defer span.End() 221 | 222 | var body io.Reader 223 | 224 | switch r.Method { 225 | default: 226 | return nil, fmt.Errorf("Unacceptable method %q", r.Method) 227 | 228 | case "PUT", "POST": 229 | defer r.Body.Close() 230 | body = r.Body 231 | span.Annotate([]trace.Attribute{ 232 | trace.StringAttribute("method", r.Method), 233 | trace.BoolAttribute("has_body", true), 234 | }, "Parsed a POST/PUT request") 235 | 236 | goto parseJSON 237 | 238 | case "GET": 239 | qv := r.URL.Query() 240 | outMap := make(map[string]string) 241 | for key := range qv { 242 | outMap[key] = qv.Get(key) 243 | } 244 | intermediateBlob, err := json.Marshal(outMap) 245 | if err != nil { 246 | return nil, err 247 | } 248 | body = bytes.NewReader(intermediateBlob) 249 | span.Annotate([]trace.Attribute{ 250 | trace.StringAttribute("method", "GET"), 251 | trace.BoolAttribute("has_body", false), 252 | }, "Parsed a GET request") 253 | 254 | } 255 | 256 | parseJSON: 257 | _, span2 := trace.StartSpan(ctx, "/parse-json") 258 | defer span2.End() 259 | 260 | // By this point we are extracting only JSON. 
261 | blob, err := ioutil.ReadAll(body) 262 | if err != nil { 263 | return nil, err 264 | } 265 | qy := new(Query) 266 | if err := json.Unmarshal(blob, qy); err != nil { 267 | return nil, err 268 | } 269 | qy.setDefaultLimits() 270 | return qy, nil 271 | } 272 | 273 | func (q *Query) setDefaultLimits() { 274 | if q.MaxResultsPerPage <= 0 { 275 | q.MaxResultsPerPage = 10 276 | } 277 | if q.MaxPages <= 0 { 278 | q.MaxPages = 1 279 | } 280 | } 281 | -------------------------------------------------------------------------------- /rpc/uuid.go: -------------------------------------------------------------------------------- 1 | // Copyright 2018, OpenCensus Authors 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | package rpc 16 | 17 | import ( 18 | "context" 19 | "encoding/json" 20 | "net/http" 21 | 22 | "github.com/rs/xid" 23 | "go.opencensus.io/trace" 24 | ) 25 | 26 | type GID int 27 | 28 | var _ GenIDServer = (*GID)(nil) 29 | 30 | func NewGenID() (*GID, error) { 31 | return new(GID), nil 32 | } 33 | 34 | func (gi *GID) NewID(ctx context.Context, _ *Nothing) (*ID, error) { 35 | ctx, span := trace.StartSpan(ctx, "newID") 36 | defer span.End() 37 | 38 | return &ID{Value: xid.New().String()}, nil 39 | } 40 | 41 | func (gi *GID) ServeHTTP(w http.ResponseWriter, r *http.Request) { 42 | id, err := gi.NewID(r.Context(), nil) 43 | if err != nil { 44 | http.Error(w, err.Error(), http.StatusInternalServerError) 45 | return 46 | } 47 | enc := json.NewEncoder(w) 48 | enc.Encode(id) 49 | } 50 | -------------------------------------------------------------------------------- /static/app.js: -------------------------------------------------------------------------------- 1 | var nodes = { 2 | searchInput: document.querySelector('.js-search-input'), 3 | searchButton: document.querySelector('.js-search-button'), 4 | searchSection: document.querySelector('.js-search-section'), 5 | resultsSection: document.querySelector('.js-results-section'), 6 | loader: document.querySelector('.js-loader') 7 | }; 8 | 9 | var isSearching = false; 10 | var searchResults = []; 11 | 12 | // Basic HTTP request method 13 | function sendRequest(object) { 14 | var successCallback = object.successCallback, 15 | errorCallback = object.errorCallback, 16 | method = object.method.toUpperCase(), 17 | data = object.data, 18 | url = object.url, 19 | xhr = new XMLHttpRequest(); 20 | 21 | var usesJson = (method === 'POST' || method === 'PUT'); 22 | 23 | xhr.open(method, url); 24 | xhr.setRequestHeader('Access-Control-Allow-Origin', '*'); 25 | 26 | if (usesJson) { 27 | xhr.setRequestHeader('Content-Type', 'application/json'); 28 | } 29 | 30 | xhr.onreadystatechange = function() { 31 | if (xhr.readyState === 4) { 32 | if (xhr.status === 200) { 33 | return successCallback(JSON.parse(xhr.responseText)); 34 | } else { 35 | return errorCallback(xhr.status); 36 | } 37 | } 38 | } 39 | 40 | if (usesJson) { 41 | xhr.send(JSON.stringify(data)); 42 | } else { 43 | xhr.send(); 44 | } 45 | } 46 | 47 | 
function collapseSearchSection() { 48 | if (nodes.searchSection.classList.contains('search-section-collapsed')) { 49 | return; 50 | } 51 | 52 | nodes.searchSection.classList.add('search-section-collapsed'); 53 | nodes.resultsSection.classList.add('results-section-expanded'); 54 | } 55 | 56 | function clearResults() { 57 | var results = [].slice.call(document.querySelectorAll('.result-container')); 58 | results.forEach(function(node) { 59 | node.parentNode.removeChild(node); 60 | }); 61 | } 62 | 63 | function onSearchBegin() { 64 | collapseSearchSection(); 65 | clearResults(); 66 | nodes.loader.classList.remove('hidden'); 67 | isSearching = true; 68 | } 69 | 70 | function onSearchEnd() { 71 | isSearching = false; 72 | nodes.loader.classList.add('hidden'); 73 | } 74 | 75 | function appendResult(result) { 76 | var div = document.createElement('div'); 77 | var anchor = document.createElement('a'); 78 | var imgDiv = document.createElement('div'); 79 | var img = document.createElement('img'); 80 | var p = document.createElement('p'); 81 | 82 | div.classList.add('result-container'); 83 | anchor.classList.add('result-link'); 84 | imgDiv.classList.add('result-image-container'); 85 | p.classList.add('result-title'); 86 | 87 | anchor.href = result.url; 88 | imgDiv.style.backgroundImage = 'url('+result.thumbnail+')'; 89 | imgDiv.style.backgroundPosition = 'center'; 90 | imgDiv.style.backgroundSize = 'cover'; 91 | p.textContent = result.title; 92 | 93 | div.appendChild(anchor); 94 | div.appendChild(imgDiv); 95 | div.appendChild(p); 96 | nodes.resultsSection.appendChild(div); 97 | 98 | if (result.resultType === 'video') { 99 | anchor.addEventListener('click', function(e) { 100 | e.preventDefault(); 101 | 102 | showInlinePlayer(result.id); 103 | }); 104 | } 105 | } 106 | 107 | function successCallback(response) { 108 | onSearchEnd(); 109 | 110 | var results = response[0].items.map(function(item) { 111 | var url = 'https://youtube.com/'; 112 | var resultType = null; 113 | var thumbnail = null; 114 | var id = null; 115 | 116 | if (item.id.videoId) { 117 | id = item.id.videoId; 118 | resultType = 'video'; 119 | url = url + 'watch?v=' + item.id.videoId; 120 | } else { 121 | id = item.id.channelId; 122 | resultType = 'channel'; 123 | url = url + 'channel/' + item.id.channelId; 124 | } 125 | 126 | if (item.snippet.thumbnails.high) { 127 | thumbnail = item.snippet.thumbnails.high.url; 128 | } else if (item.snippet.thumbnails.medium) { 129 | thumbnail = item.snippet.thumbnails.medium.url; 130 | } else { 131 | thumbnail = item.snippet.thumbnails.default.url; 132 | } 133 | 134 | return { 135 | id: id, 136 | url: url, 137 | title: item.snippet.title, 138 | thumbnail: thumbnail, 139 | resultType: resultType 140 | }; 141 | }); 142 | 143 | results.forEach(function(result) { 144 | appendResult(result); 145 | }); 146 | } 147 | 148 | function errorCallback() { 149 | onSearchEnd(); 150 | alert('Something went wrong.'); 151 | } 152 | 153 | function onSearchClick() { 154 | if (isSearching) { 155 | return; 156 | } 157 | 158 | onSearchBegin(); 159 | 160 | var query = nodes.searchInput.value.trim(); 161 | 162 | sendRequest({ 163 | method: 'POST', 164 | data: {"keywords": query, "maxResultsPerPage": 25}, 165 | url: window.location.origin + '/search', 166 | successCallback: successCallback, 167 | errorCallback: errorCallback 168 | }); 169 | } 170 | 171 | function showInlinePlayer(youtubeId) { 172 | var background = document.createElement('div'); 173 | var modal = document.createElement('div'); 174 | var loader = document.createElement('i'); 
175 | var iframe = document.createElement('iframe'); 176 | 177 | document.body.classList.add('modal-active'); 178 | background.classList.add('modal-background', 'js-modal-background'); 179 | modal.classList.add('modal'); 180 | loader.classList.add('material-icons', 'loader', 'modal-loader'); 181 | iframe.classList.add('modal-iframe'); 182 | 183 | loader.textContent = 'cached'; 184 | 185 | iframe.width = 560; 186 | iframe.height = 315; 187 | iframe.src = 'https://www.youtube.com/embed/' + youtubeId; 188 | iframe.frameBorder = 0; 189 | iframe.allow = 'autoplay; encrypted-media'; 190 | iframe.allowFullscreen = true; 191 | 192 | background.appendChild(modal); 193 | modal.appendChild(loader); 194 | modal.appendChild(iframe); 195 | document.body.appendChild(background); 196 | 197 | var scrollTop = window.pageYOffset || (document.documentElement || document.body.parentNode || document.body).scrollTop; 198 | background.style.top = scrollTop + 'px'; 199 | 200 | background.addEventListener('click', function() { 201 | background.parentNode.removeChild(background); 202 | document.body.classList.remove('modal-active'); 203 | }); 204 | } 205 | 206 | nodes.searchButton.addEventListener('click', onSearchClick); 207 | -------------------------------------------------------------------------------- /static/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | OpenCensus Demo 7 | 8 | 9 | 10 | 11 | 12 | 13 |
14 |
15 | 16 | 17 |
18 |
19 | 20 |
21 | 22 |
23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /static/styles.css: -------------------------------------------------------------------------------- 1 | body.app { 2 | margin: 0; 3 | padding: 0; 4 | font-family: 'Open Sans', sans-serif; 5 | 6 | /* Easy centering */ 7 | position: absolute; 8 | bottom: 0; 9 | right: 0; 10 | left: 0; 11 | top: 0; 12 | } 13 | 14 | body.modal-active { 15 | height: 100vh; 16 | overflow: hidden; 17 | } 18 | 19 | .section { 20 | flex: 1; 21 | } 22 | 23 | .search-section { 24 | display: flex; 25 | justify-content: center; 26 | height: auto; 27 | min-height: 50vh; 28 | 29 | -webkit-transition: min-height 800ms ease-out; 30 | -moz-transition: min-height 800ms ease-out; 31 | -o-transition: min-height 800ms ease-out; 32 | transition: min-height 800ms ease-out; 33 | } 34 | 35 | .search-section-collapsed { 36 | min-height: 100px; 37 | } 38 | 39 | .search-container { 40 | align-self: flex-end; 41 | position: relative; 42 | } 43 | 44 | .search-container input { 45 | border: none; 46 | font-size: 3em; 47 | width: calc(100% - 80px); 48 | } 49 | 50 | .search-container input:focus { 51 | outline: none; 52 | } 53 | 54 | .search-button { 55 | border: none; 56 | background: green; 57 | border-radius: 4px; 58 | font-size: 2em; 59 | color: #fff; 60 | cursor: pointer; 61 | background: #87e1ff; 62 | 63 | position: absolute; 64 | right: 10px; 65 | top: 5px; 66 | bottom: 5px; 67 | } 68 | 69 | .search-button:focus { 70 | outline: none; 71 | } 72 | 73 | .results-section { 74 | background: #87e1ff; /* pleasant blue */ 75 | padding: 40px 100px; 76 | min-height: 50vh; 77 | -webkit-transition: min-height 800ms ease-out; 78 | -moz-transition: min-height 800ms ease-out; 79 | -o-transition: min-height 800ms ease-out; 80 | transition: min-height 800ms ease-out; 81 | } 82 | 83 | .results-section-expanded { 84 | min-height: calc(100vh - 50px); 85 | } 86 | 87 | .result-container { 88 | float: left; 89 | width: 19%; 90 | margin-right: 10px; 91 | margin-bottom: 20px; 92 | background: #fff; 93 | border-radius: 4px; 94 | overflow: hidden; 95 | padding: 0; 96 | box-shadow: 0 1px #FFFFFF inset, 0 1px 3px rgba(34, 25, 25, 0.4); 97 | position: relative; 98 | } 99 | 100 | .result-link { 101 | position: absolute; 102 | top: 0; 103 | left: 0; 104 | right: 0; 105 | bottom: 0; 106 | } 107 | 108 | .result-image-container { 109 | width: 100%; 110 | padding-bottom: 56.25%; 111 | } 112 | 113 | .result-title { 114 | padding: 5px 10px; 115 | background: #fff; 116 | margin: 0; 117 | font-size: .8em; 118 | text-overflow: ellipsis; 119 | overflow: hidden; 120 | white-space: nowrap; 121 | } 122 | 123 | @-webkit-keyframes rotate{ 124 | to{-webkit-transform: rotate(360deg);} 125 | } 126 | 127 | @keyframes rotate { 128 | to{transform: rotate(360deg);} 129 | } 130 | 131 | .hidden { 132 | display: none !important; 133 | } 134 | 135 | .loader { 136 | animation: rotate 1.5s ease infinite; 137 | -webkit-animation: rotate 1.5s ease infinite; 138 | color: #fff; 139 | font-size: 3em; 140 | display: block; 141 | text-align: center; 142 | } 143 | 144 | .loader.modal-loader { 145 | color: rgba(0, 0, 0, .8); 146 | height: 48px; 147 | width: 48px; 148 | position: absolute; 149 | } 150 | 151 | .modal-background { 152 | z-index: 999; 153 | position: absolute; 154 | left: 0; 155 | right: 0; 156 | height: 100vh; 157 | background: rgba(0, 0, 0, .3); 158 | display: flex; 159 | align-items: center; 160 | justify-content: center; 161 | } 162 | 163 | .modal { 164 | height: fit-content; 
165 | border-radius: 4px; 166 | padding: 10px; 167 | background: #fff; 168 | 169 | position: relative; 170 | display: flex; 171 | align-items: center; 172 | justify-content: center; 173 | } 174 | 175 | .modal-loader { 176 | z-index: 1; 177 | } 178 | 179 | .modal-iframe { 180 | z-index: 2; 181 | border: none; 182 | } 183 | --------------------------------------------------------------------------------