├── .gitignore ├── .travis.yml ├── ATTRIBUTION.md ├── CMakeLists.txt ├── LICENSE ├── README.md ├── account ├── .dockerignore ├── CMakeLists.txt ├── Dockerfile ├── README.md ├── acct.py ├── build.sh ├── main.py └── shell.sh ├── ad-content ├── .dockerignore ├── CMakeLists.txt ├── README.md ├── ad-decision │ ├── .dockerignore │ ├── CMakeLists.txt │ ├── Dockerfile │ ├── README.md │ ├── adkeyword.py │ ├── build.sh │ ├── main.py │ ├── metadata.py │ ├── nginx.conf │ └── shell.sh ├── archive │ ├── .dockerignore │ ├── .gitignore │ ├── CMakeLists.txt │ ├── Dockerfile │ ├── build.sh │ └── shell.sh └── frontend │ ├── .dockerignore │ ├── CMakeLists.txt │ ├── Dockerfile │ ├── adstats.py │ ├── build.sh │ ├── inventory.json │ ├── inventory.py │ ├── main.py │ ├── nginx.conf │ └── shell.sh ├── ad-insertion ├── CMakeLists.txt ├── README.md ├── ad-segment │ ├── CMakeLists.txt │ ├── Dockerfile │ ├── build.sh │ ├── create_dash.sh │ ├── create_hls.sh │ └── shell.sh ├── ad-static │ ├── .dockerignore │ ├── .gitignore │ ├── CMakeLists.txt │ ├── Dockerfile │ ├── build.sh │ └── shell.sh ├── ad-transcode │ ├── CMakeLists.txt │ ├── Dockerfile │ ├── build.sh │ ├── main.py │ ├── process.py │ ├── shell.sh │ └── workload.py ├── analytics │ ├── .gitignore │ ├── CMakeLists.txt │ ├── README.md │ ├── VCAC-A │ │ ├── Dockerfile.5.launcher.vcac-a │ │ ├── ffmpeg │ │ │ ├── Dockerfile │ │ │ └── pipelines │ │ │ │ ├── emotion_recognition │ │ │ │ └── 1 │ │ │ │ │ └── pipeline.json │ │ │ │ ├── face_recognition │ │ │ │ └── 1 │ │ │ │ │ └── pipeline.json │ │ │ │ └── object_detection │ │ │ │ └── 1 │ │ │ │ └── pipeline.json │ │ ├── gst │ │ │ ├── Dockerfile │ │ │ └── pipelines │ │ │ │ ├── emotion_recognition │ │ │ │ └── 1 │ │ │ │ │ └── pipeline.json │ │ │ │ ├── face_recognition │ │ │ │ └── 1 │ │ │ │ │ └── pipeline.json │ │ │ │ └── object_detection │ │ │ │ └── 1 │ │ │ │ └── pipeline.json │ │ └── run-container.sh │ ├── Xeon │ │ ├── ffmpeg │ │ │ ├── Dockerfile │ │ │ └── pipelines │ │ │ │ ├── emotion_recognition │ │ │ │ └── 1 │ │ │ │ │ └── pipeline.json │ │ │ │ ├── face_recognition │ │ │ │ └── 1 │ │ │ │ │ └── pipeline.json │ │ │ │ └── object_detection │ │ │ │ └── 1 │ │ │ │ └── pipeline.json │ │ └── gst │ │ │ ├── Dockerfile │ │ │ └── pipelines │ │ │ ├── emotion_recognition │ │ │ └── 1 │ │ │ │ └── pipeline.json │ │ │ ├── face_recognition │ │ │ └── 1 │ │ │ │ └── pipeline.json │ │ │ └── object_detection │ │ │ └── 1 │ │ │ └── pipeline.json │ ├── analyze.py │ ├── build.sh │ ├── gallery │ │ ├── face_gallery_FP16 │ │ │ ├── features │ │ │ │ ├── BikeQuick1_boy_0_frame_0_idx_0.tensor │ │ │ │ ├── BikeQuick1_boy_1_frame_0_idx_0.tensor │ │ │ │ ├── BikeQuick1_girl_0_frame_0_idx_0.tensor │ │ │ │ ├── BikeQuick1_girl_1_frame_0_idx_0.tensor │ │ │ │ ├── HorseScene1_blue_man_0_frame_0_idx_0.tensor │ │ │ │ ├── HorseScene1_blue_man_1_frame_0_idx_0.tensor │ │ │ │ ├── HorseScene1_blue_man_1_frame_0_idx_1.tensor │ │ │ │ ├── HorseScene1_red_woman_0_frame_0_idx_0.tensor │ │ │ │ ├── HorseScene1_red_woman_1_frame_0_idx_0.tensor │ │ │ │ ├── WalkScene1_blue_man_0_frame_0_idx_0.tensor │ │ │ │ ├── WalkScene1_blue_man_1_frame_0_idx_0.tensor │ │ │ │ ├── WalkScene1_grey_man_0_frame_0_idx_0.tensor │ │ │ │ └── WalkScene1_grey_man_1_frame_0_idx_0.tensor │ │ │ ├── gallery.json │ │ │ └── gallery.json.gst │ │ └── face_gallery_FP32 │ │ │ ├── features │ │ │ ├── BikeQuick1_boy_0_frame_0_idx_0.tensor │ │ │ ├── BikeQuick1_boy_1_frame_0_idx_0.tensor │ │ │ ├── BikeQuick1_girl_0_frame_0_idx_0.tensor │ │ │ ├── BikeQuick1_girl_1_frame_0_idx_0.tensor │ │ │ ├── 
HorseScene1_blue_man_0_frame_0_idx_0.tensor │ │ │ ├── HorseScene1_blue_man_1_frame_0_idx_0.tensor │ │ │ ├── HorseScene1_blue_man_1_frame_0_idx_1.tensor │ │ │ ├── HorseScene1_red_woman_0_frame_0_idx_0.tensor │ │ │ ├── HorseScene1_red_woman_1_frame_0_idx_0.tensor │ │ │ ├── WalkScene1_blue_man_0_frame_0_idx_0.tensor │ │ │ ├── WalkScene1_blue_man_1_frame_0_idx_0.tensor │ │ │ ├── WalkScene1_grey_man_0_frame_0_idx_0.tensor │ │ │ └── WalkScene1_grey_man_1_frame_0_idx_0.tensor │ │ │ ├── gallery.json │ │ │ └── gallery.json.gst │ ├── merged_segment.py │ ├── models │ │ ├── emotion_recognition │ │ │ └── 1 │ │ │ │ ├── FP16 │ │ │ │ ├── emotions-recognition-retail-0003-fp16.bin │ │ │ │ └── emotions-recognition-retail-0003-fp16.xml │ │ │ │ ├── FP32 │ │ │ │ ├── emotions-recognition-retail-0003.bin │ │ │ │ └── emotions-recognition-retail-0003.xml │ │ │ │ ├── INT8 │ │ │ │ ├── emotions-recognition-retail-0003-int8.bin │ │ │ │ └── emotions-recognition-retail-0003-int8.xml │ │ │ │ ├── emotions-recognition-retail-0003.json │ │ │ │ └── emotions-recognition-retail-0003.json.gst │ │ ├── face_detection_adas │ │ │ └── 1 │ │ │ │ ├── FP16 │ │ │ │ ├── face-detection-adas-0001-fp16.bin │ │ │ │ └── face-detection-adas-0001-fp16.xml │ │ │ │ ├── FP32 │ │ │ │ ├── face-detection-adas-0001.bin │ │ │ │ └── face-detection-adas-0001.xml │ │ │ │ ├── INT8 │ │ │ │ ├── face-detection-adas-0001-int8.bin │ │ │ │ └── face-detection-adas-0001-int8.xml │ │ │ │ ├── face-detection-adas-0001.json │ │ │ │ └── face-detection-adas-0001.json.gst │ │ ├── face_detection_retail │ │ │ └── 1 │ │ │ │ ├── FP16 │ │ │ │ ├── face-detection-retail-0004-fp16.bin │ │ │ │ └── face-detection-retail-0004-fp16.xml │ │ │ │ ├── FP32 │ │ │ │ ├── face-detection-retail-0004.bin │ │ │ │ └── face-detection-retail-0004.xml │ │ │ │ ├── INT8 │ │ │ │ ├── face-detection-retail-0004-int8.bin │ │ │ │ └── face-detection-retail-0004-int8.xml │ │ │ │ ├── face-detection-retail-0004.json │ │ │ │ └── face-detection-retail-0004.json.gst │ │ ├── face_reidentification │ │ │ └── 1 │ │ │ │ ├── FP16 │ │ │ │ ├── face-reidentification-retail-0095-fp16.bin │ │ │ │ └── face-reidentification-retail-0095-fp16.xml │ │ │ │ ├── FP32 │ │ │ │ ├── face-reidentification-retail-0095.bin │ │ │ │ └── face-reidentification-retail-0095.xml │ │ │ │ ├── INT8 │ │ │ │ ├── face-reidentification-retail-0095.bin │ │ │ │ └── face-reidentification-retail-0095.xml │ │ │ │ ├── face-reidentification-retail-0095.json │ │ │ │ └── face-reidentification-retail-0095.json.gst │ │ ├── landmarks_regression │ │ │ └── 1 │ │ │ │ ├── FP16 │ │ │ │ ├── landmarks-regression-retail-0009-fp16.bin │ │ │ │ └── landmarks-regression-retail-0009-fp16.xml │ │ │ │ ├── FP32 │ │ │ │ ├── landmarks-regression-retail-0009.bin │ │ │ │ └── landmarks-regression-retail-0009.xml │ │ │ │ ├── INT8 │ │ │ │ ├── landmarks-regression-retail-0009.bin │ │ │ │ └── landmarks-regression-retail-0009.xml │ │ │ │ ├── landmarks-regression-retail-0009.json │ │ │ │ └── landmarks-regression-retail-0009.json.gst │ │ └── object_detection │ │ │ └── 1 │ │ │ ├── FP16 │ │ │ ├── mobilenet-ssd-fp16.bin │ │ │ └── mobilenet-ssd-fp16.xml │ │ │ ├── FP32 │ │ │ ├── mobilenet-ssd.bin │ │ │ └── mobilenet-ssd.xml │ │ │ ├── INT8 │ │ │ ├── mobilenet-ssd-int8.bin │ │ │ └── mobilenet-ssd-int8.xml │ │ │ ├── mobilenet-ssd.json │ │ │ └── mobilenet-ssd.json.gst │ ├── runva.py │ └── shell.sh ├── frontend │ ├── CMakeLists.txt │ ├── Dockerfile │ ├── build.sh │ ├── main.py │ ├── manifest.py │ ├── manifest_dash.py │ ├── manifest_hls.py │ ├── nginx.conf │ ├── schedule.py │ ├── segment.py │ └── 
shell.sh └── kafka2db │ ├── CMakeLists.txt │ ├── Dockerfile │ ├── Dockerfile.1.kafka │ ├── build.sh │ ├── main.py │ └── shell.sh ├── cdn ├── .dockignore ├── CMakeLists.txt ├── Dockerfile ├── README.md ├── analytics.py ├── build.sh ├── debug.py ├── main.py ├── nginx.conf └── shell.sh ├── common ├── CMakeLists.txt ├── Dockerfile ├── abr_hls_dash.py ├── build.sh ├── db.py ├── messaging.py ├── shell.sh ├── workload.py ├── zkdata.py └── zkstate.py ├── content-provider ├── .dockerignore ├── CMakeLists.txt ├── README.md ├── archive │ ├── .dockerignore │ ├── .gitignore │ ├── CMakeLists.txt │ ├── Dockerfile │ ├── build.sh │ ├── create_dash.sh │ ├── create_hls.sh │ └── shell.sh ├── frontend │ ├── .dockerignore │ ├── CMakeLists.txt │ ├── Dockerfile │ ├── build.sh │ ├── html │ │ ├── css │ │ │ ├── app.css │ │ │ ├── foundation.min.css │ │ │ └── scale.css │ │ ├── favicon.ico │ │ ├── header.shtml │ │ ├── icon │ │ │ ├── foundation-icons.css │ │ │ ├── foundation-icons.eot │ │ │ ├── foundation-icons.svg │ │ │ ├── foundation-icons.ttf │ │ │ └── foundation-icons.woff │ │ ├── image │ │ │ ├── Jack.jpg │ │ │ ├── Jenny.jpg │ │ │ ├── Jocelyn.jpg │ │ │ ├── John.jpg │ │ │ ├── Mike.jpg │ │ │ ├── Unknown.jpg │ │ │ ├── Victor.jpg │ │ │ ├── anger.png │ │ │ ├── happy.png │ │ │ ├── home-url.png │ │ │ ├── intel-logo-white-100.png │ │ │ ├── neutral.png │ │ │ ├── object_10_cow.png │ │ │ ├── object_11_diningtable.png │ │ │ ├── object_12_dog.png │ │ │ ├── object_13_horse.png │ │ │ ├── object_14_motorbike.png │ │ │ ├── object_15_person.png │ │ │ ├── object_16_pottedplant.png │ │ │ ├── object_17_sheep.png │ │ │ ├── object_18_sofa.png │ │ │ ├── object_19_train.png │ │ │ ├── object_1_aeroplane.png │ │ │ ├── object_1_face.png │ │ │ ├── object_20_tvmonitor.png │ │ │ ├── object_2_bicycle.png │ │ │ ├── object_3_bird.png │ │ │ ├── object_4_boat.png │ │ │ ├── object_5_bottle.png │ │ │ ├── object_6_bus.png │ │ │ ├── object_7_car.png │ │ │ ├── object_8_cat.png │ │ │ ├── object_9_chair.png │ │ │ ├── placeholder.jpg │ │ │ ├── sad.png │ │ │ ├── screen.png │ │ │ ├── surprise.png │ │ │ ├── vcac-a.gif │ │ │ └── xeon.png │ │ ├── index.html │ │ ├── js │ │ │ ├── app-api.js │ │ │ ├── app-debug.js │ │ │ ├── app-header.js │ │ │ ├── app-player.js │ │ │ ├── app-worker.js │ │ │ ├── app.js │ │ │ ├── scale.js │ │ │ └── vendor │ │ │ │ ├── Chart.min.js │ │ │ │ ├── chartjs-plugin-colorschemes.min.js │ │ │ │ ├── dash.all.min.js │ │ │ │ ├── foundation.min.js │ │ │ │ ├── hls.min.js │ │ │ │ ├── jquery-3.3.1.min.js │ │ │ │ ├── moment.min.js │ │ │ │ ├── shaka-player.compiled.js │ │ │ │ └── what-input.min.js │ │ ├── player.shtml │ │ └── scale.html │ ├── main.py │ ├── nginx.conf │ ├── playlist.py │ ├── schedule.py │ └── shell.sh └── transcode │ ├── CMakeLists.txt │ ├── Dockerfile │ ├── build.sh │ ├── main.py │ ├── shell.sh │ └── workload.py ├── deployment ├── CMakeLists.txt ├── certificate │ ├── .dockerignore │ ├── .gitignore │ ├── CMakeLists.txt │ ├── Dockerfile │ ├── build.sh │ ├── self-sign.sh │ └── shell.sh ├── docker-swarm │ ├── .gitignore │ ├── CMakeLists.txt │ ├── README.md │ ├── account.m4 │ ├── ad-content.m4 │ ├── ad-decision.m4 │ ├── ad-insertion.m4 │ ├── ad-transcode.m4 │ ├── analytics.m4 │ ├── build.sh │ ├── cdn.m4 │ ├── content-provider.m4 │ ├── content-transcode.m4 │ ├── database.m4 │ ├── docker-compose.yml.m4 │ ├── kafka.m4 │ ├── kafka2db.m4 │ ├── network.m4 │ ├── platform.m4 │ ├── secret.m4 │ ├── start.sh │ ├── stop.sh │ └── zookeeper.m4 └── kubernetes │ ├── .gitignore │ ├── CMakeLists.txt │ ├── README.md │ ├── ad-archive-pv.yaml.m4 │ ├── 
ad-cache-pv.yaml.m4 │ ├── ad-segment-pv.yaml.m4 │ ├── ad-static-pv.yaml.m4 │ ├── build.sh │ ├── helm │ ├── .gitignore │ ├── CMakeLists.txt │ ├── adi │ │ ├── Chart.yaml │ │ ├── README.md │ │ ├── templates │ │ │ ├── _helpers.tpl │ │ │ ├── account.yaml │ │ │ ├── ad-archive-pvc.yaml │ │ │ ├── ad-cache-pvc.yaml │ │ │ ├── ad-content.yaml │ │ │ ├── ad-decision.yaml │ │ │ ├── ad-insertion.yaml │ │ │ ├── ad-segment-pvc.yaml │ │ │ ├── ad-static-pvc.yaml │ │ │ ├── ad-transcode.yaml │ │ │ ├── analytics.yaml │ │ │ ├── cdn.yaml │ │ │ ├── content-provider.yaml │ │ │ ├── content-transcode.yaml │ │ │ ├── database.yaml │ │ │ ├── kafka.yaml │ │ │ ├── kafka2db.yaml │ │ │ ├── video-archive-pvc.yaml │ │ │ ├── video-cache-pvc.yaml │ │ │ └── zookeeper.yaml │ │ └── values.yaml.m4 │ ├── build.sh │ ├── start.sh │ └── stop.sh │ ├── mkvolume.sh │ ├── start.sh │ ├── stop.sh │ ├── video-archive-pv.yaml.m4 │ ├── video-cache-pv.yaml.m4 │ ├── volume-info.sh │ └── yaml │ ├── CMakeLists.txt │ ├── account.yaml.m4 │ ├── ad-archive-pvc.yaml.m4 │ ├── ad-cache-pvc.yaml.m4 │ ├── ad-content.yaml.m4 │ ├── ad-decision.yaml.m4 │ ├── ad-insertion.yaml.m4 │ ├── ad-segment-pvc.yaml.m4 │ ├── ad-static-pvc.yaml.m4 │ ├── ad-transcode.yaml.m4 │ ├── analytics.yaml.m4 │ ├── build.sh │ ├── cdn.yaml.m4 │ ├── content-provider.yaml.m4 │ ├── content-transcode.yaml.m4 │ ├── database.yaml.m4 │ ├── kafka.yaml.m4 │ ├── kafka2db.yaml.m4 │ ├── platform.m4 │ ├── start.sh │ ├── stop.sh │ ├── video-archive-pvc.yaml.m4 │ ├── video-cache-pvc.yaml.m4 │ └── zookeeper.yaml.m4 ├── doc ├── ad-insertion-sequence.png ├── ad-insertion-service-arch.png ├── adi-ui.gif ├── cmake.md ├── content-provider-arch.png ├── customize.md ├── dist.md ├── overall-arch.png ├── script.md └── vcac-a.md ├── script ├── build.sh ├── deployment.cmake ├── mk-dist.sh ├── scan-all.cmake ├── scan-yaml.awk ├── service.cmake ├── shell.sh ├── travis_build.sh └── update-image.sh └── volume ├── .gitignore ├── ad └── archive │ ├── car6.mp4 │ ├── catfood.mp4 │ └── travel6.mp4 └── video └── archive └── .gitignore /.gitignore: -------------------------------------------------------------------------------- 1 | build/* 2 | archive/* 3 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: bash 2 | dist: xenial 3 | services: docker 4 | 5 | addons: 6 | apt: 7 | packages: 8 | - docker-ce 9 | update: true 10 | 11 | env: 12 | - image=ad-content/ad-decision 13 | - image=account 14 | - image=common 15 | - image=ad-content/frontend 16 | - image=ad-content/archive 17 | - image=ad-insertion/frontend 18 | - image=ad-insertion/ad-static 19 | - image=ad-insertion/kafka2db 20 | - image=ad-insertion/ad-transcode 21 | - image=deployment 22 | - image=cdn 23 | - image=content-provider/transcode 24 | - image=content-provider/frontend 25 | - image=content-provider/archive 26 | 27 | before_install: 28 | - docker version 29 | 30 | script: 31 | - mkdir build && cd build && cmake .. 32 | - cd ${image} 33 | - make 34 | 35 | matrix: 36 | include: 37 | # ad-insertion/analytic-service 38 | - name: ad-insertion/analytics 39 | env: image=ad-insertion/analytics 40 | script: 41 | - mkdir build && cd build && cmake .. 
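        # travis_wait raises the Travis CI no-output timeout (the argument is the number of minutes to allow); the analytics image build can run long enough to hit the default limit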
42 | - travis_wait 180 ${TRAVIS_BUILD_DIR}/script/travis_build.sh ad-insertion/analytics 43 | after_failure: 44 | - tail --lines=5000 log_ad-insertion_analytics.log 45 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2019, Intel Corporation 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | * Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /account/.dockerignore: -------------------------------------------------------------------------------- 1 | CMakeLists.txt 2 | *.sh 3 | README.md 4 | -------------------------------------------------------------------------------- /account/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "ssai_account_service") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | add_dependencies(build_${service} build_ssai_common) 4 | -------------------------------------------------------------------------------- /account/Dockerfile: -------------------------------------------------------------------------------- 1 | 2 | FROM ssai_common 3 | 4 | COPY *.py /home/ 5 | CMD ["/bin/bash","-c","/home/main.py"] 6 | WORKDIR /home 7 | EXPOSE 80 8 | 9 | #### 10 | ARG USER=docker 11 | ARG GROUP=docker 12 | ARG UID 13 | ARG GID 14 | ## must use ; here to ignore user exist status code 15 | RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} ${GROUP}; \ 16 | [ ${UID} -gt 0 ] && useradd -d /home -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} ${USER}; \ 17 | chown -R ${UID}:${GID} /home 18 | USER ${UID} 19 | #### 20 | -------------------------------------------------------------------------------- /account/README.md: -------------------------------------------------------------------------------- 1 | The Account service implements a REST service that returns the user profile information such as movie subscription and AD preference. 
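For example, the profile of a user can be fetched with a plain HTTP GET (the hostname below is a placeholder; use whatever name your deployment assigns to the Account service):

```bash
# Hypothetical hostname; substitute the Account service address used in your deployment.
curl "http://<account-service-host>:8080/acct?name=guest"
# The profile is returned as JSON, e.g. for "guest":
# {"subscription": "basic", "ad-preference": "any"}
```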
2 | 3 | ### User: 4 | 5 | For simplicity, the implementation hard-codes a few users and their profile information as follows: 6 | 7 | | Username | Subscription | AD Preference | 8 | |:--------:|:------------:|:-------------:| 9 | | guest | basic | any | 10 | | 20 popular names | universal | sports or family randomly selected at startup | 11 | | others | universal | any | 12 | 13 | ### Subscription: 14 | 15 | The following subscription levels are supported: 16 | 17 | | Subscription | Description | 18 | |:------------:|-------------| 19 | | basic | Play three videos out of the video archive. | 20 | | universal | Play all videos in the video archive. | 21 | 22 | ### AD Preference: 23 | 24 | The supported AD preferences are as follows: 25 | 26 | | Preference | Description | 27 | |:----------:|-------------| 28 | | any | No preference. | 29 | | sports | Prefer to see sports-related ADs. | 30 | | family | Prefer to see family-product ADs. | 31 | 32 | ### Interface: 33 | 34 | The Account service exposes the following interface on port 8080: 35 | 36 | | Path | Description | 37 | |----|------| 38 | |GET /acct?name=\|The endpoint returns the user profile information. | 39 | -------------------------------------------------------------------------------- /account/acct.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from tornado import web 4 | import random 5 | 6 | class AcctHandler(web.RequestHandler): 7 | def __init__(self, app, request, **kwargs): 8 | super(AcctHandler, self).__init__(app, request, **kwargs) 9 | self._users={ 10 | "guest": { 11 | "subscription": "basic", 12 | "ad-preference": "any", 13 | }, 14 | "default": { 15 | "subscription": "universal", 16 | "ad-preference": "any", 17 | }, 18 | } 19 | random.seed() 20 | for name in ["sophia","emma","isabella","olivia","ava","emily","abigail","mia","madison","elizabeth","sophia","emma","isabella","olivia","ava","emily","abigail","mia","madison","elizabeth"]: 21 | self._users[name]={ 22 | "subscription": "universal", 23 | "ad-preference": random.choice(["sports","family"]), # randomly pick a preference at startup 24 | } 25 | 26 | def check_origin(self, origin): 27 | return True 28 | 29 | def get(self): 30 | name = str(self.get_argument("name")).lower() 31 | if name not in self._users: name="default" 32 | self.set_status(200,"OK") 33 | self.write(self._users[name]) 34 | -------------------------------------------------------------------------------- /account/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_account_service" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . 
"$DIR/../script/build.sh" 7 | -------------------------------------------------------------------------------- /account/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from tornado import ioloop, web 4 | from tornado.options import define, options, parse_command_line 5 | import acct 6 | 7 | app = web.Application([(r'/acct', acct.AcctHandler)]) 8 | 9 | if __name__ == "__main__": 10 | define("port", default=8080, help="the binding port", type=int) 11 | define("ip", default="0.0.0.0", help="the binding ip") 12 | parse_command_line() 13 | print("Listening to " + options.ip + ":" + str(options.port)) 14 | app.listen(options.port, address=options.ip) 15 | ioloop.IOLoop.instance().start() 16 | -------------------------------------------------------------------------------- /account/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_account_service" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . "$DIR/../script/shell.sh" 7 | -------------------------------------------------------------------------------- /ad-content/.dockerignore: -------------------------------------------------------------------------------- 1 | CMakeLists.txt 2 | README.md 3 | *.sh 4 | -------------------------------------------------------------------------------- /ad-content/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | include("${CMAKE_SOURCE_DIR}/script/scan-all.cmake") -------------------------------------------------------------------------------- /ad-content/README.md: -------------------------------------------------------------------------------- 1 | The AD Content service archives the AD videos and serves them upon request. 2 | 3 | ### Interface: 4 | 5 | The AD Content service exposes the following interface(s) on 8080: 6 | 7 | | Path | Description | 8 | |----|------| 9 | |GET/| Return the AD video content. | 10 | |GET/inventory| Returns the inventory. | 11 | |GET/adstats | Return a stat page that shows the statistics of AD clips played. | 12 | |POST/adstats | Submit an AD viewing report, in the request body. | 13 | 14 | e.g. 
a report posted to /adstats: 15 | {"uri":"cat.mp4", "clicked":1, "watched":1} 16 | 17 | -------------------------------------------------------------------------------- /ad-content/ad-decision/.dockerignore: -------------------------------------------------------------------------------- 1 | CMakeLists.txt 2 | README.md 3 | *.sh 4 | -------------------------------------------------------------------------------- /ad-content/ad-decision/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "ssai_ad_decision_frontend") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | add_dependencies(build_${service} build_ssai_common) 4 | -------------------------------------------------------------------------------- /ad-content/ad-decision/Dockerfile: -------------------------------------------------------------------------------- 1 | 2 | FROM ssai_common 3 | 4 | COPY *.py /home/ 5 | COPY *.conf /etc/nginx/ 6 | CMD ["/bin/bash","-c","/home/main.py&/usr/sbin/nginx"] 7 | WORKDIR /home 8 | 9 | #### 10 | ARG USER=docker 11 | ARG GROUP=docker 12 | ARG UID 13 | ARG GID 14 | ## must use ; here to ignore user exist status code 15 | RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} ${GROUP}; \ 16 | [ ${UID} -gt 0 ] && useradd -d /home -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} ${USER}; \ 17 | touch /var/run/nginx.pid && \ 18 | mkdir -p /var/log/nginx /var/lib/nginx /var/www/cache && \ 19 | chown -R ${UID}:${GID} /home /var/run/nginx.pid /var/www /var/log/nginx /var/lib/nginx 20 | USER ${UID} 21 | #### 22 | -------------------------------------------------------------------------------- /ad-content/ad-decision/README.md: -------------------------------------------------------------------------------- 1 | 2 | The AD Decision Service 3 | 4 | The service decides which AD to show in the next AD break and returns the AD URL. The decision is based on a combination of the user's AD preference and the available cues, i.e., the results of analyzing the video content. 5 | 6 | ### Interface: 7 | 8 | The AD Decision service exposes the following interface(s) on port 8080: 9 | 10 | | Path | Description | 11 | |----|------| 12 | |GET /metadata | Returns the response template. | 13 | |POST /metadata | Submit the list of metadata (in the request body): {"metadata":[],"user":{"name":"","keywords":[]}}. Returns the AD URL. | 14 | 15 | -------------------------------------------------------------------------------- /ad-content/ad-decision/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_ad_decision_frontend" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . 
"$DIR/../../script/build.sh" 7 | -------------------------------------------------------------------------------- /ad-content/ad-decision/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from tornado import ioloop, web 4 | from tornado.options import define, options, parse_command_line 5 | from metadata import MetaDataHandler 6 | 7 | app = web.Application([ 8 | (r'/metadata',MetaDataHandler), 9 | ]) 10 | 11 | if __name__ == "__main__": 12 | define("port", default=2222, help="the binding port", type=int) 13 | define("ip", default="127.0.0.1", help="the binding ip") 14 | parse_command_line() 15 | print("Listening to " + options.ip + ":" + str(options.port)) 16 | app.listen(options.port, address=options.ip) 17 | ioloop.IOLoop.instance().start() 18 | 19 | -------------------------------------------------------------------------------- /ad-content/ad-decision/nginx.conf: -------------------------------------------------------------------------------- 1 | 2 | worker_processes 4; 3 | worker_rlimit_nofile 2048; 4 | daemon off; 5 | 6 | events { 7 | worker_connections 1024; 8 | } 9 | 10 | http { 11 | include mime.types; 12 | default_type application/octet-stream; 13 | 14 | server { 15 | listen 8080; 16 | server_name _; 17 | 18 | location /metadata { 19 | rewrite ^(/metadata.*) $1 break; 20 | proxy_pass http://localhost:2222; 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /ad-content/ad-decision/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_ad_decision_frontend" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . "$DIR/../../script/shell.sh" 7 | -------------------------------------------------------------------------------- /ad-content/archive/.dockerignore: -------------------------------------------------------------------------------- 1 | CMakeLists.txt 2 | *.mp4 3 | *.sh -------------------------------------------------------------------------------- /ad-content/archive/.gitignore: -------------------------------------------------------------------------------- 1 | *.mp4 -------------------------------------------------------------------------------- /ad-content/archive/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "ssai_ad_content_archive") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") -------------------------------------------------------------------------------- /ad-content/archive/Dockerfile: -------------------------------------------------------------------------------- 1 | 2 | FROM ubuntu:18.04 3 | RUN apt-get update && apt-get install -y -q youtube-dl && rm -rf /var/lib/apt/lists/* 4 | 5 | #### 6 | ARG USER=docker 7 | ARG GROUP=docker 8 | ARG UID 9 | ARG GID 10 | ## must use ; here to ignore user exist status code 11 | RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} ${GROUP}; \ 12 | [ ${UID} -gt 0 ] && useradd -d /home -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} ${USER}; \ 13 | chown -R ${UID}:${GID} /home 14 | USER ${UID} 15 | #### 16 | 17 | -------------------------------------------------------------------------------- /ad-content/archive/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | clips=() 5 | 6 | case "$(cat /proc/1/sched | head -n 1)" in 7 | *build.sh*) 8 | cd /mnt 9 | for clip in 
"${clips[@]}"; do 10 | clip_name="${clip/*=/}.mp4" 11 | if test ! -f "$clip_name"; then 12 | youtube-dl -f 'mp4[width=1920]/mp4[width=1280]/mp4[width=640]/mp4[width=480]' -o "$clip_name" "$clip" 13 | fi 14 | done 15 | ;; 16 | *) 17 | download="false" 18 | mkdir -p "${DIR}/../../volume/ad/archive" 19 | for clip in "${clips[@]}"; do 20 | if test ! -f "${DIR}/../../volume/ad/archive/${clip/*=/}.mp4"; then 21 | download="true" 22 | fi 23 | done 24 | 25 | IMAGE="ssai_ad_content_archive" 26 | . "$DIR/../../script/build.sh" 27 | if test "$download" = "true"; then 28 | . "$DIR/shell.sh" /home/build.sh 29 | fi 30 | ;; 31 | esac 32 | -------------------------------------------------------------------------------- /ad-content/archive/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_ad_content_archive" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | OPTIONS=("--volume=$DIR/../../volume/ad/archive:/mnt:rw" "--volume=$DIR:/home:ro") 6 | 7 | . "$DIR/../../script/shell.sh" 8 | -------------------------------------------------------------------------------- /ad-content/frontend/.dockerignore: -------------------------------------------------------------------------------- 1 | CMakeLists.txt 2 | *.sh -------------------------------------------------------------------------------- /ad-content/frontend/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "ssai_ad_content_frontend") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | add_dependencies(build_${service} build_ssai_common) 4 | -------------------------------------------------------------------------------- /ad-content/frontend/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ssai_common 2 | 3 | COPY *.py /home/ 4 | COPY inventory.json /home/ 5 | COPY *.conf /etc/nginx/ 6 | CMD ["/bin/bash","-c","/home/main.py&/usr/sbin/nginx"] 7 | VOLUME ["/var/www/archive"] 8 | WORKDIR /home 9 | EXPOSE 8080 10 | 11 | #### 12 | ARG USER=docker 13 | ARG GROUP=docker 14 | ARG UID 15 | ARG GID 16 | ## must use ; here to ignore user exist status code 17 | RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} ${GROUP}; \ 18 | [ ${UID} -gt 0 ] && useradd -d /home -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} ${USER}; \ 19 | touch /var/run/nginx.pid && \ 20 | mkdir -p /var/log/nginx /var/lib/nginx /var/www/cache && \ 21 | chown -R ${UID}:${GID} /home /var/run/nginx.pid /var/www /var/log/nginx /var/lib/nginx 22 | USER ${UID} 23 | #### 24 | 25 | -------------------------------------------------------------------------------- /ad-content/frontend/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_ad_content_frontend" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . 
"$DIR/../../script/build.sh" 7 | -------------------------------------------------------------------------------- /ad-content/frontend/inventory.json: -------------------------------------------------------------------------------- 1 | [ 2 | {"uri" : "http://ad-content-service:8080/car6.mp4", 3 | "navigation_url" : "http://www.intel.com", 4 | "keywords" : ["car", "sports", "motorbike", "bicycle", "anger", "sad", "surprise"], 5 | "duration" : "5" 6 | }, 7 | {"uri" : "http://ad-content-service:8080/travel6.mp4", 8 | "navigation_url" : "http://www.intel.com", 9 | "keywords" : ["person", "diningtable", "sofa", "train", "boat", "aeroplane", "face", "tvmonitor", "chair", "happy", "neutral", "bus"], 10 | "duration" : "5" 11 | }, 12 | {"uri" : "http://ad-content-service:8080/catfood.mp4", 13 | "navigation_url" : "http://www.intel.com", 14 | "keywords" : ["bottle","cat", "pets", "cow", "dog", "horse", "pottedplant", "sheep", "bird"], 15 | "duration" : "5" 16 | } 17 | ] 18 | -------------------------------------------------------------------------------- /ad-content/frontend/inventory.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from tornado import web, gen 4 | from tornado.httpclient import AsyncHTTPClient 5 | from os import listdir 6 | import json 7 | 8 | archive_root="/var/www/archive" 9 | 10 | class InventoryHandler(web.RequestHandler): 11 | def __init__(self, app, request, **kwargs): 12 | super(InventoryHandler, self).__init__(app, request, **kwargs) 13 | inventory = [] 14 | with open('/home/inventory.json') as f: 15 | inventory = json.loads(str(f.read())) 16 | self._inventory=json.dumps(inventory) 17 | 18 | def check_origin(self, origin): 19 | return True 20 | 21 | @gen.coroutine 22 | def get(self): 23 | self.set_status(200,"OK") 24 | self.set_header("Content-Type", "application/json") 25 | self.write(self._inventory) 26 | -------------------------------------------------------------------------------- /ad-content/frontend/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from tornado import ioloop, web 4 | from tornado.options import define, options, parse_command_line 5 | from inventory import InventoryHandler 6 | from adstats import AdStatsHandler 7 | 8 | app = web.Application([ 9 | (r'/inventory',InventoryHandler), 10 | (r'/adstats',AdStatsHandler), 11 | ]) 12 | 13 | if __name__ == "__main__": 14 | define("port", default=2222, help="the binding port", type=int) 15 | define("ip", default="127.0.0.1", help="the binding ip") 16 | parse_command_line() 17 | print("Listening to " + options.ip + ":" + str(options.port)) 18 | app.listen(options.port, address=options.ip) 19 | ioloop.IOLoop.instance().start() 20 | -------------------------------------------------------------------------------- /ad-content/frontend/nginx.conf: -------------------------------------------------------------------------------- 1 | worker_processes 5; 2 | worker_rlimit_nofile 8192; 3 | daemon off; 4 | 5 | events { 6 | worker_connections 4096; 7 | } 8 | 9 | http { 10 | include mime.types; 11 | default_type application/octet-stream; 12 | 13 | server { 14 | listen 8080; 15 | server_name _; 16 | 17 | location / { 18 | root /var/www/archive/; 19 | add_header 'Access-Control-Allow-Origin' '*' always; 20 | } 21 | 22 | location /inventory { 23 | rewrite ^(/inventory.*) $1 break; 24 | proxy_pass http://localhost:2222; 25 | } 26 | 27 | location /adstats { 28 | rewrite ^(/adstats.*) $1 break; 
29 | proxy_pass http://localhost:2222; 30 | } 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /ad-content/frontend/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_ad_content_frontend" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . "$DIR/../script/shell.sh" 7 | -------------------------------------------------------------------------------- /ad-insertion/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | include("${CMAKE_SOURCE_DIR}/script/scan-all.cmake") 2 | -------------------------------------------------------------------------------- /ad-insertion/ad-segment/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "ssai_ad_content_segment") 2 | add_custom_target(addash "${CMAKE_CURRENT_SOURCE_DIR}/build.sh" "dash" "${MINRESOLUTION}" DEPENDS build_${service}) 3 | add_custom_target(adhls "${CMAKE_CURRENT_SOURCE_DIR}/build.sh" "hls" "${MINRESOLUTION}" DEPENDS build_${service}) 4 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 5 | add_dependencies(build_${service} build_ssai_common) 6 | -------------------------------------------------------------------------------- /ad-insertion/ad-segment/Dockerfile: -------------------------------------------------------------------------------- 1 | 2 | FROM openvisualcloud/xeon-ubuntu1804-media-ffmpeg:20.7 3 | RUN apt-get update && apt-get install -y -q bc && rm -rf /var/lib/apt/lists/*; 4 | 5 | #### 6 | ARG USER=docker 7 | ARG GROUP=docker 8 | ARG UID 9 | ARG GID 10 | ## must use ; here to ignore user exist status code 11 | RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} ${GROUP}; \ 12 | [ ${UID} -gt 0 ] && useradd -d /home -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} ${USER}; \ 13 | chown -R ${UID}:${GID} /home 14 | USER ${UID} 15 | #### 16 | 17 | -------------------------------------------------------------------------------- /ad-insertion/ad-segment/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_ad_content_segment" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | clips=() 6 | 7 | case "$(cat /proc/1/sched | head -n 1)" in 8 | *build.sh*) 9 | cd /mnt 10 | mkdir -p segment/archive segment/dash segment/hls 11 | for clip in `find archive -name "*.mp4" -print`; do 12 | clip_name="${clip/*\//}" 13 | #echo $clip_name 14 | if test ! -f "segment/archive/$clip_name"; then 15 | ffmpeg -i "archive/$clip_name" -vf "scale=1920:960,pad=1920:1080:0:60:black,drawtext=text='Server-Side AD Insertion':x=(w-text_w)/2:y=(h-text_h)/3:fontsize=100:fontcolor=white:fontfile=/usr/share/fonts/truetype/dejavu/DejaVuSans.ttf" -y "segment/archive/$clip_name" 16 | fi 17 | if test "$1" == "dash"; then 18 | /home/create_dash.sh "$clip_name" ${2} & 19 | fi 20 | if test "$1" == "hls"; then 21 | /home/create_hls.sh "$clip_name" ${2} & 22 | fi 23 | done 24 | wait 25 | ;; 26 | *) 27 | mkdir -p "$DIR/../../volume/ad/archive" 28 | mkdir -p "$DIR/../../volume/ad/segment/archive" 29 | mkdir -p "$DIR/../../volume/ad/segment/hls" 30 | mkdir -p "$DIR/../../volume/ad/segment/dash" 31 | . "$DIR/../../script/build.sh" 32 | . 
"$DIR/shell.sh" /home/build.sh $@ 33 | ;; 34 | esac 35 | 36 | -------------------------------------------------------------------------------- /ad-insertion/ad-segment/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_ad_content_segment" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | OPTIONS=("--volume=$DIR/../../volume/ad:/mnt:rw" "--volume=$DIR:/home:ro") 6 | 7 | . "$DIR/../../script/shell.sh" 8 | -------------------------------------------------------------------------------- /ad-insertion/ad-static/.dockerignore: -------------------------------------------------------------------------------- 1 | CMakeLists.txt 2 | archive/* 3 | -------------------------------------------------------------------------------- /ad-insertion/ad-static/.gitignore: -------------------------------------------------------------------------------- 1 | archive/* 2 | -------------------------------------------------------------------------------- /ad-insertion/ad-static/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "ssai_ad_insertion_ad_static") 2 | add_custom_target(adstatic "${CMAKE_CURRENT_SOURCE_DIR}/build.sh" "adstatic" "${MINRESOLUTION}" DEPENDS build_${service}) 3 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 4 | -------------------------------------------------------------------------------- /ad-insertion/ad-static/Dockerfile: -------------------------------------------------------------------------------- 1 | 2 | FROM openvisualcloud/xeon-ubuntu1804-media-ffmpeg:20.7 3 | 4 | #### 5 | ARG USER=docker 6 | ARG GROUP=docker 7 | ARG UID 8 | ARG GID 9 | ## must use ; here to ignore user exist status code 10 | RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} ${GROUP}; \ 11 | [ ${UID} -gt 0 ] && useradd -d /home -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} ${USER}; \ 12 | chown -R ${UID}:${GID} /home 13 | USER ${UID} 14 | #### 15 | 16 | -------------------------------------------------------------------------------- /ad-insertion/ad-static/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_ad_insertion_ad_static" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | OPTIONS=("--volume=$DIR/../../volume/ad/static:/mnt:rw" "--volume=$DIR:/home:ro") 6 | 7 | . 
"$DIR/../../script/shell.sh" 8 | -------------------------------------------------------------------------------- /ad-insertion/ad-transcode/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "ssai_ad_transcode") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | add_dependencies(build_${service} build_ssai_common) 4 | -------------------------------------------------------------------------------- /ad-insertion/ad-transcode/Dockerfile: -------------------------------------------------------------------------------- 1 | 2 | FROM openvisualcloud/xeon-ubuntu1804-media-ffmpeg:20.7 3 | 4 | RUN DEBIAN_FRONTEND=noninteractive apt-get update && apt-get install -y -q --no-install-recommends nginx python3-tornado python3-urllib3 python3-requests python3-psutil python3-pip && rm -rf /var/lib/apt/lists/* && \ 5 | pip3 install 'kafka-python>=1.4.7' 'kazoo>=2.6.1' 6 | 7 | COPY --from=ssai_common /home/*.py /home/ 8 | COPY *.py /home/ 9 | CMD ["/bin/bash","-c","/home/main.py"] 10 | WORKDIR /home 11 | 12 | #### 13 | ARG USER=docker 14 | ARG GROUP=docker 15 | ARG UID 16 | ARG GID 17 | ## must use ; here to ignore user exist status code 18 | RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} ${GROUP}; \ 19 | [ ${UID} -gt 0 ] && useradd -d /home -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} ${USER}; \ 20 | touch /var/run/nginx.pid && \ 21 | mkdir -p /var/log/nginx /var/lib/nginx /var/www/cache && \ 22 | chown -R ${UID}:${GID} /home /var/run/nginx.pid /var/www /var/log/nginx /var/lib/nginx 23 | USER ${UID} 24 | #### 25 | 26 | -------------------------------------------------------------------------------- /ad-insertion/ad-transcode/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_ad_transcode" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . "$DIR/../../script/build.sh" 7 | -------------------------------------------------------------------------------- /ad-insertion/ad-transcode/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from messaging import Consumer 4 | from process import ADTranscode 5 | from db import DataBase 6 | import traceback 7 | import time 8 | 9 | kafka_topic="ad_transcode_sched" 10 | kafka_group="ad_transcode_creator" 11 | 12 | db = DataBase() 13 | consumer = Consumer(kafka_group) 14 | 15 | while True: 16 | try: 17 | print("ad transcode service: listening to messages", flush=True) 18 | for msg in consumer.messages(kafka_topic): 19 | ADTranscode(msg,db) 20 | except Exception as e: 21 | print(traceback.format_exc(), flush=True) 22 | time.sleep(10) 23 | 24 | consumer.close() 25 | db.close() 26 | -------------------------------------------------------------------------------- /ad-insertion/ad-transcode/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_ad_transcode" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | OPTIONS=("--volume=${DIR}/../../volume/ad/cache/dash:/var/www/adinsert/dash:ro" "--volume=${DIR}/../../volume/ad/cache/hls:/var/www/adinsert/hls:ro" "--volume=${DIR}/../../volume/ad/static:/var/www/skipped:ro" "--volume=${DIR}/../../volume/ad/segment/dash:/var/www/adinsert/segment/dash:ro" "--volume=${DIR}/../../volume/ad/segment/hls:/var/www/adinsert/segment/hls:ro") 6 | 7 | . 
"$DIR/../../script/shell.sh" 8 | -------------------------------------------------------------------------------- /ad-insertion/ad-transcode/workload.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from messaging import Producer 4 | import socket 5 | import datetime 6 | import psutil 7 | import time 8 | import json 9 | import sys 10 | 11 | kafka_topic="workloads" 12 | 13 | if __name__ == "__main__": 14 | prefix=""; 15 | if len(sys.argv)>1: prefix=sys.argv[1] 16 | instance=socket.gethostname()[0:3] 17 | machine=prefix+instance 18 | 19 | while True: 20 | try: 21 | p=Producer() 22 | while True: 23 | p.send(kafka_topic,json.dumps({ 24 | "time":datetime.datetime.utcnow().isoformat(), 25 | "machine": machine, 26 | "workload": psutil.cpu_percent(), 27 | })); 28 | time.sleep(1); 29 | p.close() 30 | except Exception as e: 31 | print(str(e)) 32 | time.sleep(2) 33 | -------------------------------------------------------------------------------- /ad-insertion/analytics/.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/ 2 | -------------------------------------------------------------------------------- /ad-insertion/analytics/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "ssai_analytics_service") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | add_dependencies(build_${service} build_ssai_common) 4 | -------------------------------------------------------------------------------- /ad-insertion/analytics/README.md: -------------------------------------------------------------------------------- 1 | 2 | The analytics-service analyzes the video stream segments utilizing the Open Visual Cloud media analytics software stack: 3 | 4 | - [FFmpeg media analytics stack for the Intel Scalable Processor platform](https://github.com/OpenVisualCloud/Dockerfiles/tree/master/Xeon/ubuntu-18.04/analytics/ffmpeg) 5 | - [GStreamer media analytics stack for the Intel Scalable Processor platform](https://github.com/OpenVisualCloud/Dockerfiles/tree/master/Xeon/ubuntu-18.04/analytics/gst) 6 | - [FFmpeg media analytics stack for Intel VCAC-A](https://github.com/OpenVisualCloud/Dockerfiles/tree/master/VCAC-A/ubuntu-18.04/analytics/ffmpeg) 7 | - [GStreamer media analytics stack for Intel VCAC-A](https://github.com/OpenVisualCloud/Dockerfiles/tree/master/VCAC-A/ubuntu-18.04/analytics/gst) 8 | 9 | -------------------------------------------------------------------------------- /ad-insertion/analytics/VCAC-A/Dockerfile.5.launcher.vcac-a: -------------------------------------------------------------------------------- 1 | # vcac-container-launcher 2 | 3 | FROM docker:stable 4 | COPY VCAC-A/run-container.sh /usr/local/bin 5 | ENTRYPOINT ["/usr/local/bin/run-container.sh"] 6 | 7 | -------------------------------------------------------------------------------- /ad-insertion/analytics/VCAC-A/ffmpeg/Dockerfile: -------------------------------------------------------------------------------- 1 | # ssai_analytics_ffmpeg_vcac-a 2 | 3 | FROM centos:7.6.1810 as build 4 | 5 | ARG VA_SERVING_REPO=https://raw.githubusercontent.com/intel/video-analytics-serving 6 | ARG VA_SERVING_TAG="v0.3.0-alpha" 7 | 8 | RUN mkdir -p /home/vaserving/common/utils && touch /home/vaserving/__init__.py /home/vaserving/common/__init__.py /home/vaserving/common/utils/__init__.py && for x in common/utils/logging.py common/settings.py arguments.py 
ffmpeg_pipeline.py gstreamer_pipeline.py model_manager.py pipeline.py pipeline_manager.py schema.py vaserving.py; do curl -o /home/vaserving/$x -L ${VA_SERVING_REPO}/${VA_SERVING_TAG}/vaserving/$x; done 9 | 10 | COPY models/ /home/models/ 11 | COPY gallery/ /home/gallery/ 12 | COPY VCAC-A/ffmpeg/pipelines/ /home/pipelines/ 13 | COPY *.py /home/ 14 | COPY --from=ssai_common /home/*.py /home/ 15 | 16 | FROM openvisualcloud/vcaca-ubuntu1804-analytics-ffmpeg:20.7 17 | 18 | # Fetch python3 and Install python3 19 | RUN DEBIAN_FRONTEND=noninteractive apt-get update && apt-get install -y -q --no-install-recommends libgirepository-1.0-1 libsoup2.4.1 libjson-c3 python3-jsonschema python3-gi python3-requests python3-tornado python3-pip python3-setuptools python3-wheel && rm -rf /var/lib/apt/lists/* && \ 20 | pip3 install 'kafka-python>=1.4.7' 'kazoo>=2.6.1' 21 | 22 | COPY --from=build /home/ /home/ 23 | ENV FRAMEWORK=ffmpeg 24 | WORKDIR /home 25 | CMD ["/home/analyze.py"] 26 | 27 | #### 28 | ARG USER=docker 29 | ARG GROUP=docker 30 | ARG UID 31 | ARG GID 32 | ## must use ; here to ignore user exist status code 33 | RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} ${GROUP}; \ 34 | [ ${UID} -gt 0 ] && useradd -d /home/${USER} -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} ${USER}; \ 35 | chown -R ${UID}:${GID} /home 36 | USER ${UID} 37 | #### 38 | -------------------------------------------------------------------------------- /ad-insertion/analytics/VCAC-A/ffmpeg/pipelines/emotion_recognition/1/pipeline.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "emotion_recognition", 3 | "version": 1, 4 | "type": "FFmpeg", 5 | "template": [ 6 | "-flags unaligned -hwaccel vaapi -hwaccel_output_format vaapi", 7 | " -hwaccel_device /dev/dri/renderD128 -i \"{source[uri]}\" ", 8 | "-vf \"detect=model={models[face_detection_retail][1][network]}", 9 | ":model_proc=\"{models[face_detection_retail][1][proc]}\"", 10 | ":interval=\"{parameters[every-nth-frame]}\":device=HDDL:nireq=\"{parameters[nireq]}\",", 11 | "classify=model=\"{models[emotion_recognition][1][network]}\"", 12 | ":model_proc=\"{models[emotion_recognition][1][proc]}\":device=HDDL,", 13 | "metaconvert\"", 14 | " -an -y -f metapublish" 15 | ], 16 | "description": "Emotion Recognition", 17 | "parameters": { 18 | "type": "object", 19 | "properties": { 20 | "inference-interval": { 21 | "element": "detection", 22 | "type": "integer", 23 | "minimum": 0, 24 | "maximum": 4294967295, 25 | "default": 1 26 | }, 27 | "nireq": { 28 | "element": "detection", 29 | "type": "integer", 30 | "minimum": 1, 31 | "maximum": 64, 32 | "default": 2 33 | } 34 | } 35 | } 36 | } -------------------------------------------------------------------------------- /ad-insertion/analytics/VCAC-A/ffmpeg/pipelines/face_recognition/1/pipeline.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "face_recognition", 3 | "version": 1, 4 | "type": "FFmpeg", 5 | "template": [ 6 | "-flags unaligned -hwaccel vaapi -hwaccel_output_format vaapi", 7 | " -hwaccel_device /dev/dri/renderD128 -i \"{source[uri]}\" ", 8 | "-vf \"detect=model={models[face_detection_retail][1][network]}", 9 | ":model_proc=\"{models[face_detection_retail][1][proc]}\"", 10 | ":interval=\"{parameters[inference-interval]}\":device=HDDL:nireq=\"{parameters[nireq]}\",", 11 | "classify=model=\"{models[face_reidentification][1][network]}\":model_proc=\"{models[face_reidentification][1][proc]}\":device=HDDL,", 12 | 
"identify=gallery=\"/home/gallery/face_gallery_FP16/gallery.json\",metaconvert\"", 13 | " -y -an -f metapublish" 14 | ], 15 | "description": "Face Recognition", 16 | "parameters": { 17 | "type": "object", 18 | "properties": { 19 | "inference-interval": { 20 | "element": "detection", 21 | "type": "integer", 22 | "minimum": 0, 23 | "maximum": 4294967295, 24 | "default": 1 25 | }, 26 | "nireq": { 27 | "element": "detection", 28 | "type": "integer", 29 | "minimum": 1, 30 | "maximum": 64, 31 | "default": 2 32 | } 33 | } 34 | } 35 | } -------------------------------------------------------------------------------- /ad-insertion/analytics/VCAC-A/ffmpeg/pipelines/object_detection/1/pipeline.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "object_detection", 3 | "version": 1, 4 | "type": "FFmpeg", 5 | "template": [ 6 | "-flags unaligned -hwaccel vaapi -hwaccel_output_format vaapi -hwaccel_device /dev/dri/renderD128", 7 | " -i \"{source[uri]}\" -vf \"detect=model={models[object_detection][1][network]}", 8 | ":device=HDDL:model_proc=\"{models[object_detection][1][proc]}\":interval={parameters[inference-interval]}", 9 | ":nireq={parameters[nireq]},metaconvert\"", 10 | " -an -y -f metapublish" 11 | ], 12 | "description": "Object Detection", 13 | "parameters": { 14 | "type": "object", 15 | "properties": { 16 | "inference-interval": { 17 | "element": "detection", 18 | "type": "integer", 19 | "minimum": 0, 20 | "maximum": 4294967295, 21 | "default": 6 22 | }, 23 | "nireq": { 24 | "element": "detection", 25 | "type": "integer", 26 | "minimum": 1, 27 | "maximum": 64, 28 | "default": 2 29 | } 30 | } 31 | } 32 | } -------------------------------------------------------------------------------- /ad-insertion/analytics/VCAC-A/gst/pipelines/emotion_recognition/1/pipeline.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "emotion_recognition", 3 | "version": 1, 4 | "type": "GStreamer", 5 | "template":"urisourcebin uri=\"{source[uri]}\" ! concat name=c ! decodebin ! video/x-raw,width=300,height=300 ! videoconvert name=\"videoconvert\" ! gvadetect model-instance-id=det0 model=\"{models[face_detection_retail][1][network]}\" model-proc=\"{models[face_detection_retail][1][proc]}\" name=\"detection\" ! queue ! gvaclassify model-instance-id=cls0 model=\"{models[emotion_recognition][1][network]}\" model-proc=\"{models[emotion_recognition][1][proc]}\" name=\"classification\" ! queue ! gvametaconvert source=\"{source[uri]}\" name=\"metaconvert\" ! queue ! gvametapublish method=kafka name=\"destination\" ! 
appsink name=appsink", 6 | "description": "Emotion Recognition Pipeline", 7 | "parameters": { 8 | "type" : "object", 9 | "properties" : { 10 | "inference-interval": { 11 | "element":"detection", 12 | "type": "integer", 13 | "minimum": 0, 14 | "maximum": 4294967295 15 | }, 16 | "cpu-throughput-streams": { 17 | "element":"detection", 18 | "type": "string" 19 | }, 20 | "n-threads": { 21 | "element":"videoconvert", 22 | "type": "integer" 23 | }, 24 | "nireq": { 25 | "element":"detection", 26 | "type": "integer", 27 | "minimum": 1, 28 | "maximum": 64, 29 | "default": 6 30 | }, 31 | "device": { 32 | "element":[{"name":"detection","property":"device"},{"name":"classification","property":"device"}], 33 | "default":"HDDL", 34 | "type":"string" 35 | } 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /ad-insertion/analytics/VCAC-A/gst/pipelines/object_detection/1/pipeline.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "object_detection", 3 | "version": 1, 4 | "type": "GStreamer", 5 | "template":"urisourcebin uri=\"{source[uri]}\" ! concat name=c ! decodebin ! video/x-raw,width=300,height=300 ! videoconvert name=\"videoconvert\" ! gvadetect model-instance-id=det2 model=\"{models[object_detection][1][network]}\" model-proc=\"{models[object_detection][1][proc]}\" name=\"detection\" ! gvametaconvert source=\"{source[uri]}\" name=\"metaconvert\" ! queue ! gvametapublish method=kafka name=\"destination\" ! appsink name=appsink", 6 | "description": "Object Detection Pipeline", 7 | "parameters": { 8 | "type" : "object", 9 | "properties" : { 10 | "inference-interval": { 11 | "element":"detection", 12 | "type": "integer", 13 | "minimum": 0, 14 | "maximum": 4294967295 15 | }, 16 | "cpu-throughput-streams": { 17 | "element":"detection", 18 | "type": "string" 19 | }, 20 | "n-threads": { 21 | "element":"videoconvert", 22 | "type": "integer" 23 | }, 24 | "nireq": { 25 | "element":"detection", 26 | "type": "integer", 27 | "minimum": 1, 28 | "maximum": 64, 29 | "default": 6 30 | }, 31 | "device": { 32 | "element":"detection", 33 | "default":"HDDL", 34 | "type":"string" 35 | } 36 | } 37 | } 38 | } 39 | 40 | 41 | -------------------------------------------------------------------------------- /ad-insertion/analytics/VCAC-A/run-container.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | function gracefully_exit { 4 | docker kill $(docker ps --format {{.ID}} --filter name=$HOSTNAME) 5 | exit 0 6 | } 7 | 8 | # gracefully exit 9 | trap gracefully_exit SIGTERM 10 | 11 | # setting 12 | ENVS="$(env | grep ^VCAC_ | sed 's/^VCAC_/-e /')" 13 | NETWORKS="$(docker inspect $HOSTNAME --format {{.NetworkSettings.Networks}} | sed -e 's/map\[\(.*\)]/\1/' -e 's/\([A-Za-z0-9_]*\):[^ ]*/--network=\1/g')" 14 | BINDS="$(docker inspect $HOSTNAME --format {{.HostConfig.Mounts}} | sed -e 's/\[\(.*\)]/\1/' -e 's/{\([a-z]*\) \([^ ]*\) \([^ ]*\) \([a-z]*\)[^}]*}/-v \2:\3:\4/g' -e 's/:true/:ro/g' -e 's/:false/:rw/g')" 15 | 16 | # docker run 17 | /usr/local/bin/docker-entrypoint.sh docker run --rm --name $HOSTNAME --user root -v /var/tmp:/var/tmp -v /etc/localtime:/etc/localtime:ro --privileged $ENVS $NETWORKS $BINDS "$@" $VCAC_IMAGE & 18 | 19 | wait 20 | -------------------------------------------------------------------------------- /ad-insertion/analytics/Xeon/ffmpeg/Dockerfile: -------------------------------------------------------------------------------- 1 | # 
ssai_analytics_ffmpeg_xeon 2 | 3 | FROM centos:7.6.1810 as build 4 | 5 | ARG VA_SERVING_REPO=https://raw.githubusercontent.com/intel/video-analytics-serving 6 | ARG VA_SERVING_TAG="v0.3.0-alpha" 7 | 8 | RUN mkdir -p /home/vaserving/common/utils && touch /home/vaserving/__init__.py /home/vaserving/common/__init__.py /home/vaserving/common/utils/__init__.py && for x in common/utils/logging.py common/settings.py arguments.py ffmpeg_pipeline.py gstreamer_pipeline.py model_manager.py pipeline.py pipeline_manager.py schema.py vaserving.py; do curl -o /home/vaserving/$x -L ${VA_SERVING_REPO}/${VA_SERVING_TAG}/vaserving/$x; done 9 | 10 | COPY ./models/ /home/models/ 11 | COPY ./gallery/ /home/gallery/ 12 | COPY ./Xeon/ffmpeg/pipelines/ /home/pipelines 13 | COPY *.py /home/ 14 | COPY --from=ssai_common /home/*.py /home/ 15 | 16 | From openvisualcloud/xeon-ubuntu1804-analytics-ffmpeg:20.7 17 | 18 | # Fetch python3 and Install python3 19 | RUN DEBIAN_FRONTEND=noninteractive apt-get update && apt-get install -y -q --no-install-recommends python3-gst-1.0 python3-jsonschema python3-pip && rm -rf /var/lib/apt/lists/* && \ 20 | pip3 install 'kafka-python>=1.4.7' 'kazoo>=2.6.1' 21 | 22 | COPY --from=build /home/ /home/ 23 | ENV FRAMEWORK=ffmpeg 24 | WORKDIR /home 25 | CMD ["/home/analyze.py"] 26 | 27 | #### 28 | ARG USER=docker 29 | ARG GROUP=docker 30 | ARG UID 31 | ARG GID 32 | ## must use ; here to ignore user exist status code 33 | RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} ${GROUP}; \ 34 | [ ${UID} -gt 0 ] && useradd -d /home -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} ${USER}; \ 35 | chown -R ${UID}:${GID} /home 36 | USER ${UID} 37 | #### 38 | -------------------------------------------------------------------------------- /ad-insertion/analytics/Xeon/ffmpeg/pipelines/emotion_recognition/1/pipeline.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "emotion_recognition", 3 | "version": 1, 4 | "type": "FFmpeg", 5 | "template": [ 6 | "-i \"{source[uri]}\" -vf ", 7 | "\"detect=model={models[face_detection_retail][1][network]}", 8 | ":model_proc=\"{models[face_detection_retail][1][proc]}\"", 9 | ":interval=\"{parameters[inference-interval]}\":device=CPU:nireq=\"{parameters[nireq]}\",", 10 | "classify=model=\"{models[emotion_recognition][1][network]}\"", 11 | ":model_proc=\"{models[emotion_recognition][1][proc]}\":device=CPU,", 12 | "metaconvert\"", 13 | " -an -y -f metapublish" 14 | ], 15 | "description": "Emotion Recognition", 16 | "parameters": { 17 | "type": "object", 18 | "properties": { 19 | "inference-interval": { 20 | "element": "detection", 21 | "type": "integer", 22 | "minimum": 0, 23 | "maximum": 4294967295, 24 | "default": 1 25 | }, 26 | "nireq": { 27 | "element": "detection", 28 | "type": "integer", 29 | "minimum": 1, 30 | "maximum": 64, 31 | "default": 2 32 | } 33 | } 34 | } 35 | } -------------------------------------------------------------------------------- /ad-insertion/analytics/Xeon/ffmpeg/pipelines/face_recognition/1/pipeline.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "face_recognition", 3 | "version": 1, 4 | "type": "FFmpeg", 5 | "template": [ 6 | "-i \"{source[uri]}\" -vf ", 7 | "\"detect=model={models[face_detection_retail][1][network]}", 8 | ":model_proc=\"{models[face_detection_retail][1][proc]}\"", 9 | ":interval=\"{parameters[inference-interval]}\":device=CPU:nireq=\"{parameters[nireq]}\",", 10 | 
"classify=model=\"{models[face_reidentification][1][network]}\"", 11 | ":model_proc=\"{models[face_reidentification][1][proc]}\":device=CPU,", 12 | "identify=gallery=\"/home/gallery/face_gallery_FP32/gallery.json\",", 13 | "metaconvert\"", 14 | " -an -y -f metapublish" 15 | ], 16 | "description": "Face Recognition Pipeline", 17 | "parameters": { 18 | "type": "object", 19 | "properties": { 20 | "inference-interval": { 21 | "element": "detection", 22 | "type": "integer", 23 | "minimum": 0, 24 | "maximum": 4294967295, 25 | "default": 1 26 | }, 27 | "nireq": { 28 | "element": "detection", 29 | "type": "integer", 30 | "minimum": 1, 31 | "maximum": 64, 32 | "default": 2 33 | } 34 | } 35 | } 36 | } -------------------------------------------------------------------------------- /ad-insertion/analytics/Xeon/ffmpeg/pipelines/object_detection/1/pipeline.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "object_detection", 3 | "version": 1, 4 | "type": "FFmpeg", 5 | "template": [ 6 | "-i \"{source[uri]}\" -vf ", 7 | "\"detect=model={models[object_detection][1][network]}", 8 | ":model_proc=\"{models[object_detection][1][proc]}\"", 9 | ":interval={parameters[inference-interval]}", 10 | ":device=CPU:nireq={parameters[nireq]},", 11 | "metaconvert\"", 12 | " -an -y -f metapublish" 13 | ], 14 | "description": "Object Detection", 15 | "parameters": { 16 | "type": "object", 17 | "properties": { 18 | "inference-interval": { 19 | "element": "detection", 20 | "type": "integer", 21 | "minimum": 0, 22 | "maximum": 4294967295, 23 | "default": 1 24 | }, 25 | "nireq": { 26 | "element": "detection", 27 | "type": "integer", 28 | "minimum": 1, 29 | "maximum": 64, 30 | "default": 2 31 | } 32 | } 33 | } 34 | } -------------------------------------------------------------------------------- /ad-insertion/analytics/Xeon/gst/pipelines/emotion_recognition/1/pipeline.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "emotion_recognition", 3 | "version": 1, 4 | "type": "GStreamer", 5 | "template":"urisourcebin uri=\"{source[uri]}\" ! concat name=c ! decodebin ! video/x-raw ! videoconvert name=\"videoconvert\" ! gvadetect model-instance-id=det0 model=\"{models[face_detection_retail][1][network]}\" model-proc=\"{models[face_detection_retail][1][proc]}\" name=\"detection\" ! queue ! gvaclassify model-instance-id=cls0 model=\"{models[emotion_recognition][1][network]}\" model-proc=\"{models[emotion_recognition][1][proc]}\" name=\"classification\" ! queue ! gvametaconvert name=\"metaconvert\" ! queue ! gvametapublish method=kafka name=\"destination\" ! 
appsink name=appsink", 6 | "description": "Emotion Recognition Pipeline", 7 | "parameters": { 8 | "type" : "object", 9 | "properties" : { 10 | "inference-interval": { 11 | "element":"detection", 12 | "type": "integer", 13 | "minimum": 0, 14 | "maximum": 4294967295 15 | }, 16 | "cpu-throughput-streams": { 17 | "element":"detection", 18 | "type": "string" 19 | }, 20 | "n-threads": { 21 | "element":"videoconvert", 22 | "type": "integer" 23 | }, 24 | "nireq": { 25 | "element":"detection", 26 | "type": "integer", 27 | "minimum": 1, 28 | "maximum": 64 29 | } 30 | } 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /ad-insertion/analytics/Xeon/gst/pipelines/object_detection/1/pipeline.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "object_detection", 3 | "version": 1, 4 | "type": "GStreamer", 5 | "template":"urisourcebin uri=\"{source[uri]}\" ! concat name=c ! decodebin ! video/x-raw ! videoconvert name=\"videoconvert\" ! video/x-raw,format=BGRx ! gvadetect model-instance-id=det2 ie-config=CPU_BIND_THREAD=NO model=\"{models[object_detection][1][network]}\" model-proc=\"{models[object_detection][1][proc]}\" name=\"detection\" ! gvametaconvert name=\"metaconvert\" ! queue ! gvametapublish method=kafka name=\"destination\" ! queue ! appsink name=appsink", 6 | "description": "Object Detection Pipeline", 7 | "parameters": { 8 | "type" : "object", 9 | "properties" : { 10 | "inference-interval": { 11 | "element":"detection", 12 | "type": "integer", 13 | "minimum": 0, 14 | "maximum": 4294967295 15 | }, 16 | "cpu-throughput-streams": { 17 | "element":"detection", 18 | "type": "string" 19 | }, 20 | "n-threads": { 21 | "element":"videoconvert", 22 | "type": "integer" 23 | }, 24 | "nireq": { 25 | "element":"detection", 26 | "type": "integer", 27 | "minimum": 1, 28 | "maximum": 64 29 | } 30 | } 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /ad-insertion/analytics/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | DIR=$(dirname $(readlink -f "$0")) 3 | 4 | . 
"${DIR}/../../script/build.sh" 5 | 6 | -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP16/features/BikeQuick1_boy_0_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP16/features/BikeQuick1_boy_0_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP16/features/BikeQuick1_boy_1_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP16/features/BikeQuick1_boy_1_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP16/features/BikeQuick1_girl_0_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP16/features/BikeQuick1_girl_0_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP16/features/BikeQuick1_girl_1_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP16/features/BikeQuick1_girl_1_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP16/features/HorseScene1_blue_man_0_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP16/features/HorseScene1_blue_man_0_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP16/features/HorseScene1_blue_man_1_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP16/features/HorseScene1_blue_man_1_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP16/features/HorseScene1_blue_man_1_frame_0_idx_1.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP16/features/HorseScene1_blue_man_1_frame_0_idx_1.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP16/features/HorseScene1_red_woman_0_frame_0_idx_0.tensor: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP16/features/HorseScene1_red_woman_0_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP16/features/HorseScene1_red_woman_1_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP16/features/HorseScene1_red_woman_1_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP16/features/WalkScene1_blue_man_0_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP16/features/WalkScene1_blue_man_0_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP16/features/WalkScene1_blue_man_1_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP16/features/WalkScene1_blue_man_1_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP16/features/WalkScene1_grey_man_0_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP16/features/WalkScene1_grey_man_0_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP16/features/WalkScene1_grey_man_1_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP16/features/WalkScene1_grey_man_1_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP16/gallery.json: -------------------------------------------------------------------------------- 1 | {"Victor": {"features": ["features/WalkScene1_grey_man_1_frame_0_idx_0.tensor", "features/WalkScene1_grey_man_0_frame_0_idx_0.tensor"]}, "John": {"features": ["features/HorseScene1_blue_man_1_frame_0_idx_0.tensor", "features/HorseScene1_blue_man_1_frame_0_idx_1.tensor", "features/HorseScene1_blue_man_0_frame_0_idx_0.tensor"]}, "Mike": {"features": ["features/WalkScene1_blue_man_1_frame_0_idx_0.tensor", "features/WalkScene1_blue_man_0_frame_0_idx_0.tensor"]}, "Jenny": {"features": ["features/HorseScene1_red_woman_0_frame_0_idx_0.tensor", "features/HorseScene1_red_woman_1_frame_0_idx_0.tensor"]}, "Jocelyn": {"features": 
["features/BikeQuick1_girl_0_frame_0_idx_0.tensor", "features/BikeQuick1_girl_1_frame_0_idx_0.tensor"]}, "Jack": {"features": ["features/BikeQuick1_boy_1_frame_0_idx_0.tensor", "features/BikeQuick1_boy_0_frame_0_idx_0.tensor"]}} 2 | -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP16/gallery.json.gst: -------------------------------------------------------------------------------- 1 | [{"name": "Victor", "features": ["features/WalkScene1_grey_man_1_frame_0_idx_0.tensor", "features/WalkScene1_grey_man_0_frame_0_idx_0.tensor"]}, {"name": "John", "features": ["features/HorseScene1_blue_man_1_frame_0_idx_0.tensor", "features/HorseScene1_blue_man_1_frame_0_idx_1.tensor", "features/HorseScene1_blue_man_0_frame_0_idx_0.tensor"]}, {"name": "Mike", "features": ["features/WalkScene1_blue_man_1_frame_0_idx_0.tensor", "features/WalkScene1_blue_man_0_frame_0_idx_0.tensor"]}, {"name": "Jenny", "features": ["features/HorseScene1_red_woman_0_frame_0_idx_0.tensor", "features/HorseScene1_red_woman_1_frame_0_idx_0.tensor"]}, {"name": "Jocelyn", "features": ["features/BikeQuick1_girl_0_frame_0_idx_0.tensor", "features/BikeQuick1_girl_1_frame_0_idx_0.tensor"]}, {"name": "Jack", "features": ["features/BikeQuick1_boy_1_frame_0_idx_0.tensor", "features/BikeQuick1_boy_0_frame_0_idx_0.tensor"]}] 2 | -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP32/features/BikeQuick1_boy_0_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP32/features/BikeQuick1_boy_0_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP32/features/BikeQuick1_boy_1_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP32/features/BikeQuick1_boy_1_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP32/features/BikeQuick1_girl_0_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP32/features/BikeQuick1_girl_0_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP32/features/BikeQuick1_girl_1_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP32/features/BikeQuick1_girl_1_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP32/features/HorseScene1_blue_man_0_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP32/features/HorseScene1_blue_man_0_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP32/features/HorseScene1_blue_man_1_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP32/features/HorseScene1_blue_man_1_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP32/features/HorseScene1_blue_man_1_frame_0_idx_1.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP32/features/HorseScene1_blue_man_1_frame_0_idx_1.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP32/features/HorseScene1_red_woman_0_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP32/features/HorseScene1_red_woman_0_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP32/features/HorseScene1_red_woman_1_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP32/features/HorseScene1_red_woman_1_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP32/features/WalkScene1_blue_man_0_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP32/features/WalkScene1_blue_man_0_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP32/features/WalkScene1_blue_man_1_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP32/features/WalkScene1_blue_man_1_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP32/features/WalkScene1_grey_man_0_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP32/features/WalkScene1_grey_man_0_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP32/features/WalkScene1_grey_man_1_frame_0_idx_0.tensor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/gallery/face_gallery_FP32/features/WalkScene1_grey_man_1_frame_0_idx_0.tensor -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP32/gallery.json: -------------------------------------------------------------------------------- 1 | {"Victor": {"features": ["features/WalkScene1_grey_man_1_frame_0_idx_0.tensor", "features/WalkScene1_grey_man_0_frame_0_idx_0.tensor"]}, "John": {"features": ["features/HorseScene1_blue_man_1_frame_0_idx_0.tensor", "features/HorseScene1_blue_man_1_frame_0_idx_1.tensor", "features/HorseScene1_blue_man_0_frame_0_idx_0.tensor"]}, "Mike": {"features": ["features/WalkScene1_blue_man_1_frame_0_idx_0.tensor", "features/WalkScene1_blue_man_0_frame_0_idx_0.tensor"]}, "Jenny": {"features": ["features/HorseScene1_red_woman_0_frame_0_idx_0.tensor", "features/HorseScene1_red_woman_1_frame_0_idx_0.tensor"]}, "Jocelyn": {"features": ["features/BikeQuick1_girl_0_frame_0_idx_0.tensor", "features/BikeQuick1_girl_1_frame_0_idx_0.tensor"]}, "Jack": {"features": ["features/BikeQuick1_boy_1_frame_0_idx_0.tensor", "features/BikeQuick1_boy_0_frame_0_idx_0.tensor"]}} 2 | -------------------------------------------------------------------------------- /ad-insertion/analytics/gallery/face_gallery_FP32/gallery.json.gst: -------------------------------------------------------------------------------- 1 | [{"name": "Victor", "features": ["features/WalkScene1_grey_man_1_frame_0_idx_0.tensor", "features/WalkScene1_grey_man_0_frame_0_idx_0.tensor"]}, {"name": "John", "features": ["features/HorseScene1_blue_man_1_frame_0_idx_0.tensor", "features/HorseScene1_blue_man_1_frame_0_idx_1.tensor", "features/HorseScene1_blue_man_0_frame_0_idx_0.tensor"]}, {"name": "Mike", "features": ["features/WalkScene1_blue_man_1_frame_0_idx_0.tensor", "features/WalkScene1_blue_man_0_frame_0_idx_0.tensor"]}, {"name": "Jenny", "features": ["features/HorseScene1_red_woman_0_frame_0_idx_0.tensor", "features/HorseScene1_red_woman_1_frame_0_idx_0.tensor"]}, {"name": "Jocelyn", "features": ["features/BikeQuick1_girl_0_frame_0_idx_0.tensor", "features/BikeQuick1_girl_1_frame_0_idx_0.tensor"]}, {"name": "Jack", "features": ["features/BikeQuick1_boy_1_frame_0_idx_0.tensor", "features/BikeQuick1_boy_0_frame_0_idx_0.tensor"]}] 2 | -------------------------------------------------------------------------------- /ad-insertion/analytics/merged_segment.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import tempfile 4 | import urllib.request 5 | import urllib.parse 6 | import urllib 7 | import os 8 | import shutil 9 | 10 | def delete_merged_segment(segment_path): 11 | shutil.rmtree(os.path.dirname(segment_path)) 12 | 13 | def create_merged_segment(init_segment,segment): 14 | 15 | segment_uri = urllib.parse.urlparse(segment) 16 | init_segment_uri = urllib.parse.urlparse(init_segment) 17 | 18 | 
if (segment_uri.scheme!="http") or (init_segment_uri.scheme!="http"): 19 | return None 20 | 21 | segment_name = os.path.basename(segment_uri.path) 22 | init_segment_name = os.path.basename(init_segment_uri.path) 23 | stream_path = os.path.dirname(init_segment_uri.path)[1:] 24 | stream_directory = os.path.join(tempfile.mkdtemp(), 25 | stream_path) 26 | destination_path = os.path.join(stream_directory, 27 | init_segment_name+".dat") 28 | os.makedirs(stream_directory,exist_ok=True) 29 | segment_path = os.path.join(stream_directory,segment_name) 30 | init_segment_path = os.path.join(stream_directory,init_segment_name) 31 | 32 | try: 33 | urllib.request.urlretrieve(segment,segment_path) 34 | urllib.request.urlretrieve(init_segment,init_segment_path) 35 | destination = open(destination_path,'wb') 36 | shutil.copyfileobj(open(init_segment_path,'rb'),destination) 37 | shutil.copyfileobj(open(segment_path,'rb'),destination) 38 | destination.close() 39 | except Exception as e: # catch urllib/file errors; the "requests" library is not imported in this module 40 | print("Request failed " + str(e)) 41 | return None 42 | 43 | return destination_path 44 | 45 | -------------------------------------------------------------------------------- /ad-insertion/analytics/models/emotion_recognition/1/FP16/emotions-recognition-retail-0003-fp16.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/models/emotion_recognition/1/FP16/emotions-recognition-retail-0003-fp16.bin -------------------------------------------------------------------------------- /ad-insertion/analytics/models/emotion_recognition/1/FP32/emotions-recognition-retail-0003.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/models/emotion_recognition/1/FP32/emotions-recognition-retail-0003.bin -------------------------------------------------------------------------------- /ad-insertion/analytics/models/emotion_recognition/1/INT8/emotions-recognition-retail-0003-int8.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/models/emotion_recognition/1/INT8/emotions-recognition-retail-0003-int8.bin -------------------------------------------------------------------------------- /ad-insertion/analytics/models/emotion_recognition/1/emotions-recognition-retail-0003.json: -------------------------------------------------------------------------------- 1 | { 2 | "json_schema_version" : 1.0, 3 | "input_preproc": [ 4 | { 5 | "layer_name": "data", 6 | "color_format": "BGR" 7 | } 8 | ], 9 | "output_postproc": [ 10 | { 11 | "layer_name": "prob_emotion", 12 | "attribute_name": "emotion", 13 | "labels": ["neutral", "happy", "sad", "surprise", "anger" ], 14 | "converter": "tensor_to_label", 15 | "method": "max" 16 | } 17 | ] 18 | } 19 | -------------------------------------------------------------------------------- /ad-insertion/analytics/models/emotion_recognition/1/emotions-recognition-retail-0003.json.gst: -------------------------------------------------------------------------------- 1 | { 2 | "json_schema_version" : "1.0.0", 3 | "input_preproc": [ 4 | { 5 | "layer_name": "data", 6 | "color_format": "BGR" 7 | } 8 | ], 9 | 
"output_postproc": [ 10 | { 11 | "layer_name": "prob_emotion", 12 | "attribute_name": "emotion", 13 | "labels": ["neutral", "happy", "sad", "surprise", "anger" ], 14 | "converter": "tensor_to_label", 15 | "method": "max" 16 | } 17 | ] 18 | } 19 | -------------------------------------------------------------------------------- /ad-insertion/analytics/models/face_detection_adas/1/FP16/face-detection-adas-0001-fp16.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/models/face_detection_adas/1/FP16/face-detection-adas-0001-fp16.bin -------------------------------------------------------------------------------- /ad-insertion/analytics/models/face_detection_adas/1/FP32/face-detection-adas-0001.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/models/face_detection_adas/1/FP32/face-detection-adas-0001.bin -------------------------------------------------------------------------------- /ad-insertion/analytics/models/face_detection_adas/1/INT8/face-detection-adas-0001-int8.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/models/face_detection_adas/1/INT8/face-detection-adas-0001-int8.bin -------------------------------------------------------------------------------- /ad-insertion/analytics/models/face_detection_adas/1/face-detection-adas-0001.json: -------------------------------------------------------------------------------- 1 | { 2 | "json_schema_version":1.0, 3 | "input_preproc":[ 4 | { 5 | "color_format":"BGR" 6 | } 7 | ], 8 | "output_postproc":[ 9 | { 10 | "labels":["background","face"], 11 | "converter":"DetectionOutput", 12 | "layer_name":"detection_out" 13 | } 14 | ] 15 | } -------------------------------------------------------------------------------- /ad-insertion/analytics/models/face_detection_adas/1/face-detection-adas-0001.json.gst: -------------------------------------------------------------------------------- 1 | { 2 | "json_schema_version": "1.0.0", 3 | "input_preproc":[ 4 | { 5 | "color_format":"BGR" 6 | } 7 | ], 8 | "output_postproc":[ 9 | { 10 | "labels":["background","face"], 11 | "converter":"DetectionOutput", 12 | "layer_name":"detection_out" 13 | } 14 | ] 15 | } 16 | -------------------------------------------------------------------------------- /ad-insertion/analytics/models/face_detection_retail/1/FP16/face-detection-retail-0004-fp16.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/models/face_detection_retail/1/FP16/face-detection-retail-0004-fp16.bin -------------------------------------------------------------------------------- /ad-insertion/analytics/models/face_detection_retail/1/FP32/face-detection-retail-0004.bin: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/models/face_detection_retail/1/FP32/face-detection-retail-0004.bin -------------------------------------------------------------------------------- /ad-insertion/analytics/models/face_detection_retail/1/INT8/face-detection-retail-0004-int8.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/models/face_detection_retail/1/INT8/face-detection-retail-0004-int8.bin -------------------------------------------------------------------------------- /ad-insertion/analytics/models/face_detection_retail/1/face-detection-retail-0004.json: -------------------------------------------------------------------------------- 1 | { 2 | "json_schema_version":1.0, 3 | "input_preproc":[ 4 | { 5 | "color_format":"BGR" 6 | } 7 | ], 8 | "output_postproc":[ 9 | { 10 | "labels":["background","face"], 11 | "converter":"DetectionOutput", 12 | "layer_name":"detection_out" 13 | } 14 | ] 15 | } -------------------------------------------------------------------------------- /ad-insertion/analytics/models/face_detection_retail/1/face-detection-retail-0004.json.gst: -------------------------------------------------------------------------------- 1 | { 2 | "json_schema_version": "1.0.0", 3 | "input_preproc":[ 4 | { 5 | "color_format":"BGR" 6 | } 7 | ], 8 | "output_postproc":[ 9 | { 10 | "labels":["background","face"], 11 | "converter":"DetectionOutput", 12 | "layer_name":"detection_out" 13 | } 14 | ] 15 | } 16 | -------------------------------------------------------------------------------- /ad-insertion/analytics/models/face_reidentification/1/FP16/face-reidentification-retail-0095-fp16.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/models/face_reidentification/1/FP16/face-reidentification-retail-0095-fp16.bin -------------------------------------------------------------------------------- /ad-insertion/analytics/models/face_reidentification/1/FP32/face-reidentification-retail-0095.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/models/face_reidentification/1/FP32/face-reidentification-retail-0095.bin -------------------------------------------------------------------------------- /ad-insertion/analytics/models/face_reidentification/1/INT8/face-reidentification-retail-0095.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/models/face_reidentification/1/INT8/face-reidentification-retail-0095.bin -------------------------------------------------------------------------------- /ad-insertion/analytics/models/face_reidentification/1/face-reidentification-retail-0095.json: -------------------------------------------------------------------------------- 1 | { 2 | "json_schema_version" : 1.0, 3 | "input_preproc": [ 4 | { 5 | "layer_name": "0", 6 | "color_format": "BGR", 7 | "converter": "alignment", 8 | "alignment_points": [ 9 | 
0.31556875000000000, 10 | 0.4615741071428571, 11 | 0.68262291666666670, 12 | 0.4615741071428571, 13 | 0.50026249999999990, 14 | 0.6405053571428571, 15 | 0.34947187500000004, 16 | 0.8246919642857142, 17 | 0.65343645833333330, 18 | 0.8246919642857142 19 | ] 20 | } 21 | ], 22 | "output_postproc": [ 23 | { 24 | "layer_name": "658", 25 | "attribute_name": "face_id", 26 | "format": "cosine_distance" 27 | } 28 | ] 29 | } 30 | -------------------------------------------------------------------------------- /ad-insertion/analytics/models/face_reidentification/1/face-reidentification-retail-0095.json.gst: -------------------------------------------------------------------------------- 1 | { 2 | "json_schema_version" : "1.0.0", 3 | "input_preproc": [ 4 | { 5 | "layer_name": "0", 6 | "color_format": "BGR", 7 | "converter": "alignment", 8 | "alignment_points": [ 9 | 0.31556875000000000, 10 | 0.4615741071428571, 11 | 0.68262291666666670, 12 | 0.4615741071428571, 13 | 0.50026249999999990, 14 | 0.6405053571428571, 15 | 0.34947187500000004, 16 | 0.8246919642857142, 17 | 0.65343645833333330, 18 | 0.8246919642857142 19 | ] 20 | } 21 | ], 22 | "output_postproc": [ 23 | { 24 | "layer_name": "658", 25 | "attribute_name": "face_id", 26 | "format": "cosine_distance" 27 | } 28 | ] 29 | } 30 | -------------------------------------------------------------------------------- /ad-insertion/analytics/models/landmarks_regression/1/FP16/landmarks-regression-retail-0009-fp16.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/models/landmarks_regression/1/FP16/landmarks-regression-retail-0009-fp16.bin -------------------------------------------------------------------------------- /ad-insertion/analytics/models/landmarks_regression/1/FP32/landmarks-regression-retail-0009.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/models/landmarks_regression/1/FP32/landmarks-regression-retail-0009.bin -------------------------------------------------------------------------------- /ad-insertion/analytics/models/landmarks_regression/1/INT8/landmarks-regression-retail-0009.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/models/landmarks_regression/1/INT8/landmarks-regression-retail-0009.bin -------------------------------------------------------------------------------- /ad-insertion/analytics/models/landmarks_regression/1/landmarks-regression-retail-0009.json: -------------------------------------------------------------------------------- 1 | { 2 | "json_schema_version" : 1.0, 3 | "input_preproc": [ 4 | { 5 | "color_format": "BGR" 6 | } 7 | ], 8 | "output_postproc": [ 9 | { 10 | "layer_name": "95", 11 | "format": "landmark_points" 12 | } 13 | ] 14 | } 15 | -------------------------------------------------------------------------------- /ad-insertion/analytics/models/landmarks_regression/1/landmarks-regression-retail-0009.json.gst: -------------------------------------------------------------------------------- 1 | { 2 | "json_schema_version" : "1.0.0", 3 | "input_preproc": [ 4 | { 5 | "color_format": "BGR" 6 | } 7 | ], 8 | 
"output_postproc": [ 9 | { 10 | "layer_name": "95", 11 | "format": "landmark_points" 12 | } 13 | ] 14 | } 15 | -------------------------------------------------------------------------------- /ad-insertion/analytics/models/object_detection/1/FP16/mobilenet-ssd-fp16.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/models/object_detection/1/FP16/mobilenet-ssd-fp16.bin -------------------------------------------------------------------------------- /ad-insertion/analytics/models/object_detection/1/FP32/mobilenet-ssd.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/models/object_detection/1/FP32/mobilenet-ssd.bin -------------------------------------------------------------------------------- /ad-insertion/analytics/models/object_detection/1/INT8/mobilenet-ssd-int8.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/ad-insertion/analytics/models/object_detection/1/INT8/mobilenet-ssd-int8.bin -------------------------------------------------------------------------------- /ad-insertion/analytics/models/object_detection/1/mobilenet-ssd.json: -------------------------------------------------------------------------------- 1 | { 2 | "json_schema_version" : 1.0, 3 | "input_preproc":[ 4 | { 5 | "color_format":"BGR" 6 | } 7 | ], 8 | "output_postproc":[ 9 | { 10 | "converter":"DetectionOutput", 11 | "labels":["background","aeroplane","bicycle","bird","boat","bottle","bus","car","cat","chair","cow","diningtable","dog","horse","motorbike","person","pottedplant","sheep","sofa","train","tvmonitor"], 12 | "layer_name":"detection_out" 13 | } 14 | ] 15 | } 16 | 17 | -------------------------------------------------------------------------------- /ad-insertion/analytics/models/object_detection/1/mobilenet-ssd.json.gst: -------------------------------------------------------------------------------- 1 | { 2 | "json_schema_version" : "1.0.0", 3 | "input_preproc":[ 4 | { 5 | "color_format":"BGR" 6 | } 7 | ], 8 | "output_postproc":[ 9 | { 10 | "converter":"DetectionOutput", 11 | "labels":["background","aeroplane","bicycle","bird","boat","bottle","bus","car","cat","chair","cow","diningtable","dog","horse","motorbike","person","pottedplant","sheep","sofa","train","tvmonitor"], 12 | "layer_name":"detection_out" 13 | } 14 | ] 15 | } 16 | 17 | -------------------------------------------------------------------------------- /ad-insertion/analytics/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | PLATFORM=${1:-Xeon} 5 | shift 6 | FRAMEWORK=${1:-gst} 7 | shift 8 | 9 | IMAGE="ssai_analytics_${FRAMEWORK}_$(echo ${PLATFORM} | tr A-Z a-z)" 10 | OPTIONS=("--rm" "--name=${IMAGE}") 11 | . 
"$DIR/../../script/shell.sh" 12 | -------------------------------------------------------------------------------- /ad-insertion/frontend/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "ad_insertion_frontend") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | add_dependencies(build_${service} build_ssai_common) 4 | -------------------------------------------------------------------------------- /ad-insertion/frontend/Dockerfile: -------------------------------------------------------------------------------- 1 | 2 | FROM ssai_common 3 | 4 | COPY *.py /home/ 5 | COPY *.conf /etc/nginx/ 6 | CMD ["/bin/bash","-c","/home/main.py&/usr/sbin/nginx"] 7 | WORKDIR /home 8 | EXPOSE 8080 9 | 10 | #### 11 | ARG USER=docker 12 | ARG GROUP=docker 13 | ARG UID 14 | ARG GID 15 | ## must use ; here to ignore user exist status code 16 | RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} ${GROUP}; \ 17 | [ ${UID} -gt 0 ] && useradd -d /home -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} ${USER}; \ 18 | touch /var/run/nginx.pid && \ 19 | mkdir -p /var/log/nginx /var/lib/nginx /var/www/cache /var/www/adstatic /var/www/adinsert && \ 20 | chown -R ${UID}:${GID} /home /var/run/nginx.pid /var/www /var/log/nginx /var/lib/nginx 21 | USER ${UID} 22 | #### 23 | 24 | -------------------------------------------------------------------------------- /ad-insertion/frontend/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_ad_insertion_frontend" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . "$DIR/../../script/build.sh" 7 | -------------------------------------------------------------------------------- /ad-insertion/frontend/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from tornado import ioloop, web 4 | from tornado.options import define, options, parse_command_line 5 | from manifest import ManifestHandler 6 | from segment import SegmentHandler 7 | 8 | app = web.Application([ 9 | (r'/segment/.*',SegmentHandler), 10 | (r'/manifest/.*',ManifestHandler), 11 | ]) 12 | 13 | if __name__ == "__main__": 14 | define("port", default=2222, help="the binding port", type=int) 15 | define("ip", default="127.0.0.1", help="the binding ip") 16 | parse_command_line() 17 | print("ad-insertion: frontend: Listening to " + options.ip + ":" + str(options.port), flush = True) 18 | app.listen(options.port, address=options.ip) 19 | ioloop.IOLoop.instance().start() 20 | -------------------------------------------------------------------------------- /ad-insertion/frontend/nginx.conf: -------------------------------------------------------------------------------- 1 | 2 | worker_processes 5; 3 | worker_rlimit_nofile 8192; 4 | daemon off; 5 | 6 | events { 7 | worker_connections 4096; 8 | } 9 | 10 | http { 11 | include mime.types; 12 | default_type application/octet-stream; 13 | 14 | server { 15 | listen 8080; 16 | server_name _; 17 | 18 | location / { 19 | rewrite ^(/.*) $1 break; 20 | proxy_pass http://content-provider-service:8080; 21 | } 22 | 23 | location /intercept { 24 | internal; 25 | rewrite ^/intercept(/.*) $1 break; 26 | proxy_pass http://content-provider-service:8080; 27 | } 28 | 29 | location /adinsert { 30 | internal; 31 | root /var/www/; 32 | add_header Cache-Control no-cache; 33 | types { 34 | application/dash+xml mpd; 35 | application/vnd.apple.mpegurl m3u8; 36 | video/mp2t ts; 37 | } 38 | } 39 | 40 | location 
/adstatic { 41 | internal; 42 | root /var/www/; 43 | add_header Cache-Control no-cache; 44 | types { 45 | application/dash+xml mpd; 46 | application/vnd.apple.mpegurl m3u8; 47 | video/mp2t ts; 48 | } 49 | } 50 | 51 | location ~* ^/(hls|dash)/.*.(mpd|m3u8)$ { 52 | add_header Cache-Control no-cache; 53 | rewrite ^/(dash|hls)/(.*) /manifest/$1/$2 break; 54 | proxy_pass http://localhost:2222; 55 | } 56 | 57 | location ~* ^/(hls|dash)/.*.(ts|m4s)$ { 58 | add_header Cache-Control no-cache; 59 | rewrite ^/(dash|hls)/(.*) /segment/$1/$2 break; 60 | proxy_pass http://localhost:2222; 61 | } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /ad-insertion/frontend/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_ad_insertion_frontend" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | OPTIONS=("--volume=${DIR}/../../volume/ad/cache/dash:/var/www/adinsert/dash:ro" "--volume=${DIR}/../../volume/ad/cache/hls:/var/www/adinsert/hls:ro" "--volume=${DIR}/../../volume/ad/static:/var/www/skipped:ro") 6 | 7 | . "$DIR/../../script/shell.sh" 8 | -------------------------------------------------------------------------------- /ad-insertion/kafka2db/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "ssai_kafka2db") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | add_dependencies(build_${service} build_ssai_common) 4 | -------------------------------------------------------------------------------- /ad-insertion/kafka2db/Dockerfile: -------------------------------------------------------------------------------- 1 | 2 | FROM ssai_common 3 | 4 | COPY *.py /home/ 5 | CMD ["/bin/bash","-c","/home/main.py"] 6 | 7 | #### 8 | ARG USER=docker 9 | ARG GROUP=docker 10 | ARG UID 11 | ARG GID 12 | ## must use ; here to ignore user exist status code 13 | RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} ${GROUP}; \ 14 | [ ${UID} -gt 0 ] && useradd -d /home -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} ${USER}; \ 15 | chown -R ${UID}:${GID} /home 16 | USER ${UID} 17 | #### 18 | 19 | -------------------------------------------------------------------------------- /ad-insertion/kafka2db/Dockerfile.1.kafka: -------------------------------------------------------------------------------- 1 | # ssai_kafka 2 | 3 | FROM wurstmeister/kafka:2.12-2.4.0 4 | 5 | RUN sed -i 's/\/kafka\/kafka/\/opt\/kafka\/logs\/kafka/' /usr/bin/start-kafka.sh && \ 6 | mkdir /opt/kafka/logs 7 | 8 | RUN addgroup kafka && \ 9 | adduser -D -H -G kafka kafka && \ 10 | chown -R kafka:kafka /opt /kafka 11 | 12 | USER kafka 13 | -------------------------------------------------------------------------------- /ad-insertion/kafka2db/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_kafka2db" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . "$DIR/../../script/build.sh" 7 | -------------------------------------------------------------------------------- /ad-insertion/kafka2db/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_kafka2db" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . 
"$DIR/../../script/shell.sh" 7 | 8 | -------------------------------------------------------------------------------- /cdn/.dockignore: -------------------------------------------------------------------------------- 1 | README.md 2 | *.sh 3 | CMakeLists.txt 4 | -------------------------------------------------------------------------------- /cdn/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "ssai_cdn_service") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | add_dependencies(build_${service} build_ssai_common) 4 | -------------------------------------------------------------------------------- /cdn/Dockerfile: -------------------------------------------------------------------------------- 1 | 2 | FROM ssai_common 3 | 4 | COPY *.conf /etc/nginx/ 5 | COPY *.py /home/ 6 | CMD ["/bin/bash","-c","/home/main.py&/usr/sbin/nginx"] 7 | EXPOSE 8080 8 | 9 | #### 10 | ARG USER=docker 11 | ARG GROUP=docker 12 | ARG UID 13 | ARG GID 14 | ## must use ; here to ignore user exist status code 15 | RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} ${GROUP}; \ 16 | [ ${UID} -gt 0 ] && useradd -d /home -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} ${USER}; \ 17 | touch /var/run/nginx.pid && \ 18 | mkdir -p /var/log/nginx /var/lib/nginx /var/www/cache && \ 19 | chown -R ${UID}:${GID} /home /var/run/nginx.pid /var/www /var/log/nginx /var/lib/nginx 20 | USER ${UID} 21 | #### 22 | 23 | -------------------------------------------------------------------------------- /cdn/README.md: -------------------------------------------------------------------------------- 1 | The CDN service is a caching proxy of any content from the [AD Insertion](../ad-insertion/README.md) service or the [Content Provider](../content-provider/README.md) service. 2 | 3 | In addition, the CDN service implements a debug hook to show server activities and statistics. 4 | 5 | ### Interface: 6 | 7 | The CDN service exposes the following interface(s) on port 8443: 8 | 9 | | Path | Description | 10 | |----|------| 11 | |GET /|Proxy to the [AD Insertion](../ad-insertion/README.md) service. | 12 | |GET /api/debug/analytics | Debug only: query the database for any analytics data to show on the UI analytics panel. | 13 | |GET /api/debug | Debug only: listen on the Kafka topics and open a websocket connection to UI for showing messages in the debug console and charts. 
| 14 | -------------------------------------------------------------------------------- /cdn/analytics.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from urllib.parse import unquote 4 | from tornado import web 5 | from db import DataBase 6 | import json 7 | 8 | class AnalyticsHandler(web.RequestHandler): 9 | def __init__(self, app, request, **kwargs): 10 | super(AnalyticsHandler, self).__init__(app, request, **kwargs) 11 | self._db=DataBase() 12 | 13 | def check_origin(self, origin): 14 | return True 15 | 16 | def get(self): 17 | stream=unquote(str(self.get_argument("stream"))).split("/")[-2] 18 | start=float(self.get_argument("start")) 19 | end=float(self.get_argument("end")) 20 | r=self._db.query(stream, [start, end]) 21 | 22 | self.set_status(200,'OK') 23 | self.set_header('Content-Type','application/json') 24 | self.write(json.dumps(r)) 25 | -------------------------------------------------------------------------------- /cdn/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_cdn_service" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . "$DIR/../script/build.sh" 7 | -------------------------------------------------------------------------------- /cdn/debug.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from tornado import websocket, gen, ioloop 4 | from messaging import Consumer 5 | import json 6 | 7 | kafka_topics=["content_provider_sched","seg_analytics_sched","ad_transcode_sched","seg_analytics_data","workloads","adstats","video_analytics_fps"] 8 | 9 | class DebugHandler(websocket.WebSocketHandler): 10 | def __init__(self, app, request, **kwargs): 11 | super(DebugHandler, self).__init__(app, request, **kwargs) 12 | 13 | def check_origin(self, origin): 14 | return True 15 | 16 | def open(self): 17 | self.set_nodelay(True) 18 | jobs=[] 19 | 20 | ioloop.IOLoop.current().spawn_callback(self._read_topics) 21 | 22 | def data_received(self, chunk): 23 | pass 24 | 25 | @gen.coroutine 26 | def _read_topics(self): 27 | jobs=[] 28 | for topic in kafka_topics: 29 | jobs.append(self._read_topic(topic)) 30 | yield jobs 31 | 32 | @gen.coroutine 33 | def _read_topic(self, topic): 34 | c = Consumer(None) 35 | while True: 36 | try: 37 | for msg in c.debug(topic): 38 | if msg: 39 | yield self.write_message(json.dumps({"topic":topic,"value":msg})) 40 | else: 41 | yield gen.sleep(0.05) 42 | 43 | except Exception as e: 44 | yield self.write_message("Exception:"+str(e)) 45 | print(str(e)) 46 | 47 | # sleep and retry 48 | yield gen.sleep(10) 49 | -------------------------------------------------------------------------------- /cdn/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from tornado import ioloop, web 4 | from tornado.options import define, options, parse_command_line 5 | from debug import DebugHandler 6 | from analytics import AnalyticsHandler 7 | 8 | app = web.Application([ 9 | (r'/debug',DebugHandler), 10 | (r'/analytics',AnalyticsHandler), 11 | ]) 12 | 13 | if __name__ == "__main__": 14 | define("port", default=2222, help="the binding port", type=int) 15 | define("ip", default="127.0.0.1", help="the binding ip") 16 | parse_command_line() 17 | print("Listening to " + options.ip + ":" + str(options.port)) 18 | app.listen(options.port, address=options.ip) 19 | ioloop.IOLoop.instance().start() 20 | 
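# --- Illustrative usage sketch (editorial addition, not a file in the repository) -----------
# cdn/README.md above documents GET /api/debug/analytics on the CDN service, and main.py
# registers the matching AnalyticsHandler at /analytics behind the nginx proxy. The minimal,
# hypothetical client below shows how that handler expects its query arguments: "stream" is a
# manifest/segment-style path whose second-to-last component is taken as the stream name, and
# "start"/"end" bound the query time window in seconds. The host/port, the stream path, the
# assumption that nginx maps /api/debug/analytics to /analytics, and verify=False (for a
# self-signed certificate) are illustrative assumptions only; python3-requests is available in
# the ssai_common image.
import requests

CDN = "https://localhost:8443"                 # assumed external address of the CDN service
params = {
    "stream": "/dash/WalkScene1/index.mpd",    # hypothetical stream URL; handler keeps "WalkScene1"
    "start": 0.0,                              # window start (seconds)
    "end": 60.0,                               # window end (seconds)
}
r = requests.get(CDN + "/api/debug/analytics", params=params, verify=False)
r.raise_for_status()
print(r.json())                                # analytics records returned by DataBase.query()
# ---------------------------------------------------------------------------------------------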
-------------------------------------------------------------------------------- /cdn/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_cdn_service" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . "$DIR/../script/shell.sh" 7 | -------------------------------------------------------------------------------- /common/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "ssai_common") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | -------------------------------------------------------------------------------- /common/Dockerfile: -------------------------------------------------------------------------------- 1 | 2 | FROM ubuntu:18.04 3 | 4 | RUN DEBIAN_FRONTEND=noninteractive apt-get update && apt-get install -y -q --no-install-recommends nginx python3-tornado python3-urllib3 python3-requests python3-psutil python3-pip && rm -rf /var/lib/apt/lists/* && \ 5 | pip3 install 'kafka-python>=1.4.7' 'kazoo>=2.6.1' 6 | 7 | COPY *.py /home/ 8 | -------------------------------------------------------------------------------- /common/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_common" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . "$DIR/../script/build.sh" 7 | -------------------------------------------------------------------------------- /common/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_common" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . "$DIR/../script/shell.sh" 7 | -------------------------------------------------------------------------------- /common/workload.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from messaging import Producer 4 | import socket 5 | import datetime 6 | import psutil 7 | import time 8 | import json 9 | import sys 10 | 11 | kafka_topic="workloads" 12 | 13 | if __name__ == "__main__": 14 | prefix=""; 15 | if len(sys.argv)>1: prefix=sys.argv[1] 16 | instance=socket.gethostname()[0:3] 17 | machine=prefix+instance 18 | 19 | while True: 20 | try: 21 | p=Producer() 22 | while True: 23 | p.send(kafka_topic,json.dumps({ 24 | "time":datetime.datetime.utcnow().isoformat(), 25 | "machine": machine, 26 | "workload": psutil.cpu_percent(), 27 | })); 28 | time.sleep(1); 29 | p.close() 30 | except Exception as e: 31 | print(str(e)) 32 | time.sleep(2) 33 | -------------------------------------------------------------------------------- /common/zkdata.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from kazoo.client import KazooClient, KazooState 4 | from kazoo.exceptions import NoNodeError, NodeExistsError 5 | import traceback 6 | import json 7 | import time 8 | 9 | ZK_HOSTS='zookeeper-service:2181' 10 | 11 | class ZKData(object): 12 | def __init__(self): 13 | super(ZKData,self).__init__() 14 | options={"max_tries":-1,"max_delay":5,"ignore_expire":True} 15 | self._zk=KazooClient(hosts=ZK_HOSTS,connection_retry=options) 16 | try: 17 | self._zk.start(timeout=3600) 18 | except: 19 | print(traceback.format_exc(), flush=True) 20 | 21 | def set(self, path, value): 22 | value=json.dumps(value).encode('utf-8') 23 | try: 24 | self._zk.create(path, value, makepath=True) 25 | except NodeExistsError: 26 | self._zk.set(path,value) 27 | 28 | 
def get(self, path): 29 | try: 30 | value, stat= self._zk.get(path) 31 | if not value: return {} 32 | return json.loads(value.decode('utf-8')) 33 | except Exception as e: 34 | return {} 35 | 36 | def close(self): 37 | self._zk.stop() 38 | self._zk.close() 39 | -------------------------------------------------------------------------------- /common/zkstate.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from kazoo.client import KazooClient 4 | from kazoo.exceptions import NoNodeError, NodeExistsError 5 | from kazoo.protocol.states import KazooState 6 | import traceback 7 | import time 8 | 9 | ZK_HOSTS='zookeeper-service:2181' 10 | 11 | class ZKState(object): 12 | def __init__(self, path, name=None): 13 | super(ZKState,self).__init__() 14 | options={"max_tries":-1,"max_delay":5,"ignore_expire":True} 15 | self._zk=KazooClient(hosts=ZK_HOSTS,connection_retry=options) 16 | try: 17 | self._zk.start(timeout=3600) 18 | except: 19 | print(traceback.format_exc(), flush=True) 20 | self._path=path 21 | self._name="" if name is None else name+"." 22 | self._zk.ensure_path(path) 23 | 24 | def processed(self): 25 | return self._zk.exists(self._path+"/"+self._name+"complete") 26 | 27 | def process_start(self): 28 | if self.processed(): return False 29 | try: 30 | self._zk.create(self._path+"/"+self._name+"processing",ephemeral=True) 31 | return True 32 | except NodeExistsError: # another process wins 33 | return False 34 | 35 | def process_end(self): 36 | try: 37 | self._zk.create(self._path+"/"+self._name+"complete") 38 | except NodeExistsError: 39 | pass 40 | 41 | def process_abort(self): 42 | # the ephemeral node will be deleted upon close 43 | pass 44 | 45 | def close(self): 46 | self._zk.stop() 47 | self._zk.close() 48 | -------------------------------------------------------------------------------- /content-provider/.dockerignore: -------------------------------------------------------------------------------- 1 | archive/* 2 | html/* 3 | *.sh 4 | CMakeLists.txt 5 | -------------------------------------------------------------------------------- /content-provider/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | include("${CMAKE_SOURCE_DIR}/script/scan-all.cmake") 2 | -------------------------------------------------------------------------------- /content-provider/archive/.dockerignore: -------------------------------------------------------------------------------- 1 | CMakeLists.txt 2 | *.mp4 3 | *.sh 4 | dash/* 5 | hls/* 6 | -------------------------------------------------------------------------------- /content-provider/archive/.gitignore: -------------------------------------------------------------------------------- 1 | *.mp4 2 | *.png 3 | dash/* 4 | hls/* 5 | -------------------------------------------------------------------------------- /content-provider/archive/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "ssai_content_provider_archive") 2 | add_custom_target(dash "${CMAKE_CURRENT_SOURCE_DIR}/build.sh" "dash" "${MINRESOLUTION}" DEPENDS build_${service}) 3 | add_custom_target(hls "${CMAKE_CURRENT_SOURCE_DIR}/build.sh" "hls" "${MINRESOLUTION}" DEPENDS build_${service}) 4 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 5 | -------------------------------------------------------------------------------- /content-provider/archive/Dockerfile: 
-------------------------------------------------------------------------------- 1 | 2 | FROM openvisualcloud/xeon-ubuntu1804-media-ffmpeg:20.7 3 | RUN apt-get update && apt-get install -y -q youtube-dl bc wget && rm -rf /var/lib/apt/lists/*; 4 | 5 | #### 6 | ARG USER=docker 7 | ARG GROUP=docker 8 | ARG UID 9 | ARG GID 10 | ## must use ; here to ignore user exist status code 11 | RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} ${GROUP}; \ 12 | [ ${UID} -gt 0 ] && useradd -d /home -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} ${USER}; \ 13 | chown -R ${UID}:${GID} /home 14 | USER ${UID} 15 | #### 16 | -------------------------------------------------------------------------------- /content-provider/archive/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_content_provider_archive" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | OPTIONS=("--volume=$DIR/../../volume/video:/mnt:rw" "--volume=$DIR:/home:ro") 6 | 7 | . "$DIR/../../script/shell.sh" 8 | -------------------------------------------------------------------------------- /content-provider/frontend/.dockerignore: -------------------------------------------------------------------------------- 1 | CMakeLists.txt 2 | *.sh 3 | -------------------------------------------------------------------------------- /content-provider/frontend/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "ssai_content_provider_frontend") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | add_dependencies(build_${service} build_ssai_common) 4 | -------------------------------------------------------------------------------- /content-provider/frontend/Dockerfile: -------------------------------------------------------------------------------- 1 | 2 | FROM ssai_common 3 | 4 | COPY *.py /home/ 5 | COPY *.conf /etc/nginx/ 6 | COPY html /var/www/html 7 | CMD ["/bin/bash","-c","/home/main.py&/usr/sbin/nginx"] 8 | VOLUME ["/var/www/archive","/var/www/dash","/var/www/hls"] 9 | 10 | #### 11 | ARG USER=docker 12 | ARG GROUP=docker 13 | ARG UID 14 | ARG GID 15 | ## must use ; here to ignore user exist status code 16 | RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} ${GROUP}; \ 17 | [ ${UID} -gt 0 ] && useradd -d /home -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} ${USER}; \ 18 | touch /var/run/nginx.pid && \ 19 | mkdir -p /var/log/nginx /var/lib/nginx /var/www/cache /var/www/video && \ 20 | chown -R ${UID}:${GID} /home /var/run/nginx.pid /var/www /var/log/nginx /var/lib/nginx 21 | USER ${UID} 22 | #### 23 | -------------------------------------------------------------------------------- /content-provider/frontend/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_content_provider_frontend" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . 
"$DIR/../../script/build.sh" 7 | -------------------------------------------------------------------------------- /content-provider/frontend/html/css/app.css: -------------------------------------------------------------------------------- 1 | 2 | form .form-icons { 3 | text-align: center; 4 | } 5 | 6 | form .form-icons h4 { 7 | margin-bottom: 1rem; 8 | } 9 | 10 | form .form-icons .input-group-label { 11 | background-color: #1779ba; 12 | border-color: #1779ba; 13 | } 14 | 15 | form .form-icons .input-group-field { 16 | border-color: #1779ba; 17 | } 18 | 19 | form .form-icons .fa { 20 | color: white; 21 | width: 1rem; 22 | } 23 | -------------------------------------------------------------------------------- /content-provider/frontend/html/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/favicon.ico -------------------------------------------------------------------------------- /content-provider/frontend/html/icon/foundation-icons.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/icon/foundation-icons.eot -------------------------------------------------------------------------------- /content-provider/frontend/html/icon/foundation-icons.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/icon/foundation-icons.ttf -------------------------------------------------------------------------------- /content-provider/frontend/html/icon/foundation-icons.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/icon/foundation-icons.woff -------------------------------------------------------------------------------- /content-provider/frontend/html/image/Jack.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/Jack.jpg -------------------------------------------------------------------------------- /content-provider/frontend/html/image/Jenny.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/Jenny.jpg -------------------------------------------------------------------------------- /content-provider/frontend/html/image/Jocelyn.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/Jocelyn.jpg -------------------------------------------------------------------------------- /content-provider/frontend/html/image/John.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/John.jpg -------------------------------------------------------------------------------- /content-provider/frontend/html/image/Mike.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/Mike.jpg -------------------------------------------------------------------------------- /content-provider/frontend/html/image/Unknown.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/Unknown.jpg -------------------------------------------------------------------------------- /content-provider/frontend/html/image/Victor.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/Victor.jpg -------------------------------------------------------------------------------- /content-provider/frontend/html/image/anger.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/anger.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/happy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/happy.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/home-url.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/home-url.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/intel-logo-white-100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/intel-logo-white-100.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/neutral.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/neutral.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_10_cow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_10_cow.png 
-------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_11_diningtable.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_11_diningtable.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_12_dog.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_12_dog.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_13_horse.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_13_horse.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_14_motorbike.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_14_motorbike.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_15_person.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_15_person.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_16_pottedplant.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_16_pottedplant.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_17_sheep.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_17_sheep.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_18_sofa.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_18_sofa.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_19_train.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_19_train.png 
-------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_1_aeroplane.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_1_aeroplane.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_1_face.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_1_face.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_20_tvmonitor.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_20_tvmonitor.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_2_bicycle.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_2_bicycle.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_3_bird.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_3_bird.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_4_boat.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_4_boat.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_5_bottle.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_5_bottle.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_6_bus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_6_bus.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_7_car.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_7_car.png 
-------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_8_cat.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_8_cat.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/object_9_chair.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/object_9_chair.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/placeholder.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/placeholder.jpg -------------------------------------------------------------------------------- /content-provider/frontend/html/image/sad.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/sad.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/screen.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/screen.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/surprise.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/surprise.png -------------------------------------------------------------------------------- /content-provider/frontend/html/image/vcac-a.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/vcac-a.gif -------------------------------------------------------------------------------- /content-provider/frontend/html/image/xeon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/content-provider/frontend/html/image/xeon.png -------------------------------------------------------------------------------- /content-provider/frontend/html/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | AD Insertion E2E Pipeline 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /content-provider/frontend/html/js/app-api.js: 
-------------------------------------------------------------------------------- 1 | 2 | var apiHost={ 3 | playList: function (name) { 4 | var url="api/playlist"; 5 | var args= { name: name }; 6 | console.log("GET "+url+"?"+JSON.stringify(args)); 7 | return $.get(url, args); 8 | }, 9 | click: function (name, x, y, t, stream) { 10 | var url="api/click"; 11 | var args= { x: x, y:y, name: name, t:t, stream:stream } 12 | console.log("POST "+url+"?"+JSON.stringify(args)); 13 | return $.post(url, args); 14 | }, 15 | debug: function (ondata) { 16 | if (!!window.Worker) { 17 | var worker=new Worker('js/app-worker.js'); 18 | worker.onmessage=ondata; 19 | worker.postMessage(window.location.protocol.replace("http","ws")+window.location.host+window.location.pathname+"api/debug"); 20 | } 21 | }, 22 | analytics: function (stream, start, end) { 23 | var url="api/debug/analytics"; 24 | var args= { stream: stream, start: start, end:end } 25 | //console.log("GET "+url+"?"+JSON.stringify(args)); 26 | return $.get(url, args); 27 | }, 28 | }; 29 | -------------------------------------------------------------------------------- /content-provider/frontend/html/js/app-worker.js: -------------------------------------------------------------------------------- 1 | 2 | onmessage=function (e) { 3 | console.log("GET "+e.data); 4 | var web_socket=new WebSocket(e.data); 5 | web_socket.onclose=function() { 6 | console.log("websocket closed"); 7 | }; 8 | web_socket.onerror=function() { 9 | console.log("websocket error"); 10 | }; 11 | web_socket.onmessage=function (e) { 12 | postMessage(e.data); 13 | }; 14 | }; 15 | 16 | -------------------------------------------------------------------------------- /content-provider/frontend/html/js/app.js: -------------------------------------------------------------------------------- 1 | 2 | function spec(key) { 3 | var kvs=window.location.search.substring(1).split('&'); 4 | for (var i = 0; i < kvs.length; i++) { 5 | var kv=kvs[i].split('='); 6 | if (kv[0] === key) return kv[1] === undefined ? 
true : decodeURIComponent(kv[1]); 7 | } 8 | return false; 9 | } 10 | 11 | $(document).foundation(); 12 | $(window).bind("load", function () { 13 | if (spec("header")=="off") $("[ui-header]").hide(); 14 | if (spec("playlist")=="off") $("[playlist-section]").hide(); 15 | if (spec("videourl")=="off") $("#player [video-section] .input-group").hide(); 16 | if (spec("seq")) settings.user("u"+spec("seq")); 17 | if (spec("benchmark")) settings.algorithms(spec("benchmark")+" "); 18 | if (spec("window")) settings.analytics_window(spec("window")); 19 | 20 | $(".top-bar").trigger(":initpage"); 21 | $("#player").trigger(":update"); 22 | 23 | $("[debug-console]").trigger(":initpage"); 24 | $("[adstats-console]").trigger(":initpage"); 25 | $("[workloads-console]").trigger(":initpage"); 26 | $("[analytics-console]").trigger(":initpage"); 27 | $("[analyticPerf-console]").trigger(":initpage"); 28 | 29 | }); 30 | -------------------------------------------------------------------------------- /content-provider/frontend/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from tornado import ioloop, web 4 | from tornado.options import define, options, parse_command_line 5 | from playlist import PlayListHandler 6 | from schedule import ScheduleHandler 7 | 8 | app = web.Application([ 9 | (r'/playlist',PlayListHandler), 10 | (r'/schedule/.*',ScheduleHandler), 11 | ]) 12 | 13 | if __name__ == "__main__": 14 | define("port", default=2222, help="the binding port", type=int) 15 | define("ip", default="127.0.0.1", help="the binding ip") 16 | parse_command_line() 17 | print("Listening to " + options.ip + ":" + str(options.port)) 18 | app.listen(options.port, address=options.ip) 19 | ioloop.IOLoop.instance().start() 20 | -------------------------------------------------------------------------------- /content-provider/frontend/playlist.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from tornado import web, gen 4 | from tornado.httpclient import AsyncHTTPClient 5 | from os import listdir 6 | import json 7 | 8 | account_service_url="http://account-service:8080" 9 | archive_root="/var/www/archive" 10 | 11 | class PlayListHandler(web.RequestHandler): 12 | def __init__(self, app, request, **kwargs): 13 | super(PlayListHandler, self).__init__(app, request, **kwargs) 14 | self._cache={} 15 | 16 | def check_origin(self, origin): 17 | return True 18 | 19 | @gen.coroutine 20 | def get(self): 21 | name = str(self.get_argument("name")) 22 | 23 | if name not in self._cache: 24 | http_client=AsyncHTTPClient() 25 | r=yield http_client.fetch(account_service_url+"/acct?name="+name) 26 | self._cache[name]=json.loads(r.body) 27 | 28 | info=self._cache[name] 29 | if "subscription" not in info: 30 | self.set_status(404,"USER NOT FOUND") 31 | return 32 | print(info) 33 | 34 | try: 35 | streams=[s for s in listdir(archive_root) if s.endswith(".mp4")] 36 | except: 37 | self.set_status(404,"VIDEO NOT FOUND") 38 | return 39 | 40 | if info["subscription"] == "basic": 41 | streams=streams[0:3] 42 | print(streams) 43 | 44 | self.set_status(200,"OK") 45 | self.set_header("Content-Type", "application/json") 46 | types=[("hls",".m3u8"),("dash",".mpd")] 47 | self.write(json.dumps([{"name":t[0]+"-"+s,"url":t[0]+"/"+s+"/index"+t[1],"img":"thumbnail/"+s+".png"} for t in types for s in streams])) 48 | -------------------------------------------------------------------------------- /content-provider/frontend/schedule.py: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from tornado import web, gen 4 | from os.path import isfile 5 | from messaging import Producer 6 | import time 7 | 8 | kafka_topic="content_provider_sched" 9 | dashls_root="/var/www/video" 10 | 11 | class ScheduleHandler(web.RequestHandler): 12 | def __init__(self, app, request, **kwargs): 13 | super(ScheduleHandler, self).__init__(app, request, **kwargs) 14 | 15 | def check_origin(self, origin): 16 | return True 17 | 18 | @gen.coroutine 19 | def get(self): 20 | stream=self.request.uri.replace("/schedule/","") 21 | 22 | # schedule producing the stream 23 | print("request received to process stream: "+stream, flush=True) 24 | producer=Producer() 25 | producer.send(kafka_topic,stream) 26 | producer.close() 27 | 28 | # wait until file is available, return it 29 | start_time=time.time() 30 | while time.time()-start_time<60: 31 | if isfile(dashls_root+"/"+stream): 32 | self.set_header('X-Accel-Redirect','/'+stream) 33 | self.set_status(200, "OK") 34 | return 35 | yield gen.sleep(0.5) 36 | 37 | # wait too long, skip this REST API 38 | self.set_status(503, "Request scheduled") 39 | -------------------------------------------------------------------------------- /content-provider/frontend/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_content_provider_frontend" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | OPTIONS=("--volume=${DIR}/../../volume/video/archive:/var/www/archive:ro" "--volume=${DIR}/../../volume/video/dash:/var/www/dash:ro" "--volume=${DIR}/../../volume/video/hls:/var/www/hls:ro") 6 | 7 | . "$DIR/../../script/shell.sh" 8 | -------------------------------------------------------------------------------- /content-provider/transcode/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "ssai_content_transcode") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | add_dependencies(build_${service} build_ssai_common) 4 | -------------------------------------------------------------------------------- /content-provider/transcode/Dockerfile: -------------------------------------------------------------------------------- 1 | 2 | FROM openvisualcloud/xeon-ubuntu1804-media-ffmpeg:20.7 3 | 4 | RUN DEBIAN_FRONTEND=noninteractive apt-get update && apt-get install -y -q --no-install-recommends nginx python3-tornado python3-urllib3 python3-requests python3-psutil python3-pip && rm -rf /var/lib/apt/lists/* && \ 5 | pip3 install 'kafka-python>=1.4.7' 'kazoo>=2.6.1' 6 | 7 | COPY --from=ssai_common /home/*.py /home/ 8 | COPY *.py /home/ 9 | CMD ["/bin/bash","-c","/home/workload.py tx-&/home/main.py"] 10 | 11 | #### 12 | ARG USER=docker 13 | ARG GROUP=docker 14 | ARG UID 15 | ARG GID 16 | ## must use ; here to ignore user exist status code 17 | RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} ${GROUP}; \ 18 | [ ${UID} -gt 0 ] && useradd -d /home -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} ${USER}; \ 19 | chown -R ${UID}:${GID} /home 20 | USER ${UID} 21 | #### 22 | -------------------------------------------------------------------------------- /content-provider/transcode/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_content_transcode" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . 
"$DIR/../../script/build.sh" 7 | -------------------------------------------------------------------------------- /content-provider/transcode/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_content_transcode" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | OPTIONS=("--volume=${DIR}/../../volume/video/archive:/var/www/archive:ro" "--volume=${DIR}/../../volume/video/dash:/var/www/dash:rw" "--volume=${DIR}/../../volume/video/hls:/var/www/hls:rw") 6 | 7 | . "$DIR/../../script/shell.sh" 8 | -------------------------------------------------------------------------------- /content-provider/transcode/workload.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from messaging import Producer 4 | import socket 5 | import datetime 6 | import psutil 7 | import time 8 | import json 9 | import sys 10 | 11 | kafka_topic="workloads" 12 | 13 | if __name__ == "__main__": 14 | prefix=""; 15 | if len(sys.argv)>1: prefix=sys.argv[1] 16 | instance=socket.gethostname()[0:3] 17 | machine=prefix+instance 18 | 19 | while True: 20 | try: 21 | p=Producer() 22 | while True: 23 | p.send(kafka_topic,json.dumps({ 24 | "time":datetime.datetime.utcnow().isoformat(), 25 | "machine": machine, 26 | "workload": psutil.cpu_percent(), 27 | })); 28 | time.sleep(1); 29 | p.close() 30 | except Exception as e: 31 | print(str(e)) 32 | time.sleep(2) 33 | -------------------------------------------------------------------------------- /deployment/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | include("${CMAKE_SOURCE_DIR}/script/scan-all.cmake") 2 | -------------------------------------------------------------------------------- /deployment/certificate/.dockerignore: -------------------------------------------------------------------------------- 1 | .rnd 2 | *.key 3 | *.pem 4 | *.crt 5 | -------------------------------------------------------------------------------- /deployment/certificate/.gitignore: -------------------------------------------------------------------------------- 1 | .rnd 2 | *.key 3 | *.crt 4 | -------------------------------------------------------------------------------- /deployment/certificate/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "ssai_self_certificate") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | add_custom_target(sign_certificate ${CMAKE_CURRENT_SOURCE_DIR}/self-sign.sh) 4 | -------------------------------------------------------------------------------- /deployment/certificate/Dockerfile: -------------------------------------------------------------------------------- 1 | 2 | FROM ubuntu:18.04 3 | RUN apt-get update && apt-get install -y openssh-server 4 | 5 | #### 6 | ARG USER=docker 7 | ARG GROUP=docker 8 | ARG UID 9 | ARG GID 10 | ## must use ; here to ignore user exist status code 11 | RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} ${GROUP}; \ 12 | [ ${UID} -gt 0 ] && useradd -d /home -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} ${USER}; \ 13 | chown -R ${UID}:${GID} /home 14 | USER ${UID} 15 | #### 16 | -------------------------------------------------------------------------------- /deployment/certificate/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_self_certificate" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . 
"$DIR/../../script/build.sh" 7 | -------------------------------------------------------------------------------- /deployment/certificate/self-sign.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_self_certificate" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | case "$(cat /proc/1/sched | head -n 1)" in 7 | *self-sign*) 8 | openssl req -x509 -nodes -days 30 -newkey rsa:4096 -keyout /home/self.key -out /home/self.crt << EOL 9 | US 10 | OR 11 | Portland 12 | Oregon 13 | Data Center Group 14 | Intel Corporation 15 | $1 16 | nobody@intel.com 17 | EOL 18 | chmod 640 "/home/self.key" 19 | chmod 644 "/home/self.crt" 20 | ;; 21 | *) 22 | OPTIONS=("--volume=$DIR:/home:rw") 23 | . "$DIR/../../script/shell.sh" /home/self-sign.sh $(hostname -f) 24 | ;; 25 | esac 26 | -------------------------------------------------------------------------------- /deployment/certificate/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="ssai_self_certificate" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . "$DIR/../../script/shell.sh" 7 | -------------------------------------------------------------------------------- /deployment/docker-swarm/.gitignore: -------------------------------------------------------------------------------- 1 | docker-compose.yml 2 | *.key 3 | *.crt 4 | *.pem 5 | .rnd 6 | -------------------------------------------------------------------------------- /deployment/docker-swarm/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "docker_swarm") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | include("${CMAKE_SOURCE_DIR}/script/deployment.cmake") 4 | add_dependencies(start_${service} sign_certificate) 5 | -------------------------------------------------------------------------------- /deployment/docker-swarm/README.md: -------------------------------------------------------------------------------- 1 | 2 | ### Docker Swam Single Machine Deployment 3 | 4 | Initialize docker swarm if you have not: 5 | ``` 6 | sudo docker swarm init 7 | ``` 8 | Then start/stop services as follows: 9 | ``` 10 | make start_docker_swarm 11 | make stop_docker_swarm 12 | ``` 13 | 14 | ### Docker Swam Multiple Nodes Deployment 15 | 16 | Follow the [instructions](https://docs.docker.com/engine/swarm/swarm-tutorial/create-swarm) to create a swarm. Then setup each swarm node as follows: 17 | - All swarm nodes must have the same user (uid) and group (gid). 18 | - Setup NFS to share the [volume](../../volume) directory. 19 | - Each swarm node must mount the [volume](../../volume) directory at the same absolute path. 20 | 21 | Finally, start/stop services as follows: 22 | ``` 23 | make update # optional for private registry 24 | make start_docker_swarm 25 | make stop_docker_swarm 26 | ``` 27 | 28 | --- 29 | 30 | The `make update` command uploads the sample images to each worker node. If you prefer to use a private docker registry, configure the sample, `cmake -DREGISTRY= ..`, to push the sample images to the private docker registry after each build. 
31 | 32 | --- 33 | 34 | ### Docker Swarm Deployment with Intel VCAC-A 35 | 36 | Initialize Intel VCAC-A if you have not: 37 | 38 | ``` 39 | script/setup-vcac-a.sh 40 | ``` 41 | 42 | Then start/stop services as follows: 43 | ``` 44 | make update # optional for private registry 45 | make start_docker_swarm 46 | make stop_docker_swarm 47 | ``` 48 | 49 | ### See Also: 50 | 51 | - [Build Configuration](../../doc/cmake.md) 52 | - [Utility Script](../../doc/script.md) 53 | 54 | -------------------------------------------------------------------------------- /deployment/docker-swarm/account.m4: -------------------------------------------------------------------------------- 1 | 2 | account-service: 3 | image: defn(`REGISTRY_PREFIX')ssai_account_service:latest 4 | environment: 5 | NO_PROXY: "*" 6 | no_proxy: "*" 7 | networks: 8 | - appnet 9 | deploy: 10 | replicas: 1 11 | placement: 12 | constraints: 13 | - node.labels.vcac_zone!=yes 14 | 15 | -------------------------------------------------------------------------------- /deployment/docker-swarm/ad-content.m4: -------------------------------------------------------------------------------- 1 | 2 | ad-content-service: 3 | image: defn(`REGISTRY_PREFIX')ssai_ad_content_frontend:latest 4 | environment: 5 | NO_PROXY: "*" 6 | no_proxy: "*" 7 | volumes: 8 | - ${AD_ARCHIVE_VOLUME}:/var/www/archive:ro 9 | networks: 10 | - appnet 11 | deploy: 12 | replicas: 1 13 | placement: 14 | constraints: 15 | - node.role==manager 16 | - node.labels.vcac_zone!=yes 17 | 18 | -------------------------------------------------------------------------------- /deployment/docker-swarm/ad-decision.m4: -------------------------------------------------------------------------------- 1 | 2 | ad-decision-service: 3 | image: defn(`REGISTRY_PREFIX')ssai_ad_decision_frontend:latest 4 | environment: 5 | NO_PROXY: "*" 6 | no_proxy: "*" 7 | networks: 8 | - appnet 9 | deploy: 10 | replicas: 1 11 | placement: 12 | constraints: 13 | - node.labels.vcac_zone!=yes 14 | 15 | -------------------------------------------------------------------------------- /deployment/docker-swarm/ad-insertion.m4: -------------------------------------------------------------------------------- 1 | 2 | ad-insertion-service: 3 | image: defn(`REGISTRY_PREFIX')ssai_ad_insertion_frontend:latest 4 | volumes: 5 | - ${AD_CACHE_VOLUME}:/var/www/adinsert:ro 6 | - ${AD_STATIC_VOLUME}:/var/www/adstatic:ro 7 | - /etc/localtime:/etc/localtime:ro 8 | environment: 9 | AD_INTERVALS: 8 10 | AD_DURATION: 5 11 | AD_SEGMENT: 5 12 | AD_BACKOFF: 3 13 | AD_BENCH_MODE: 0 14 | AD_ANALYTIC_AHEAD: 3 15 | AD_TRANSCODE_AHEAD: 2 16 | EVERY_NTH_FRAME: 6 17 | NO_PROXY: "*" 18 | no_proxy: "*" 19 | networks: 20 | - appnet 21 | deploy: 22 | replicas: 1 23 | placement: 24 | constraints: 25 | - node.role==manager 26 | - node.labels.vcac_zone!=yes 27 | -------------------------------------------------------------------------------- /deployment/docker-swarm/ad-transcode.m4: -------------------------------------------------------------------------------- 1 | 2 | ad-transcode: 3 | image: defn(`REGISTRY_PREFIX')ssai_ad_transcode:latest 4 | environment: 5 | NO_PROXY: "*" 6 | no_proxy: "*" 7 | volumes: 8 | - ${AD_CACHE_VOLUME}:/var/www/adinsert:rw 9 | - ${AD_SEGMENT_VOLUME}:/var/www/adsegment:ro 10 | - /etc/localtime:/etc/localtime:ro 11 | networks: 12 | - appnet 13 | deploy: 14 | replicas: defn(`NTRANSCODES') 15 | placement: 16 | constraints: 17 | - node.role==manager 18 | - node.labels.vcac_zone!=yes 19 |
-------------------------------------------------------------------------------- /deployment/docker-swarm/analytics.m4: -------------------------------------------------------------------------------- 1 | include(platform.m4) 2 | 3 | analytics: 4 | image: PLATFORM_IMAGE(defn(`REGISTRY_PREFIX')`ssai_analytics_'defn(`FRAMEWORK')`_'defn(`PLATFORM_SUFFIX'):latest) 5 | environment: 6 | PLATFORM_ENV(``NETWORK_PREFERENCE''): "{\"defn(`PLATFORM_DEVICE')\":\"defn(`NETWORK_PREFERENCE')\"}" 7 | PLATFORM_ENV(VA_PRE): "defn(`PLATFORM')-" 8 | PLATFORM_ENV(NO_PROXY): "*" 9 | PLATFORM_ENV(no_proxy): "*" 10 | PLATFORM_ENV_EXTRA()dnl 11 | volumes: 12 | - /etc/localtime:/etc/localtime:ro 13 | PLATFORM_VOLUME_EXTRA()dnl 14 | networks: 15 | - appnet 16 | deploy: 17 | replicas: defn(`NANALYTICS') 18 | placement: 19 | constraints: 20 | - PLATFORM_ZONE() 21 | 22 | -------------------------------------------------------------------------------- /deployment/docker-swarm/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | PLATFORM="${1:-Xeon}" 5 | FRAMEWORK="${2:-gst}" 6 | NANALYTICS="${3:-1}" 7 | NTRANSCODES="${4:-1}" 8 | MINRESOLUTION="${5:-360p}" 9 | NETWORK="${6:-FP32}" 10 | REGISTRY="$7" 11 | 12 | if test -f "${DIR}/docker-compose.yml.m4"; then 13 | echo "Generating docker-compose.yml" 14 | m4 -DREGISTRY_PREFIX=${REGISTRY} -DPLATFORM=${PLATFORM} -DFRAMEWORK=${FRAMEWORK} -DNANALYTICS=${NANALYTICS} -DNTRANSCODES=${NTRANSCODES} -DMINRESOLUTION=${MINRESOLUTION} -DNETWORK_PREFERENCE=${NETWORK} -I "${DIR}" "${DIR}/docker-compose.yml.m4" > "${DIR}/docker-compose.yml" 15 | fi 16 | 17 | -------------------------------------------------------------------------------- /deployment/docker-swarm/cdn.m4: -------------------------------------------------------------------------------- 1 | 2 | cdn-service: 3 | image: defn(`REGISTRY_PREFIX')ssai_cdn_service:latest 4 | ports: 5 | - "443:8443" 6 | environment: 7 | NO_PROXY: "*" 8 | no_proxy: "*" 9 | networks: 10 | - appnet 11 | deploy: 12 | replicas: 1 13 | placement: 14 | constraints: 15 | - node.role==manager 16 | - node.labels.vcac_zone!=yes 17 | secrets: 18 | - source: self_crt 19 | target: /var/run/secrets/self.crt 20 | uid: ${USER_ID} 21 | gid: ${GROUP_ID} 22 | mode: 0444 23 | - source: self_key 24 | target: /var/run/secrets/self.key 25 | uid: ${USER_ID} 26 | gid: ${GROUP_ID} 27 | mode: 0440 28 | 29 | -------------------------------------------------------------------------------- /deployment/docker-swarm/content-provider.m4: -------------------------------------------------------------------------------- 1 | 2 | content-provider-service: 3 | image: defn(`REGISTRY_PREFIX')ssai_content_provider_frontend:latest 4 | environment: 5 | NO_PROXY: "*" 6 | no_proxy: "*" 7 | volumes: 8 | - ${VIDEO_ARCHIVE_VOLUME}:/var/www/archive:ro 9 | - ${VIDEO_CACHE_VOLUME}:/var/www/video:ro 10 | networks: 11 | - appnet 12 | deploy: 13 | replicas: 1 14 | placement: 15 | constraints: 16 | - node.role==manager 17 | - node.labels.vcac_zone!=yes 18 | -------------------------------------------------------------------------------- /deployment/docker-swarm/content-transcode.m4: -------------------------------------------------------------------------------- 1 | 2 | content-transcode: 3 | image: defn(`REGISTRY_PREFIX')ssai_content_transcode:latest 4 | environment: 5 | NO_PROXY: "*" 6 | no_proxy: "*" 7 | volumes: 8 | - ${VIDEO_ARCHIVE_VOLUME}:/var/www/archive:ro 9 | -
${VIDEO_CACHE_VOLUME}:/var/www/video:rw 10 | networks: 11 | - appnet 12 | deploy: 13 | replicas: defn(`NTRANSCODES') 14 | placement: 15 | constraints: 16 | - node.role==manager 17 | - node.labels.vcac_zone!=yes 18 | -------------------------------------------------------------------------------- /deployment/docker-swarm/database.m4: -------------------------------------------------------------------------------- 1 | 2 | database-service: 3 | image: docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.1 4 | environment: 5 | - 'discovery.type=single-node' 6 | - 'NO_PROXY=*' 7 | - 'no_proxy=*' 8 | networks: 9 | - appnet 10 | user: elasticsearch 11 | deploy: 12 | replicas: 1 13 | placement: 14 | constraints: 15 | - node.labels.vcac_zone!=yes 16 | 17 | -------------------------------------------------------------------------------- /deployment/docker-swarm/docker-compose.yml.m4: -------------------------------------------------------------------------------- 1 | 2 | version: "3.7" 3 | 4 | secrets: 5 | cdn-ssl-key: 6 | external: true 7 | cdn-ssl-key: 8 | external: true 9 | 10 | services: 11 | 12 | include(zookeeper.m4) 13 | include(kafka.m4) 14 | include(database.m4) 15 | include(account.m4) 16 | include(ad-decision.m4) 17 | include(ad-content.m4) 18 | include(ad-insertion.m4) 19 | include(kafka2db.m4) 20 | include(cdn.m4) 21 | include(content-provider.m4) 22 | include(content-transcode.m4) 23 | include(ad-transcode.m4) 24 | include(analytics.m4) 25 | include(secret.m4) 26 | include(network.m4) 27 | -------------------------------------------------------------------------------- /deployment/docker-swarm/kafka.m4: -------------------------------------------------------------------------------- 1 | kafka-service: 2 | image: defn(`REGISTRY_PREFIX')ssai_kafka:latest 3 | environment: 4 | KAFKA_BROKER_ID: 1 5 | KAFKA_ZOOKEEPER_CONNECT: zookeeper-service:2181 6 | KAFKA_LISTENERS: PLAINTEXT://:9092 7 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka-service:9092 8 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT 9 | KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT 10 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 11 | KAFKA_DEFAULT_REPLICATION_FACTOR: 1 12 | KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true' 13 | KAFKA_NUM_PARTITIONS: 8 14 | KAFKA_LOG_RETENTION_MINUTES: 30 15 | KAFKA_HEAP_OPTS: '-Xmx1024m -Xms1024m' 16 | KAFKA_LOG4J_ROOT_LOGLEVEL: 'ERROR' 17 | networks: 18 | - appnet 19 | user: kafka 20 | deploy: 21 | replicas: 1 22 | placement: 23 | constraints: 24 | - node.labels.vcac_zone!=yes 25 | 26 | -------------------------------------------------------------------------------- /deployment/docker-swarm/kafka2db.m4: -------------------------------------------------------------------------------- 1 | 2 | kafka2db: 3 | image: defn(`REGISTRY_PREFIX')ssai_kafka2db:latest 4 | environment: 5 | NO_PROXY: "*" 6 | no_proxy: "*" 7 | networks: 8 | - appnet 9 | deploy: 10 | replicas: 1 11 | placement: 12 | constraints: 13 | - node.labels.vcac_zone!=yes 14 | 15 | -------------------------------------------------------------------------------- /deployment/docker-swarm/network.m4: -------------------------------------------------------------------------------- 1 | networks: 2 | appnet: 3 | driver: overlay 4 | attachable: true 5 | -------------------------------------------------------------------------------- /deployment/docker-swarm/platform.m4: -------------------------------------------------------------------------------- 1 | define(`PLATFORM_SUFFIX',translit(defn(`PLATFORM'),`A-Z',`a-z'))dnl 2 | 
define(`PLATFORM_IMAGE',`ifelse(defn(`PLATFORM'),`VCAC-A',defn(`REGISTRY_PREFIX')vcac-container-launcher:latest,define(`_PLATFORM_IMAGE',$1)$1)')dnl 3 | define(`PLATFORM_VOLUME_EXTRA',ifelse(defn(`PLATFORM'),`VCAC-A',dnl 4 | - /var/run/docker.sock:/var/run/docker.sock 5 | ))dnl 6 | define(`PLATFORM_ENV_EXTRA',`ifelse(defn(`PLATFORM'),`VCAC-A',`dnl 7 | VCAC_IMAGE: defn(`_PLATFORM_IMAGE') 8 | ')')dnl 9 | define(`PLATFORM_ENV',ifelse(defn(`PLATFORM'),`VCAC-A',`VCAC_'$1,$1))dnl 10 | define(`PLATFORM_ZONE',`node.labels.vcac_zone ifelse(defn(`PLATFORM'),`VCAC-A',`==',`!=') yes')dnl 11 | define(`PLATFORM_DEVICE',ifelse(defn(`PLATFORM'),`VCAC-A',`HDDL',`CPU'))dnl 12 | -------------------------------------------------------------------------------- /deployment/docker-swarm/secret.m4: -------------------------------------------------------------------------------- 1 | secrets: 2 | self_key: 3 | file: ../certificate/self.key 4 | self_crt: 5 | file: ../certificate/self.crt 6 | -------------------------------------------------------------------------------- /deployment/docker-swarm/start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | export AD_ARCHIVE_VOLUME=$(readlink -f "$DIR/../../volume/ad/archive") 5 | export AD_CACHE_VOLUME=$(readlink -f "$DIR/../../volume/ad/cache") 6 | export AD_SEGMENT_VOLUME=$(readlink -f "$DIR/../../volume/ad/segment") 7 | export AD_STATIC_VOLUME=$(readlink -f "$DIR/../../volume/ad/static") 8 | export VIDEO_ARCHIVE_VOLUME=$(readlink -f "$DIR/../../volume/video/archive") 9 | export VIDEO_CACHE_VOLUME=$(readlink -f "$DIR/../../volume/video/cache") 10 | 11 | docker container prune -f 12 | docker volume prune -f 13 | docker network prune -f 14 | 15 | for mode in dash hls; do 16 | rm -rf "${AD_CACHE_VOLUME}/$mode" 17 | mkdir -p "${AD_CACHE_VOLUME}/$mode" 18 | mkdir -p "${AD_SEGMENT_VOLUME}/$mode" 19 | mkdir -p "${VIDEO_CACHE_VOLUME}/$mode" 20 | done 21 | 22 | yml="$DIR/docker-compose.$(hostname).yml" 23 | test -f "$yml" || yml="$DIR/docker-compose.yml" 24 | 25 | export USER_ID=$(id -u) 26 | export GROUP_ID=$(id -g) 27 | shift 28 | . "$DIR/build.sh" 29 | docker stack deploy -c "$yml" adi 30 | -------------------------------------------------------------------------------- /deployment/docker-swarm/stop.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | 5 | yml="$DIR/docker-compose.$(hostname).yml" 6 | test -f "$yml" || yml="$DIR/docker-compose.yml" 7 | 8 | docker stack services adi 9 | echo "Shutting down stack adi..." 
10 | while test -z "$(docker stack rm adi 2>&1 | grep 'Nothing found in stack')"; do 11 | sleep 2 12 | done 13 | 14 | docker container prune -f 15 | docker volume prune -f 16 | docker network prune -f 17 | -------------------------------------------------------------------------------- /deployment/docker-swarm/zookeeper.m4: -------------------------------------------------------------------------------- 1 | 2 | zookeeper-service: 3 | image: zookeeper:3.5.6 4 | environment: 5 | ZOO_TICK_TIME: '10000' 6 | ZOO_MAX_CLIENT_CNXNS: '160000' 7 | ZOO_AUTOPURGE_PURGEINTERVAL: '1' 8 | ZOO_LOG4J_PROP: 'ERROR' 9 | networks: 10 | - appnet 11 | user: zookeeper 12 | deploy: 13 | replicas: 1 14 | placement: 15 | constraints: 16 | - node.labels.vcac_zone!=yes 17 | 18 | -------------------------------------------------------------------------------- /deployment/kubernetes/.gitignore: -------------------------------------------------------------------------------- 1 | *-pv.yaml 2 | yaml/*.yaml 3 | -------------------------------------------------------------------------------- /deployment/kubernetes/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "pv") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | add_custom_target(volume ${CMAKE_CURRENT_SOURCE_DIR}/mkvolume.sh) 4 | add_custom_target(create_pv ${CMAKE_CURRENT_SOURCE_DIR}/start.sh) 5 | add_custom_target(delete_pv ${CMAKE_CURRENT_SOURCE_DIR}/stop.sh) 6 | include("${CMAKE_SOURCE_DIR}/script/scan-all.cmake") 7 | 8 | # add cleanup files 9 | file(GLOB m4files "${CMAKE_CURRENT_SOURCE_DIR}/*.yaml.m4") 10 | foreach(m4file ${m4files}) 11 | string(REPLACE ".yaml.m4" ".yaml" yamlfile "${m4file}") 12 | set_property(DIRECTORY APPEND PROPERTY ADDITIONAL_MAKE_CLEAN_FILES "${yamlfile}") 13 | endforeach(m4file) 14 | 15 | -------------------------------------------------------------------------------- /deployment/kubernetes/README.md: -------------------------------------------------------------------------------- 1 | 2 | The Ad-Insertion sample can be deployed with Kubernetes. 3 | 4 | ### Kubernetes Setup 5 | 6 | 1. Follow the [instructions](https://kubernetes.io/docs/setup) to set up your Kubernetes cluster. 7 | 2. All cluster nodes must have the same user (uid) and group (gid). 8 | 3. Set up password-less access from the Kubernetes controller to each worker node (required by `make update` and `make volume`): 9 | 10 | ``` 11 | ssh-keygen 12 | ssh-copy-id 13 | ``` 14 | 15 | 4. Start/stop services as follows: 16 | 17 | ``` 18 | mkdir build 19 | cd build 20 | cmake .. 21 | make 22 | make update # optional for private registry 23 | make volume 24 | make start_kubernetes 25 | make stop_kubernetes 26 | ``` 27 | 28 | --- 29 | 30 | - The `make update` command uploads the sample images to each worker node. If you prefer to use a private docker registry, configure the sample, `cmake -DREGISTRY= ..`, to push the sample images to the private registry after each build. 31 | - The `make volume` command creates local persistent volumes under the `/tmp` directory of the first two Kubernetes workers. This is a temporary solution for quick sample deployment. For scalability beyond a two-node cluster, consider rewriting the persistent volume scripts.
32 | 33 | --- 34 | 35 | ### See Also: 36 | 37 | - [Build Configuration](../../doc/cmake.md) 38 | - [Utility Script](../../doc/script.md) 39 | - [Helm Chart](helm/adi/README.md) 40 | -------------------------------------------------------------------------------- /deployment/kubernetes/ad-archive-pv.yaml.m4: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: storage.k8s.io/v1 3 | kind: StorageClass 4 | metadata: 5 | name: ad-archive 6 | provisioner: kubernetes.io/no-provisioner 7 | volumeBindingMode: WaitForFirstConsumer 8 | 9 | --- 10 | 11 | apiVersion: v1 12 | kind: PersistentVolume 13 | metadata: 14 | name: ad-archive 15 | spec: 16 | capacity: 17 | storage: defn(`AD_ARCHIVE_VOLUME_SIZE')Gi 18 | accessModes: 19 | - ReadOnlyMany 20 | persistentVolumeReclaimPolicy: Retain 21 | storageClassName: ad-archive 22 | local: 23 | path: defn(`AD_ARCHIVE_VOLUME_PATH') 24 | nodeAffinity: 25 | required: 26 | nodeSelectorTerms: 27 | - matchExpressions: 28 | - key: kubernetes.io/hostname 29 | operator: In 30 | values: 31 | - "defn(`AD_ARCHIVE_VOLUME_HOST')" 32 | 33 | -------------------------------------------------------------------------------- /deployment/kubernetes/ad-cache-pv.yaml.m4: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: storage.k8s.io/v1 3 | kind: StorageClass 4 | metadata: 5 | name: ad-cache 6 | provisioner: kubernetes.io/no-provisioner 7 | volumeBindingMode: WaitForFirstConsumer 8 | 9 | --- 10 | 11 | apiVersion: v1 12 | kind: PersistentVolume 13 | metadata: 14 | name: ad-cache 15 | spec: 16 | capacity: 17 | storage: defn(`AD_CACHE_VOLUME_SIZE')Gi 18 | accessModes: 19 | - ReadWriteMany 20 | persistentVolumeReclaimPolicy: Retain 21 | storageClassName: ad-cache 22 | local: 23 | path: defn(`AD_CACHE_VOLUME_PATH') 24 | nodeAffinity: 25 | required: 26 | nodeSelectorTerms: 27 | - matchExpressions: 28 | - key: kubernetes.io/hostname 29 | operator: In 30 | values: 31 | - "defn(`AD_CACHE_VOLUME_HOST')" 32 | 33 | -------------------------------------------------------------------------------- /deployment/kubernetes/ad-segment-pv.yaml.m4: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: storage.k8s.io/v1 3 | kind: StorageClass 4 | metadata: 5 | name: ad-segment 6 | provisioner: kubernetes.io/no-provisioner 7 | volumeBindingMode: WaitForFirstConsumer 8 | 9 | --- 10 | 11 | apiVersion: v1 12 | kind: PersistentVolume 13 | metadata: 14 | name: ad-segment 15 | spec: 16 | capacity: 17 | storage: defn(`AD_SEGMENT_VOLUME_SIZE')Gi 18 | accessModes: 19 | - ReadWriteMany 20 | persistentVolumeReclaimPolicy: Retain 21 | storageClassName: ad-segment 22 | local: 23 | path: defn(`AD_SEGMENT_VOLUME_PATH') 24 | nodeAffinity: 25 | required: 26 | nodeSelectorTerms: 27 | - matchExpressions: 28 | - key: kubernetes.io/hostname 29 | operator: In 30 | values: 31 | - "defn(`AD_SEGMENT_VOLUME_HOST')" 32 | 33 | -------------------------------------------------------------------------------- /deployment/kubernetes/ad-static-pv.yaml.m4: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: storage.k8s.io/v1 3 | kind: StorageClass 4 | metadata: 5 | name: ad-static 6 | provisioner: kubernetes.io/no-provisioner 7 | volumeBindingMode: WaitForFirstConsumer 8 | 9 | --- 10 | 11 | apiVersion: v1 12 | kind: PersistentVolume 13 | metadata: 14 | name: ad-static 15 | spec: 16 | capacity: 17 | storage: defn(`AD_STATIC_VOLUME_SIZE')Gi 18 | 
accessModes: 19 | - ReadOnlyMany 20 | persistentVolumeReclaimPolicy: Retain 21 | storageClassName: ad-static 22 | local: 23 | path: defn(`AD_STATIC_VOLUME_PATH') 24 | nodeAffinity: 25 | required: 26 | nodeSelectorTerms: 27 | - matchExpressions: 28 | - key: kubernetes.io/hostname 29 | operator: In 30 | values: 31 | - "defn(`AD_STATIC_VOLUME_HOST')" 32 | 33 | -------------------------------------------------------------------------------- /deployment/kubernetes/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | 5 | rm -rf "$DIR/../../volume/ad/cache" 6 | mkdir -p "$DIR/../../volume/ad/cache/dash" "$DIR/../../volume/ad/cache/hls" 7 | mkdir -p "$DIR/../../volume/ad/segment/dash" "$DIR/../../volume/ad/segment/hls" 8 | mkdir -p "$DIR/../../volume/video/cache/dash" "$DIR/../../volume/video/cache/hls" 9 | 10 | if [ -x /usr/bin/kubectl ] || [ -x /usr/local/bin/kubectl ]; then 11 | # list all workers 12 | hosts=($(kubectl get node -l vcac-zone!=yes -o custom-columns=NAME:metadata.name,STATUS:status.conditions[-1].type,TAINT:spec.taints | grep " Ready " | grep -v "NoSchedule" | cut -f1 -d' ')) 13 | if test ${#hosts[@]} -eq 0; then 14 | printf "\nFailed to locate worker node(s) for shared storage\n\n" 15 | exit 1 16 | elif test ${#hosts[@]} -lt 2; then 17 | hosts=(${hosts[0]} ${hosts[0]}) 18 | fi 19 | 20 | echo "Generating persistent volume scripts" 21 | . "$DIR/volume-info.sh" "${hosts[@]}" 22 | find "${DIR}" -maxdepth 1 -name "*.yaml" -exec rm -rf "{}" \; 23 | for template in $(find "${DIR}" -maxdepth 1 -name "*.yaml.m4" -print); do 24 | m4 $(env | grep _VOLUME_ | sed 's/^/-D/') "${template}" > "${template/.m4/}" 25 | done 26 | fi 27 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/.gitignore: -------------------------------------------------------------------------------- 1 | adi/values.yaml 2 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "helm") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | include("${CMAKE_SOURCE_DIR}/script/deployment.cmake") 4 | 5 | # add cleanup files 6 | set_property(DIRECTORY APPEND PROPERTY ADDITIONAL_MAKE_CLEAN_FILES "${CMAKE_CURRENT_SOURCE_DIR}/adi/values.yaml") 7 | 8 | add_dependencies(start_${service} create_pv sign_certificate) 9 | add_dependencies(stop_${service} delete_pv) 10 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/adi/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v2 2 | appVersion: 0.1.0 3 | description: A Helm chart for the Ad-Insertion sample 4 | home: https://github.com/OpenVisualCloud/AD-Insertion-Sample 5 | icon: https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/master/content-provider/frontend/html/favicon.ico 6 | name: AD-Insertion-Sample 7 | sources: 8 | - https://github.com/OpenVisualCloud/AD-Insertion-Sample 9 | type: application 10 | version: 0.1.0 11 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/adi/templates/account.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | name: account-service 6 | 
labels: 7 | app: account 8 | spec: 9 | ports: 10 | - port: 8080 11 | protocol: TCP 12 | selector: 13 | app: account 14 | 15 | --- 16 | 17 | apiVersion: apps/v1 18 | kind: Deployment 19 | metadata: 20 | name: account 21 | labels: 22 | app: account 23 | spec: 24 | replicas: 1 25 | selector: 26 | matchLabels: 27 | app: account 28 | template: 29 | metadata: 30 | labels: 31 | app: account 32 | spec: 33 | enableServiceLinks: false 34 | containers: 35 | - name: account 36 | image: {{ $.Values.registryPrefix }}ssai_account_service:latest 37 | imagePullPolicy: IfNotPresent 38 | ports: 39 | - containerPort: 8080 40 | env: 41 | - name: NO_PROXY 42 | value: "*" 43 | - name: no_proxy 44 | value: "*" 45 | {{ include "adi.platform.node-selector" $ }} 46 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/adi/templates/ad-archive-pvc.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: PersistentVolumeClaim 4 | metadata: 5 | name: ad-archive 6 | spec: 7 | accessModes: 8 | - ReadOnlyMany 9 | storageClassName: ad-archive 10 | resources: 11 | requests: 12 | storage: {{ .Values.pvc.ad.archive }} 13 | 14 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/adi/templates/ad-cache-pvc.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: PersistentVolumeClaim 4 | metadata: 5 | name: ad-cache 6 | spec: 7 | accessModes: 8 | - ReadWriteMany 9 | storageClassName: ad-cache 10 | resources: 11 | requests: 12 | storage: {{ .Values.pvc.ad.cache }} 13 | 14 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/adi/templates/ad-content.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | name: ad-content-service 6 | labels: 7 | app: ad-content 8 | spec: 9 | ports: 10 | - port: 8080 11 | protocol: TCP 12 | selector: 13 | app: ad-content 14 | 15 | --- 16 | 17 | apiVersion: apps/v1 18 | kind: Deployment 19 | metadata: 20 | name: ad-content 21 | labels: 22 | app: ad-content 23 | spec: 24 | replicas: 1 25 | selector: 26 | matchLabels: 27 | app: ad-content 28 | template: 29 | metadata: 30 | labels: 31 | app: ad-content 32 | spec: 33 | enableServiceLinks: false 34 | containers: 35 | - name: ad-content 36 | image: {{ $.Values.registryPrefix }}ssai_ad_content_frontend:latest 37 | imagePullPolicy: IfNotPresent 38 | ports: 39 | - containerPort: 8080 40 | env: 41 | - name: NO_PROXY 42 | value: "*" 43 | - name: no_proxy 44 | value: "*" 45 | volumeMounts: 46 | - mountPath: /var/www/archive 47 | name: ad-archive 48 | readOnly: true 49 | volumes: 50 | - name: ad-archive 51 | persistentVolumeClaim: 52 | claimName: ad-archive 53 | {{ include "adi.platform.node-selector" $ }} 54 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/adi/templates/ad-decision.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | name: ad-decision-service 6 | labels: 7 | app: ad-decision 8 | spec: 9 | ports: 10 | - port: 8080 11 | protocol: TCP 12 | selector: 13 | app: ad-decision 14 | 15 | --- 16 | 17 | apiVersion: apps/v1 18 | kind: Deployment 19 | metadata: 20 | name: ad-decision 21 | labels: 22 | app: ad-decision 23 | 
spec: 24 | replicas: 1 25 | selector: 26 | matchLabels: 27 | app: ad-decision 28 | template: 29 | metadata: 30 | labels: 31 | app: ad-decision 32 | spec: 33 | enableServiceLinks: false 34 | containers: 35 | - name: ad-decision 36 | image: {{ $.Values.registryPrefix }}ssai_ad_decision_frontend:latest 37 | imagePullPolicy: IfNotPresent 38 | ports: 39 | - containerPort: 8080 40 | env: 41 | - name: NO_PROXY 42 | value: "*" 43 | - name: no_proxy 44 | value: "*" 45 | {{ include "adi.platform.node-selector" $ }} 46 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/adi/templates/ad-segment-pvc.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: PersistentVolumeClaim 4 | metadata: 5 | name: ad-segment 6 | spec: 7 | accessModes: 8 | - ReadWriteMany 9 | storageClassName: ad-segment 10 | resources: 11 | requests: 12 | storage: {{ .Values.pvc.ad.segment }} 13 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/adi/templates/ad-static-pvc.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: PersistentVolumeClaim 4 | metadata: 5 | name: ad-static 6 | spec: 7 | accessModes: 8 | - ReadOnlyMany 9 | storageClassName: ad-static 10 | resources: 11 | requests: 12 | storage: {{ .Values.pvc.ad.static }} 13 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/adi/templates/ad-transcode.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: apps/v1 3 | kind: Deployment 4 | metadata: 5 | name: ad-transcode 6 | labels: 7 | app: ad-transcode 8 | spec: 9 | replicas: {{ .Values.ntranscodes }} 10 | selector: 11 | matchLabels: 12 | app: ad-transcode 13 | template: 14 | metadata: 15 | labels: 16 | app: ad-transcode 17 | spec: 18 | enableServiceLinks: false 19 | securityContext: 20 | runAsUser: {{ .Values.userId }} 21 | runAsGroup: {{ .Values.groupId }} 22 | fsGroup: {{ .Values.groupId }} 23 | containers: 24 | - name: ad-transcode 25 | image: {{ $.Values.registryPrefix }}ssai_ad_transcode:latest 26 | imagePullPolicy: IfNotPresent 27 | env: 28 | - name: NO_PROXY 29 | value: "*" 30 | - name: no_proxy 31 | value: "*" 32 | volumeMounts: 33 | - mountPath: /var/www/adinsert 34 | name: ad-cache 35 | - mountPath: /var/www/adsegment 36 | name: ad-segment 37 | readOnly: true 38 | volumes: 39 | - name: ad-cache 40 | persistentVolumeClaim: 41 | claimName: ad-cache 42 | - name: ad-segment 43 | persistentVolumeClaim: 44 | claimName: ad-segment 45 | {{ include "adi.platform.node-selector" $ }} 46 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/adi/templates/analytics.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: apps/v1 3 | kind: Deployment 4 | metadata: 5 | name: analytics 6 | labels: 7 | app: analytics 8 | spec: 9 | replicas: {{ .Values.nanalytics }} 10 | selector: 11 | matchLabels: 12 | app: analytics 13 | template: 14 | metadata: 15 | labels: 16 | app: analytics 17 | spec: 18 | enableServiceLinks: false 19 | containers: 20 | - name: analytics 21 | image: {{ $.Values.registryPrefix }}ssai_analytics_{{ .Values.framework }}_{{ include "adi.platform.suffix" $ }}:latest 22 | imagePullPolicy: IfNotPresent 23 | env: 24 | - name: NETWORK_PREFERENCE 25 | value: 
"{\"{{ include "adi.platform.device" $ }}\":\"{{ .Values.networkPreference }}\"}" 26 | - name: VA_PRE 27 | value: "{{ include "adi.platform.suffix" $ }}-" 28 | - name: NO_PROXY 29 | value: "*" 30 | - name: no_proxy 31 | value: "*" 32 | volumeMounts: 33 | {{- include "adi.platform.mounts" $ }} 34 | volumes: 35 | {{- include "adi.platform.volumes" $ }} 36 | {{ include "adi.platform.accel-selector" $ }} 37 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/adi/templates/cdn.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | name: cdn-service 6 | labels: 7 | app: cdn 8 | spec: 9 | ports: 10 | - port: 443 11 | targetPort: 8443 12 | name: https 13 | externalIPs: 14 | - {{ .Values.hostIP }} 15 | selector: 16 | app: cdn 17 | 18 | --- 19 | 20 | apiVersion: apps/v1 21 | kind: Deployment 22 | metadata: 23 | name: cdn 24 | labels: 25 | app: cdn 26 | spec: 27 | replicas: 1 28 | selector: 29 | matchLabels: 30 | app: cdn 31 | template: 32 | metadata: 33 | labels: 34 | app: cdn 35 | spec: 36 | enableServiceLinks: false 37 | containers: 38 | - name: cdn 39 | image: {{ $.Values.registryPrefix }}ssai_cdn_service:latest 40 | imagePullPolicy: IfNotPresent 41 | ports: 42 | - containerPort: 8443 43 | env: 44 | - name: NO_PROXY 45 | value: "*" 46 | - name: no_proxy 47 | value: "*" 48 | volumeMounts: 49 | - mountPath: /etc/localtime 50 | name: timezone 51 | readOnly: true 52 | - mountPath: /var/run/secrets 53 | name: self-signed-certificate 54 | readOnly: true 55 | volumes: 56 | - name: timezone 57 | hostPath: 58 | path: /etc/localtime 59 | type: File 60 | - name: self-signed-certificate 61 | secret: 62 | secretName: self-signed-certificate 63 | {{ include "adi.platform.node-selector" $ }} 64 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/adi/templates/content-provider.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | name: content-provider-service 6 | labels: 7 | app: content-provider 8 | spec: 9 | ports: 10 | - port: 8080 11 | protocol: TCP 12 | selector: 13 | app: content-provider 14 | 15 | --- 16 | 17 | apiVersion: apps/v1 18 | kind: Deployment 19 | metadata: 20 | name: content-provider 21 | labels: 22 | app: content-provider 23 | spec: 24 | replicas: 1 25 | selector: 26 | matchLabels: 27 | app: content-provider 28 | template: 29 | metadata: 30 | labels: 31 | app: content-provider 32 | spec: 33 | enableServiceLinks: false 34 | containers: 35 | - name: content-provider 36 | image: {{ $.Values.registryPrefix }}ssai_content_provider_frontend:latest 37 | imagePullPolicy: IfNotPresent 38 | env: 39 | - name: NO_PROXY 40 | value: "*" 41 | - name: no_proxy 42 | value: "*" 43 | ports: 44 | - containerPort: 8080 45 | volumeMounts: 46 | - mountPath: /var/www/archive 47 | name: video-archive 48 | readOnly: true 49 | - mountPath: /var/www/video 50 | name: video-cache 51 | readOnly: true 52 | volumes: 53 | - name: video-archive 54 | persistentVolumeClaim: 55 | claimName: video-archive 56 | - name: video-cache 57 | persistentVolumeClaim: 58 | claimName: video-cache 59 | {{ include "adi.platform.node-selector" $ }} 60 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/adi/templates/content-transcode.yaml: 
-------------------------------------------------------------------------------- 1 | 2 | apiVersion: apps/v1 3 | kind: Deployment 4 | metadata: 5 | name: content-transcode 6 | labels: 7 | app: content-transcode 8 | spec: 9 | replicas: {{ .Values.ntranscodes }} 10 | selector: 11 | matchLabels: 12 | app: content-transcode 13 | template: 14 | metadata: 15 | labels: 16 | app: content-transcode 17 | spec: 18 | enableServiceLinks: false 19 | securityContext: 20 | runAsUser: {{ .Values.userId }} 21 | runAsGroup: {{ .Values.groupId }} 22 | fsGroup: {{ .Values.groupId }} 23 | containers: 24 | - name: content-transcode 25 | image: {{ $.Values.registryPrefix }}ssai_content_transcode:latest 26 | imagePullPolicy: IfNotPresent 27 | env: 28 | - name: NO_PROXY 29 | value: "*" 30 | - name: no_proxy 31 | value: "*" 32 | volumeMounts: 33 | - mountPath: /var/www/video 34 | name: video-cache 35 | - mountPath: /var/www/archive 36 | name: video-archive 37 | readOnly: true 38 | volumes: 39 | - name: video-cache 40 | persistentVolumeClaim: 41 | claimName: video-cache 42 | - name: video-archive 43 | persistentVolumeClaim: 44 | claimName: video-archive 45 | {{ include "adi.platform.node-selector" $ }} 46 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/adi/templates/database.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | name: database-service 6 | labels: 7 | app: database 8 | spec: 9 | ports: 10 | - port: 9200 11 | protocol: TCP 12 | selector: 13 | app: database 14 | 15 | --- 16 | 17 | apiVersion: apps/v1 18 | kind: Deployment 19 | metadata: 20 | name: database 21 | labels: 22 | app: database 23 | spec: 24 | replicas: 1 25 | selector: 26 | matchLabels: 27 | app: database 28 | template: 29 | metadata: 30 | labels: 31 | app: database 32 | spec: 33 | enableServiceLinks: false 34 | containers: 35 | - name: database 36 | image: docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.1 37 | imagePullPolicy: IfNotPresent 38 | ports: 39 | - containerPort: 9200 40 | env: 41 | - name: "discovery.type" 42 | value: "single-node" 43 | - name: NO_PROXY 44 | value: "*" 45 | - name: no_proxy 46 | value: "*" 47 | securityContext: 48 | runAsUser: 1000 49 | {{ include "adi.platform.node-selector" $ }} 50 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/adi/templates/kafka2db.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: apps/v1 3 | kind: Deployment 4 | metadata: 5 | name: kafka2db 6 | labels: 7 | app: kafka2db 8 | spec: 9 | replicas: 1 10 | selector: 11 | matchLabels: 12 | app: kafka2db 13 | template: 14 | metadata: 15 | labels: 16 | app: kafka2db 17 | spec: 18 | enableServiceLinks: false 19 | containers: 20 | - name: kafka2db 21 | image: {{ $.Values.registryPrefix }}ssai_kafka2db:latest 22 | imagePullPolicy: IfNotPresent 23 | env: 24 | - name: NO_PROXY 25 | value: "*" 26 | - name: no_proxy 27 | value: "*" 28 | {{ include "adi.platform.node-selector" $ }} 29 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/adi/templates/video-archive-pvc.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: PersistentVolumeClaim 4 | metadata: 5 | name: video-archive 6 | spec: 7 | accessModes: 8 | - ReadOnlyMany 9 | storageClassName: video-archive 
10 | resources: 11 | requests: 12 | storage: {{ .Values.pvc.video.archive }} 13 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/adi/templates/video-cache-pvc.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: PersistentVolumeClaim 4 | metadata: 5 | name: video-cache 6 | spec: 7 | accessModes: 8 | - ReadWriteMany 9 | storageClassName: video-cache 10 | resources: 11 | requests: 12 | storage: {{ .Values.pvc.video.cache }} 13 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/adi/templates/zookeeper.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | name: zookeeper-service 6 | labels: 7 | app: zookeeper 8 | spec: 9 | ports: 10 | - port: 2181 11 | protocol: TCP 12 | selector: 13 | app: zookeeper 14 | 15 | --- 16 | 17 | apiVersion: apps/v1 18 | kind: Deployment 19 | metadata: 20 | name: zookeeper 21 | labels: 22 | app: zookeeper 23 | spec: 24 | replicas: 1 25 | selector: 26 | matchLabels: 27 | app: zookeeper 28 | template: 29 | metadata: 30 | labels: 31 | app: zookeeper 32 | spec: 33 | enableServiceLinks: false 34 | containers: 35 | - name: zookeeper 36 | image: zookeeper:3.5.6 37 | imagePullPolicy: IfNotPresent 38 | ports: 39 | - containerPort: 2181 40 | env: 41 | - name: "ZOO_TICK_TIME" 42 | value: "10000" 43 | - name: "ZOO_MAX_CLIENT_CNXNS" 44 | value: "160000" 45 | - name: "ZOO_AUTOPURGE_PURGEINTERVAL" 46 | value: "1" 47 | - name: "ZOO_LOG4J_PROP" 48 | value: "ERROR" 49 | securityContext: 50 | runAsUser: 1000 51 | {{ include "adi.platform.node-selector" $ }} 52 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/adi/values.yaml.m4: -------------------------------------------------------------------------------- 1 | 2 | # private registry URL 3 | registryPrefix: "" 4 | 5 | # platform specifies the target platform: Xeon or VCAC-A. 6 | platform: "defn(`PLATFORM')" 7 | 8 | # framework specifies the target framework: gst or ffmpeg. 9 | framework: "defn(`FRAMEWORK')" 10 | 11 | # nanalytics specifies the number of analytics instances 12 | nanalytics: defn(`NANALYTICS') 13 | 14 | # ntranscodes specifies the number of transcoding instances 15 | ntranscodes: defn(`NTRANSCODES') 16 | 17 | # hostIP specifies the external IP to access the sample UI 18 | hostIP: "defn(`HOSTIP')" 19 | 20 | # network specifies the analytics model precision: FP32, INT8 or FP16, or their 21 | # combination as a comma delimited string. 
22 | networkPreference: "defn(`NETWORK_PREFERENCE')" 23 | 24 | # pvc specifies the persistent volume claim sizes 25 | pvc: 26 | ad: 27 | archive: defn(`AD_ARCHIVE_VOLUME_SIZE')Gi 28 | cache: defn(`AD_CACHE_VOLUME_SIZE')Gi 29 | segment: defn(`AD_SEGMENT_VOLUME_SIZE')Gi 30 | static: defn(`AD_STATIC_VOLUME_SIZE')Gi 31 | video: 32 | archive: defn(`VIDEO_ARCHIVE_VOLUME_SIZE')Gi 33 | cache: defn(`VIDEO_CACHE_VOLUME_SIZE')Gi 34 | 35 | # optional: provide Linux user id & group permissioned to access cloud storage 36 | # userID is obtained using command: `$ id -u` 37 | # groupID is obtained using command: `$ id -g` 38 | userId: defn(`USERID') 39 | groupId: defn(`GROUPID') 40 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | PLATFORM="${1:-Xeon}" 5 | FRAMEWORK="${2:-gst}" 6 | NANALYTICS="${3:-1}" 7 | NTRANSCODES="${4:-1}" 8 | MINRESOLUTION="${5:-360p}" 9 | NETWORK="${6:-FP32}" 10 | REGISTRY="$7" 11 | HOSTIP=$(ip route get 8.8.8.8 | awk '/ src /{split(substr($0,index($0," src ")),f);print f[2];exit}') 12 | 13 | echo "Generating helm chart" 14 | . "$DIR/../volume-info.sh" 15 | m4 -DREGISTRY_PREFIX=${REGISTRY} -DPLATFORM=${PLATFORM} -DFRAMEWORK=${FRAMEWORK} -DNANALYTICS=${NANALYTICS} -DNTRANSCODES=${NTRANSCODES} -DMINRESOLUTION=${MINRESOLUTION} -DNETWORK_PREFERENCE=${NETWORK} -DUSERID=$(id -u) -DGROUPID=$(id -g) -DHOSTIP=${HOSTIP} $(env | grep _VOLUME_ | sed 's/^/-D/') -I "${DIR}/adi" "$DIR/adi/values.yaml.m4" > "$DIR/adi/values.yaml" 16 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | 5 | shift 6 | . "$DIR/build.sh" 7 | 8 | function create_secret { 9 | kubectl create secret generic self-signed-certificate "--from-file=${DIR}/../../certificate/self.crt" "--from-file=${DIR}/../../certificate/self.key" 10 | } 11 | 12 | # create secrets 13 | create_secret 2>/dev/null || (kubectl delete secret self-signed-certificate; create_secret) 14 | 15 | helm install adi "$DIR/adi" 16 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/stop.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | 5 | helm uninstall adi 6 | kubectl delete secret self-signed-certificate 2> /dev/null || echo -n "" 7 | -------------------------------------------------------------------------------- /deployment/kubernetes/mkvolume.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | 5 | echo "Making volumes..." 
6 | HOSTS=$(kubectl get node -o 'custom-columns=NAME:.status.addresses[?(@.type=="Hostname")].address,IP:.status.addresses[?(@.type=="InternalIP")].address' | awk '!/NAME/{print $1":"$2}') 7 | awk -v DIR="$DIR" -v HOSTS="$HOSTS" ' 8 | BEGIN{ 9 | split(HOSTS,tmp1," "); 10 | for (i in tmp1) { 11 | split(tmp1[i],tmp2,":"); 12 | host2ip[tmp2[1]]=tmp2[2]; 13 | } 14 | } 15 | /name:/ { 16 | gsub("-","/",$2) 17 | content="\""DIR"/../../volume/"$2"\"" 18 | } 19 | /path:/ { 20 | path=$2 21 | } 22 | /- ".*"/ { 23 | host=host2ip[substr($2,2,length($2)-2)]; 24 | paths[host,path]=1; 25 | contents[host,path]=content 26 | } 27 | END { 28 | for (item in paths) { 29 | split(item,tmp,SUBSEP); 30 | host=tmp[1] 31 | path=tmp[2]; 32 | print host, path; 33 | system("ssh "host" \"mkdir -p "path";find "path" -mindepth 1 -maxdepth 1 -exec rm -rf {} \\\\;\""); 34 | system("scp -r "contents[host,path]"/* "host":"path); 35 | } 36 | } 37 | ' "$DIR"/*-pv.yaml 38 | -------------------------------------------------------------------------------- /deployment/kubernetes/start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | 5 | for yaml in $(find "$DIR" -maxdepth 1 -name "*.yaml" -print); do 6 | kubectl apply -f "$yaml" 7 | done 8 | -------------------------------------------------------------------------------- /deployment/kubernetes/stop.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | 5 | # delete all persistent volumes 6 | for yaml in $(find "${DIR}" -maxdepth 1 -name "*.yaml" -print); do 7 | kubectl delete --wait=false -f "$yaml" --ignore-not-found=true 2>/dev/null 8 | done 9 | 10 | echo -n "" 11 | -------------------------------------------------------------------------------- /deployment/kubernetes/video-archive-pv.yaml.m4: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: storage.k8s.io/v1 3 | kind: StorageClass 4 | metadata: 5 | name: video-archive 6 | provisioner: kubernetes.io/no-provisioner 7 | volumeBindingMode: WaitForFirstConsumer 8 | 9 | --- 10 | 11 | apiVersion: v1 12 | kind: PersistentVolume 13 | metadata: 14 | name: video-archive 15 | spec: 16 | capacity: 17 | storage: defn(`VIDEO_ARCHIVE_VOLUME_SIZE')Gi 18 | accessModes: 19 | - ReadOnlyMany 20 | persistentVolumeReclaimPolicy: Retain 21 | storageClassName: video-archive 22 | local: 23 | path: defn(`VIDEO_ARCHIVE_VOLUME_PATH') 24 | nodeAffinity: 25 | required: 26 | nodeSelectorTerms: 27 | - matchExpressions: 28 | - key: kubernetes.io/hostname 29 | operator: In 30 | values: 31 | - "defn(`VIDEO_ARCHIVE_VOLUME_HOST')" 32 | 33 | -------------------------------------------------------------------------------- /deployment/kubernetes/video-cache-pv.yaml.m4: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: storage.k8s.io/v1 3 | kind: StorageClass 4 | metadata: 5 | name: video-cache 6 | provisioner: kubernetes.io/no-provisioner 7 | volumeBindingMode: WaitForFirstConsumer 8 | 9 | --- 10 | 11 | apiVersion: v1 12 | kind: PersistentVolume 13 | metadata: 14 | name: video-cache 15 | spec: 16 | capacity: 17 | storage: defn(`VIDEO_CACHE_VOLUME_SIZE')Gi 18 | accessModes: 19 | - ReadWriteMany 20 | persistentVolumeReclaimPolicy: Retain 21 | storageClassName: video-cache 22 | local: 23 | path: defn(`VIDEO_CACHE_VOLUME_PATH') 24 | nodeAffinity: 25 | required: 26 | 
nodeSelectorTerms: 27 | - matchExpressions: 28 | - key: kubernetes.io/hostname 29 | operator: In 30 | values: 31 | - "defn(`VIDEO_CACHE_VOLUME_HOST')" 32 | 33 | -------------------------------------------------------------------------------- /deployment/kubernetes/volume-info.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | export AD_ARCHIVE_VOLUME_PATH=/tmp/archive/ad 4 | export AD_ARCHIVE_VOLUME_SIZE=1 5 | export AD_ARCHIVE_VOLUME_HOST=$1 6 | 7 | export AD_CACHE_VOLUME_PATH=/tmp/cache/ad 8 | export AD_CACHE_VOLUME_SIZE=1 9 | export AD_CACHE_VOLUME_HOST=$1 10 | 11 | export AD_SEGMENT_VOLUME_PATH=/tmp/segment/ad 12 | export AD_SEGMENT_VOLUME_SIZE=1 13 | export AD_SEGMENT_VOLUME_HOST=$1 14 | 15 | export AD_STATIC_VOLUME_PATH=/tmp/static/ad 16 | export AD_STATIC_VOLUME_SIZE=1 17 | export AD_STATIC_VOLUME_HOST=$1 18 | 19 | export VIDEO_ARCHIVE_VOLUME_PATH=/tmp/archive/video 20 | export VIDEO_ARCHIVE_VOLUME_SIZE=2 21 | export VIDEO_ARCHIVE_VOLUME_HOST=$2 22 | 23 | export VIDEO_CACHE_VOLUME_PATH=/tmp/cache/video 24 | export VIDEO_CACHE_VOLUME_SIZE=2 25 | export VIDEO_CACHE_VOLUME_HOST=$2 26 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "kubernetes") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | include("${CMAKE_SOURCE_DIR}/script/deployment.cmake") 4 | 5 | # add cleanup files 6 | file(GLOB m4files "${CMAKE_CURRENT_SOURCE_DIR}/*.yaml.m4") 7 | foreach(m4file ${m4files}) 8 | string(REPLACE ".yaml.m4" ".yaml" yamlfile "${m4file}") 9 | set_property(DIRECTORY APPEND PROPERTY ADDITIONAL_MAKE_CLEAN_FILES "${yamlfile}") 10 | endforeach(m4file) 11 | 12 | add_dependencies(start_${service} create_pv sign_certificate) 13 | add_dependencies(stop_${service} delete_pv) 14 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/account.yaml.m4: -------------------------------------------------------------------------------- 1 | include(platform.m4) 2 | 3 | apiVersion: v1 4 | kind: Service 5 | metadata: 6 | name: account-service 7 | labels: 8 | app: account 9 | spec: 10 | ports: 11 | - port: 8080 12 | protocol: TCP 13 | selector: 14 | app: account 15 | 16 | --- 17 | 18 | apiVersion: apps/v1 19 | kind: Deployment 20 | metadata: 21 | name: account 22 | labels: 23 | app: account 24 | spec: 25 | replicas: 1 26 | selector: 27 | matchLabels: 28 | app: account 29 | template: 30 | metadata: 31 | labels: 32 | app: account 33 | spec: 34 | enableServiceLinks: false 35 | containers: 36 | - name: account 37 | image: defn(`REGISTRY_PREFIX')ssai_account_service:latest 38 | imagePullPolicy: IfNotPresent 39 | ports: 40 | - containerPort: 8080 41 | env: 42 | - name: NO_PROXY 43 | value: "*" 44 | - name: no_proxy 45 | value: "*" 46 | PLATFORM_NODE_SELECTOR(`Xeon')dnl 47 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/ad-archive-pvc.yaml.m4: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: PersistentVolumeClaim 4 | metadata: 5 | name: ad-archive 6 | spec: 7 | accessModes: 8 | - ReadOnlyMany 9 | storageClassName: ad-archive 10 | resources: 11 | requests: 12 | storage: defn(`AD_ARCHIVE_VOLUME_SIZE')Gi 13 | 14 | -------------------------------------------------------------------------------- 
/deployment/kubernetes/yaml/ad-cache-pvc.yaml.m4: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: PersistentVolumeClaim 4 | metadata: 5 | name: ad-cache 6 | spec: 7 | accessModes: 8 | - ReadWriteMany 9 | storageClassName: ad-cache 10 | resources: 11 | requests: 12 | storage: defn(`AD_CACHE_VOLUME_SIZE')Gi 13 | 14 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/ad-content.yaml.m4: -------------------------------------------------------------------------------- 1 | include(platform.m4) 2 | 3 | apiVersion: v1 4 | kind: Service 5 | metadata: 6 | name: ad-content-service 7 | labels: 8 | app: ad-content 9 | spec: 10 | ports: 11 | - port: 8080 12 | protocol: TCP 13 | selector: 14 | app: ad-content 15 | 16 | --- 17 | 18 | apiVersion: apps/v1 19 | kind: Deployment 20 | metadata: 21 | name: ad-content 22 | labels: 23 | app: ad-content 24 | spec: 25 | replicas: 1 26 | selector: 27 | matchLabels: 28 | app: ad-content 29 | template: 30 | metadata: 31 | labels: 32 | app: ad-content 33 | spec: 34 | enableServiceLinks: false 35 | containers: 36 | - name: ad-content 37 | image: defn(`REGISTRY_PREFIX')ssai_ad_content_frontend:latest 38 | imagePullPolicy: IfNotPresent 39 | ports: 40 | - containerPort: 8080 41 | env: 42 | - name: NO_PROXY 43 | value: "*" 44 | - name: no_proxy 45 | value: "*" 46 | volumeMounts: 47 | - mountPath: /var/www/archive 48 | name: ad-archive 49 | readOnly: true 50 | volumes: 51 | - name: ad-archive 52 | persistentVolumeClaim: 53 | claimName: ad-archive 54 | PLATFORM_NODE_SELECTOR(`Xeon')dnl 55 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/ad-decision.yaml.m4: -------------------------------------------------------------------------------- 1 | include(platform.m4) 2 | 3 | apiVersion: v1 4 | kind: Service 5 | metadata: 6 | name: ad-decision-service 7 | labels: 8 | app: ad-decision 9 | spec: 10 | ports: 11 | - port: 8080 12 | protocol: TCP 13 | selector: 14 | app: ad-decision 15 | 16 | --- 17 | 18 | apiVersion: apps/v1 19 | kind: Deployment 20 | metadata: 21 | name: ad-decision 22 | labels: 23 | app: ad-decision 24 | spec: 25 | replicas: 1 26 | selector: 27 | matchLabels: 28 | app: ad-decision 29 | template: 30 | metadata: 31 | labels: 32 | app: ad-decision 33 | spec: 34 | enableServiceLinks: false 35 | containers: 36 | - name: ad-decision 37 | image: defn(`REGISTRY_PREFIX')ssai_ad_decision_frontend:latest 38 | imagePullPolicy: IfNotPresent 39 | ports: 40 | - containerPort: 8080 41 | env: 42 | - name: NO_PROXY 43 | value: "*" 44 | - name: no_proxy 45 | value: "*" 46 | PLATFORM_NODE_SELECTOR(`Xeon')dnl 47 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/ad-segment-pvc.yaml.m4: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: PersistentVolumeClaim 4 | metadata: 5 | name: ad-segment 6 | spec: 7 | accessModes: 8 | - ReadWriteMany 9 | storageClassName: ad-segment 10 | resources: 11 | requests: 12 | storage: defn(`AD_SEGMENT_VOLUME_SIZE')Gi 13 | 14 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/ad-static-pvc.yaml.m4: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: PersistentVolumeClaim 4 | metadata: 5 | name: ad-static 6 | spec: 7 | accessModes: 8 | - 
ReadOnlyMany 9 | storageClassName: ad-static 10 | resources: 11 | requests: 12 | storage: defn(`AD_STATIC_VOLUME_SIZE')Gi 13 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/ad-transcode.yaml.m4: -------------------------------------------------------------------------------- 1 | include(platform.m4) 2 | 3 | apiVersion: apps/v1 4 | kind: Deployment 5 | metadata: 6 | name: ad-transcode 7 | labels: 8 | app: ad-transcode 9 | spec: 10 | replicas: defn(`NTRANSCODES') 11 | selector: 12 | matchLabels: 13 | app: ad-transcode 14 | template: 15 | metadata: 16 | labels: 17 | app: ad-transcode 18 | spec: 19 | enableServiceLinks: false 20 | securityContext: 21 | runAsUser: defn(`USERID') 22 | runAsGroup: defn(`GROUPID') 23 | fsGroup: defn(`GROUPID') 24 | containers: 25 | - name: ad-transcode 26 | image: defn(`REGISTRY_PREFIX')ssai_ad_transcode:latest 27 | imagePullPolicy: IfNotPresent 28 | env: 29 | - name: NO_PROXY 30 | value: "*" 31 | - name: no_proxy 32 | value: "*" 33 | volumeMounts: 34 | - mountPath: /var/www/adinsert 35 | name: ad-cache 36 | - mountPath: /var/www/adsegment 37 | name: ad-segment 38 | readOnly: true 39 | volumes: 40 | - name: ad-cache 41 | persistentVolumeClaim: 42 | claimName: ad-cache 43 | - name: ad-segment 44 | persistentVolumeClaim: 45 | claimName: ad-segment 46 | PLATFORM_NODE_SELECTOR(`Xeon')dnl 47 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/analytics.yaml.m4: -------------------------------------------------------------------------------- 1 | include(platform.m4) 2 | 3 | apiVersion: apps/v1 4 | kind: Deployment 5 | metadata: 6 | name: analytics 7 | labels: 8 | app: analytics 9 | spec: 10 | replicas: defn(`NANALYTICS') 11 | selector: 12 | matchLabels: 13 | app: analytics 14 | template: 15 | metadata: 16 | labels: 17 | app: analytics 18 | spec: 19 | enableServiceLinks: false 20 | containers: 21 | - name: analytics 22 | image: defn(`REGISTRY_PREFIX')`ssai_analytics_'defn(`FRAMEWORK')`_'defn(`PLATFORM_SUFFIX'):latest 23 | imagePullPolicy: IfNotPresent 24 | env: 25 | - name: `NETWORK_PREFERENCE' 26 | value: "{\"defn(`PLATFORM_DEVICE')\":\"defn(`NETWORK_PREFERENCE')\"}" 27 | - name: VA_PRE 28 | value: "defn(`PLATFORM')-" 29 | - name: NO_PROXY 30 | value: "*" 31 | - name: no_proxy 32 | value: "*" 33 | defn(`PLATFORM_VOLUME_MOUNTS')dnl 34 | defn(`PLATFORM_VOLUMES')dnl 35 | PLATFORM_NODE_SELECTOR(`VCAC-A')dnl 36 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | PLATFORM="${1:-Xeon}" 5 | FRAMEWORK="${2:-gst}" 6 | NANALYTICS="${3:-1}" 7 | NTRANSCODES="${4:-1}" 8 | MINRESOLUTION="${5:-360p}" 9 | NETWORK="${6:-FP32}" 10 | REGISTRY="$7" 11 | HOSTIP=$(ip route get 8.8.8.8 | awk '/ src /{split(substr($0,index($0," src ")),f);print f[2];exit}') 12 | 13 | echo "Generating templates with PLATFORM=${PLATFORM},FRAMEWORK=${FRAMEWORK},NANALYTICS=${NANALYTICS},NTRANSCODES=${NTRANSCODES},MINRESOLUTION=${MINRESOLUTION},NETWORK=${NETWORK}" 14 | . 
"$DIR/../volume-info.sh" 15 | find "${DIR}" -maxdepth 1 -name "*.yaml" -exec rm -rf "{}" \; 16 | for template in $(find "${DIR}" -maxdepth 1 -name "*.yaml.m4" -print); do 17 | yaml=${template/.m4/} 18 | m4 -DREGISTRY_PREFIX=${REGISTRY} -DPLATFORM=${PLATFORM} -DFRAMEWORK=${FRAMEWORK} -DNANALYTICS=${NANALYTICS} -DNTRANSCODES=${NTRANSCODES} -DMINRESOLUTION=${MINRESOLUTION} -DNETWORK_PREFERENCE=${NETWORK} $(env | grep _VOLUME_ | sed 's/^/-D/') -DUSERID=$(id -u) -DGROUPID=$(id -g) -DHOSTIP=${HOSTIP} -I "${DIR}" "${template}" > "${yaml}" 19 | done 20 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/cdn.yaml.m4: -------------------------------------------------------------------------------- 1 | include(platform.m4) 2 | 3 | apiVersion: v1 4 | kind: Service 5 | metadata: 6 | name: cdn-service 7 | labels: 8 | app: cdn 9 | spec: 10 | ports: 11 | - port: 443 12 | targetPort: 8443 13 | name: https 14 | externalIPs: 15 | - defn(`HOSTIP') 16 | selector: 17 | app: cdn 18 | 19 | --- 20 | 21 | apiVersion: apps/v1 22 | kind: Deployment 23 | metadata: 24 | name: cdn 25 | labels: 26 | app: cdn 27 | spec: 28 | replicas: 1 29 | selector: 30 | matchLabels: 31 | app: cdn 32 | template: 33 | metadata: 34 | labels: 35 | app: cdn 36 | spec: 37 | enableServiceLinks: false 38 | containers: 39 | - name: cdn 40 | image: defn(`REGISTRY_PREFIX')ssai_cdn_service:latest 41 | imagePullPolicy: IfNotPresent 42 | ports: 43 | - containerPort: 8443 44 | env: 45 | - name: NO_PROXY 46 | value: "*" 47 | - name: no_proxy 48 | value: "*" 49 | volumeMounts: 50 | - mountPath: /etc/localtime 51 | name: timezone 52 | readOnly: true 53 | - mountPath: /var/run/secrets 54 | name: self-signed-certificate 55 | readOnly: true 56 | volumes: 57 | - name: timezone 58 | hostPath: 59 | path: /etc/localtime 60 | type: File 61 | - name: self-signed-certificate 62 | secret: 63 | secretName: self-signed-certificate 64 | PLATFORM_NODE_SELECTOR(`Xeon')dnl 65 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/content-provider.yaml.m4: -------------------------------------------------------------------------------- 1 | include(platform.m4) 2 | 3 | apiVersion: v1 4 | kind: Service 5 | metadata: 6 | name: content-provider-service 7 | labels: 8 | app: content-provider 9 | spec: 10 | ports: 11 | - port: 8080 12 | protocol: TCP 13 | selector: 14 | app: content-provider 15 | 16 | --- 17 | 18 | apiVersion: apps/v1 19 | kind: Deployment 20 | metadata: 21 | name: content-provider 22 | labels: 23 | app: content-provider 24 | spec: 25 | replicas: 1 26 | selector: 27 | matchLabels: 28 | app: content-provider 29 | template: 30 | metadata: 31 | labels: 32 | app: content-provider 33 | spec: 34 | enableServiceLinks: false 35 | containers: 36 | - name: content-provider 37 | image: defn(`REGISTRY_PREFIX')ssai_content_provider_frontend:latest 38 | imagePullPolicy: IfNotPresent 39 | env: 40 | - name: NO_PROXY 41 | value: "*" 42 | - name: no_proxy 43 | value: "*" 44 | ports: 45 | - containerPort: 8080 46 | volumeMounts: 47 | - mountPath: /var/www/archive 48 | name: video-archive 49 | readOnly: true 50 | - mountPath: /var/www/video 51 | name: video-cache 52 | readOnly: true 53 | volumes: 54 | - name: video-archive 55 | persistentVolumeClaim: 56 | claimName: video-archive 57 | - name: video-cache 58 | persistentVolumeClaim: 59 | claimName: video-cache 60 | PLATFORM_NODE_SELECTOR(`Xeon')dnl 61 | 
-------------------------------------------------------------------------------- /deployment/kubernetes/yaml/content-transcode.yaml.m4: -------------------------------------------------------------------------------- 1 | include(platform.m4) 2 | 3 | apiVersion: apps/v1 4 | kind: Deployment 5 | metadata: 6 | name: content-transcode 7 | labels: 8 | app: content-transcode 9 | spec: 10 | replicas: defn(`NTRANSCODES') 11 | selector: 12 | matchLabels: 13 | app: content-transcode 14 | template: 15 | metadata: 16 | labels: 17 | app: content-transcode 18 | spec: 19 | enableServiceLinks: false 20 | securityContext: 21 | runAsUser: defn(`USERID') 22 | runAsGroup: defn(`GROUPID') 23 | fsGroup: defn(`GROUPID') 24 | containers: 25 | - name: content-transcode 26 | image: defn(`REGISTRY_PREFIX')ssai_content_transcode:latest 27 | imagePullPolicy: IfNotPresent 28 | env: 29 | - name: NO_PROXY 30 | value: "*" 31 | - name: no_proxy 32 | value: "*" 33 | volumeMounts: 34 | - mountPath: /var/www/video 35 | name: video-cache 36 | - mountPath: /var/www/archive 37 | name: video-archive 38 | readOnly: true 39 | volumes: 40 | - name: video-cache 41 | persistentVolumeClaim: 42 | claimName: video-cache 43 | - name: video-archive 44 | persistentVolumeClaim: 45 | claimName: video-archive 46 | PLATFORM_NODE_SELECTOR(`Xeon')dnl 47 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/database.yaml.m4: -------------------------------------------------------------------------------- 1 | include(platform.m4) 2 | 3 | apiVersion: v1 4 | kind: Service 5 | metadata: 6 | name: database-service 7 | labels: 8 | app: database 9 | spec: 10 | ports: 11 | - port: 9200 12 | protocol: TCP 13 | selector: 14 | app: database 15 | 16 | --- 17 | 18 | apiVersion: apps/v1 19 | kind: Deployment 20 | metadata: 21 | name: database 22 | labels: 23 | app: database 24 | spec: 25 | replicas: 1 26 | selector: 27 | matchLabels: 28 | app: database 29 | template: 30 | metadata: 31 | labels: 32 | app: database 33 | spec: 34 | enableServiceLinks: false 35 | containers: 36 | - name: database 37 | image: docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.1 38 | imagePullPolicy: IfNotPresent 39 | ports: 40 | - containerPort: 9200 41 | env: 42 | - name: "discovery.type" 43 | value: "single-node" 44 | - name: NO_PROXY 45 | value: "*" 46 | - name: no_proxy 47 | value: "*" 48 | securityContext: 49 | runAsUser: 1000 50 | PLATFORM_NODE_SELECTOR(`Xeon')dnl 51 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/kafka2db.yaml.m4: -------------------------------------------------------------------------------- 1 | include(platform.m4) 2 | 3 | apiVersion: apps/v1 4 | kind: Deployment 5 | metadata: 6 | name: kafka2db 7 | labels: 8 | app: kafka2db 9 | spec: 10 | replicas: 1 11 | selector: 12 | matchLabels: 13 | app: kafka2db 14 | template: 15 | metadata: 16 | labels: 17 | app: kafka2db 18 | spec: 19 | enableServiceLinks: false 20 | containers: 21 | - name: kafka2db 22 | image: defn(`REGISTRY_PREFIX')ssai_kafka2db:latest 23 | imagePullPolicy: IfNotPresent 24 | env: 25 | - name: NO_PROXY 26 | value: "*" 27 | - name: no_proxy 28 | value: "*" 29 | PLATFORM_NODE_SELECTOR(`Xeon')dnl 30 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/platform.m4: -------------------------------------------------------------------------------- 1 | define(`PLATFORM_SUFFIX',translit(defn(`PLATFORM'),`A-Z',`a-z'))dnl 2 | 
define(`PLATFORM_VOLUME_MOUNTS',dnl 3 | ifelse(defn(`PLATFORM'),`VCAC-A',dnl 4 | volumeMounts: 5 | - mountPath: /var/tmp 6 | name: var-tmp 7 | # resources: 8 | # limits: 9 | # vpu.intel.com/hddl: 1 10 | # gpu.intel.com/i915: 1 11 | securityContext: 12 | privileged: true 13 | ))dnl 14 | define(`PLATFORM_VOLUMES',dnl 15 | ifelse(defn(`PLATFORM'),`VCAC-A',dnl 16 | volumes: 17 | - name: var-tmp 18 | hostPath: 19 | path: /var/tmp 20 | type: Directory 21 | ))dnl 22 | define(`PLATFORM_NODE_SELECTOR',dnl 23 | affinity: 24 | nodeAffinity: 25 | requiredDuringSchedulingIgnoredDuringExecution: 26 | nodeSelectorTerms: 27 | - matchExpressions: 28 | - key: "vcac-zone" 29 | operator: `ifelse(defn(`PLATFORM'),`VCAC-A',ifelse($1,`VCAC-A',`In',`NotIn'),`NotIn')' 30 | values: 31 | - "yes" 32 | )dnl 33 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | 5 | shift 6 | . "$DIR/build.sh" 7 | 8 | function create_secret { 9 | kubectl create secret generic self-signed-certificate "--from-file=${DIR}/../../certificate/self.crt" "--from-file=${DIR}/../../certificate/self.key" 10 | } 11 | 12 | # create secrets 13 | create_secret 2>/dev/null || (kubectl delete secret self-signed-certificate; create_secret) 14 | 15 | for yaml in $(find "$DIR" -maxdepth 1 -name "*.yaml" -print); do 16 | kubectl apply -f "$yaml" 17 | done 18 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/stop.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | 5 | # delete all pods, services and deployments 6 | for yaml in $(find "${DIR}" -maxdepth 1 -name "*.yaml" -print); do 7 | kubectl delete --wait=false -f "$yaml" --ignore-not-found=true 2>/dev/null 8 | done 9 | 10 | kubectl delete secret self-signed-certificate 2> /dev/null || echo -n "" 11 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/video-archive-pvc.yaml.m4: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: PersistentVolumeClaim 4 | metadata: 5 | name: video-archive 6 | spec: 7 | accessModes: 8 | - ReadOnlyMany 9 | storageClassName: video-archive 10 | resources: 11 | requests: 12 | storage: defn(`VIDEO_ARCHIVE_VOLUME_SIZE')Gi 13 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/video-cache-pvc.yaml.m4: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: PersistentVolumeClaim 4 | metadata: 5 | name: video-cache 6 | spec: 7 | accessModes: 8 | - ReadWriteMany 9 | storageClassName: video-cache 10 | resources: 11 | requests: 12 | storage: defn(`VIDEO_CACHE_VOLUME_SIZE')Gi 13 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/zookeeper.yaml.m4: -------------------------------------------------------------------------------- 1 | include(platform.m4) 2 | 3 | apiVersion: v1 4 | kind: Service 5 | metadata: 6 | name: zookeeper-service 7 | labels: 8 | app: zookeeper 9 | spec: 10 | ports: 11 | - port: 2181 12 | protocol: TCP 13 | selector: 14 | app: zookeeper 15 | 16 | --- 17 | 18 | apiVersion: apps/v1 19 | kind: Deployment 20 | 
metadata: 21 | name: zookeeper 22 | labels: 23 | app: zookeeper 24 | spec: 25 | replicas: 1 26 | selector: 27 | matchLabels: 28 | app: zookeeper 29 | template: 30 | metadata: 31 | labels: 32 | app: zookeeper 33 | spec: 34 | enableServiceLinks: false 35 | containers: 36 | - name: zookeeper 37 | image: zookeeper:3.5.6 38 | imagePullPolicy: IfNotPresent 39 | ports: 40 | - containerPort: 2181 41 | env: 42 | - name: "ZOO_TICK_TIME" 43 | value: "10000" 44 | - name: "ZOO_MAX_CLIENT_CNXNS" 45 | value: "160000" 46 | - name: "ZOO_AUTOPURGE_PURGEINTERVAL" 47 | value: "1" 48 | - name: "ZOO_LOG4J_PROP" 49 | value: "ERROR" 50 | securityContext: 51 | runAsUser: 1000 52 | PLATFORM_NODE_SELECTOR(`Xeon')dnl 53 | -------------------------------------------------------------------------------- /doc/ad-insertion-sequence.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/doc/ad-insertion-sequence.png -------------------------------------------------------------------------------- /doc/ad-insertion-service-arch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/doc/ad-insertion-service-arch.png -------------------------------------------------------------------------------- /doc/adi-ui.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/doc/adi-ui.gif -------------------------------------------------------------------------------- /doc/cmake.md: -------------------------------------------------------------------------------- 1 | 2 | ### CMake Options: 3 | 4 | Use the following definitions to customize the build process: 5 | - **REGISTRY**: Specify the URL of the private docker registry. 6 | - **PLATFORM**: Specify the target platform: `Xeon` or [`VCAC-A`](vcac-a.md). 7 | - **FRAMEWORK**: Specify the target framework: `gst` or `ffmpeg`. 8 | - **NANALYTICS**: Specify the number of analytics instances enabled for content analysis. 9 | - **NTRANSCODES**: Specify the number of transcoding instances enabled for content or ad transcoding. 10 | - **MINRESOLUTION**: Specify the minimum resolution to transcode for content and ad clips: `360p`, `480p`, `720p`, etc. 11 | - **NETWORK**: Specify the model network preference: `FP32`, `FP16`, `INT8`, or a combination of them. 12 | 13 | ### Examples: 14 | 15 | ``` 16 | cd build 17 | cmake -DPLATFORM=Xeon .. 18 | ``` 19 | 20 | ``` 21 | cd build 22 | cmake -DFRAMEWORK=ffmpeg -DPLATFORM=Xeon .. 23 | ``` 24 | 25 | ### Make Commands: 26 | 27 | - **build**: Build the sample (docker) images. 28 | - **update**: Distribute the sample images to worker nodes. 29 | - **volume**: For Kubernetes, prepare persistent volumes for the ad/content storage. 30 | - **dist**: Create the sample distribution package. 31 | - **start/stop_docker_compose**: Start/stop the sample orchestrated by docker-compose. 32 | - **start/stop_docker_swarm**: Start/stop the sample orchestrated by docker swarm. 33 | - **start/stop_kubernetes**: Start/stop the sample orchestrated by Kubernetes.
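
For reference, a typical configure-build-run cycle that combines the options and targets above might look like the following sketch (the option values here are illustrative; choose the ones that match your deployment):

```
mkdir build
cd build
cmake -DPLATFORM=Xeon -DFRAMEWORK=gst -DNANALYTICS=2 -DNTRANSCODES=2 ..
make                        # build the sample images
make start_docker_compose   # launch the sample with docker-compose
make stop_docker_compose    # shut the sample down when finished
```
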
34 | 35 | ### See Also: 36 | 37 | - [Sample Distribution](dist.md) 38 | -------------------------------------------------------------------------------- /doc/content-provider-arch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/doc/content-provider-arch.png -------------------------------------------------------------------------------- /doc/customize.md: -------------------------------------------------------------------------------- 1 | 2 | Customize the video playlist by adding videos under [volume/video/archive](../volume/video/archive) or in the build script [content-provider/archive/build.sh](../content-provider/archive/build.sh). 3 | 4 | Rerun `make` and restart the service after making any changes. 5 | 6 | -------------------------------------------------------------------------------- /doc/dist.md: -------------------------------------------------------------------------------- 1 | 2 | The sample distribution package provides an easy way to ship the sample to a different system for evaluation or demonstration. 3 | 4 | ### Build for Distribution: 5 | 6 | Run the following commands to generate a distribution package: 7 | 8 | ```bash 9 | mkdir build 10 | cd build 11 | cmake .. 12 | make 13 | make dist 14 | ``` 15 | 16 | The generated sample distribution package is placed under the `dist` directory; copy this package to the target system to replicate the sample there. 17 | 18 | ### Restore Sample: 19 | 20 | On a system where you plan to run the sample, run the `restore.sh` script to restore the sample directory structure: 21 | 22 | ```bash 23 | ./restore.sh 24 | ``` 25 | 26 | ### Run Sample: 27 | 28 | Follow the usual [sample build and run procedures](../README.md) to invoke the sample. You can alter the sample [options](cmake.md), provided that the change does not require rebuilding the images. For example, avoid changing the `PLATFORM` and `FRAMEWORK` parameters. 29 | 30 | ### See Also: 31 | 32 | - [Build Options](cmake.md) 33 | - [Sample README](../README.md) 34 | 35 | -------------------------------------------------------------------------------- /doc/overall-arch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/doc/overall-arch.png -------------------------------------------------------------------------------- /doc/script.md: -------------------------------------------------------------------------------- 1 | 2 | The sample provides utility scripts (under the [script](../script) folder) to facilitate sample development and deployment: 3 | 4 | --- 5 | 6 | Password-less access is assumed between the docker swarm or Kubernetes manager and the workers. Set it up as follows for each worker node: 7 | 8 | ``` 9 | ssh-keygen 10 | ssh-copy-id 11 | ``` 12 | 13 | --- 14 | 15 | ### [mk-dist.sh](../script/mk-dist.sh) 16 | 17 | The script creates a sample distribution package that contains sample (docker) images, deployment scripts, and media files under the `dist` directory. You can [distribute the sample](dist.md) to a different system for evaluation and demonstration without the need to rebuild the sample. 18 | 19 | ### [update-image.sh](../script/update-image.sh) 20 | 21 | The script updates the worker nodes with the most recent images (on the current host).
The script scans the generated deployment scripts and checks the worker nodes to ensure that they have the most recent images. If any node has out-of-date images, the script updates them. 22 | 23 | To use the script, first perform a normal build `cmake...make...` and then `make update`. The script does not take any command-line arguments. 24 | 25 | -------------------------------------------------------------------------------- /script/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | if test -z "${DIR}"; then 4 | echo "This script should not be called directly." 5 | exit -1 6 | fi 7 | 8 | PLATFORM="${1:-Xeon}" 9 | FRAMEWORK="${2:-gst}" 10 | REGISTRY="$7" 11 | 12 | build_docker() { 13 | docker_file="$1" 14 | shift 15 | image_name="$1" 16 | shift 17 | if test -f "$docker_file.m4"; then 18 | m4 -I "$(dirname $docker_file)" "$docker_file.m4" > "$docker_file" 19 | fi 20 | (cd "$DIR"; docker build --network host --file="$docker_file" "$@" -t "$image_name" "$DIR" $(env | cut -f1 -d= | grep -E '_(proxy|REPO|VER)$' | sed 's/^/--build-arg /') --build-arg UID=$(id -u) --build-arg GID=$(id -g)) 21 | 22 | # if REGISTRY is specified, push image to the private registry 23 | if [ -n "$REGISTRY" ]; then 24 | docker tag "$image_name" "$REGISTRY$image_name" 25 | docker push "$REGISTRY$image_name" 26 | fi 27 | } 28 | 29 | # build image(s) in order (to satisfy dependencies) 30 | for dep in '.5.*' '.4.*' '.3.*' '.2.*' '.1.*' '.0.*' ''; do 31 | dirs=("$DIR/$PLATFORM/$FRAMEWORK" "$DIR/$PLATFORM" "$DIR") 32 | for dockerfile in $(find "${dirs[@]}" -maxdepth 1 -name "Dockerfile$dep" -print 2>/dev/null); do 33 | image=$(head -n 1 "$dockerfile" | grep '# ' | cut -d' ' -f2) 34 | if test -z "$image"; then image="$IMAGE"; fi 35 | build_docker "$dockerfile" "$image" 36 | done 37 | done 38 | -------------------------------------------------------------------------------- /script/deployment.cmake: -------------------------------------------------------------------------------- 1 | add_custom_target(start_${service} "${CMAKE_CURRENT_SOURCE_DIR}/start.sh" "${service}" "${PLATFORM}" "${FRAMEWORK}" "${NANALYTICS}" "${NTRANSCODES}" "${MINRESOLUTION}" "${NETWORK}" "${REGISTRY}") 2 | add_custom_target(stop_${service} "${CMAKE_CURRENT_SOURCE_DIR}/stop.sh" "${service}") 3 | -------------------------------------------------------------------------------- /script/mk-dist.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | case "$0" in 4 | *restore*) 5 | for tarfile in dist/*.tar; do 6 | docker load -i "$tarfile" 7 | done 8 | tar xvfzm dist/dirs.tgz 9 | ;; 10 | *) 11 | DIR=$(dirname $(readlink -f "$0")) 12 | YML="$DIR/../deployment/docker-swarm/docker-compose.yml" 13 | rm -rf "$DIR/../dist" 14 | if test -e "$YML"; then 15 | mkdir -p "$DIR/../dist/dist" 16 | for image in `awk -v 'labels=*' -f "$DIR/scan-yaml.awk" "$YML"` ssai_content_provider_archive:latest ssai_self_certificate:latest ssai_ad_insertion_ad_static:latest ssai_ad_content_archive:latest; do 17 | imagefile=${image//\//-} 18 | imagefile=${imagefile//:/-} 19 | echo "archiving $image => $imagefile" 20 | (docker image save "$image" > "$DIR/../dist/dist/${imagefile}.tar") || (docker pull "$image" && (docker image save "$image" > "$DIR/../dist/dist/${imagefile}.tar")) 21 | done 22 | (cd "$DIR/.."; tar cfz "$DIR/../dist/dist/dirs.tgz" script deployment doc CMakeLists.txt README.md volume --exclude=doc/asset) 23 | cp "$0" "$DIR/../dist/restore.sh" 24 | cp
"$DIR/../LICENSE" "$DIR/../dist" 25 | else 26 | echo "Missing $YML" 27 | echo "Sample not fully built? (Please run 'make')" 28 | fi 29 | ;; 30 | esac 31 | -------------------------------------------------------------------------------- /script/scan-all.cmake: -------------------------------------------------------------------------------- 1 | file(GLOB dirs "*") 2 | foreach (dir ${dirs}) 3 | if(EXISTS ${dir}/CMakeLists.txt) 4 | add_subdirectory(${dir}) 5 | endif() 6 | endforeach() 7 | -------------------------------------------------------------------------------- /script/service.cmake: -------------------------------------------------------------------------------- 1 | if(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/build.sh") 2 | add_custom_target(build_${service} ALL "${CMAKE_CURRENT_SOURCE_DIR}/build.sh" "${PLATFORM}" "${FRAMEWORK}" "${NANALYTICS}" "${NTRANSCODES}" "${MINRESOLUTION}" "${NETWORK}" "${REGISTRY}") 3 | endif() 4 | -------------------------------------------------------------------------------- /script/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | if test -z "${DIR}"; then 4 | echo "This script should not be called directly." 5 | exit -1 6 | fi 7 | 8 | pid="$(docker ps -f ancestor=$IMAGE --format='{{.ID}}' | head -n 1)" 9 | if [ -n "$pid" ] && [ "$#" -le "1" ]; then 10 | echo "bash into running container...$IMAGE" 11 | docker exec -it $pid ${*-/bin/bash} 12 | else 13 | echo "bash into new container...$IMAGE" 14 | args=("$@") 15 | docker run --rm ${OPTIONS[@]} $(env | cut -f1 -d= | grep -E '_(proxy|REPO|VER)$' | sed 's/^/-e /') --entrypoint ${1:-/bin/bash} -it "${IMAGE}" ${args[@]:1} 16 | fi 17 | -------------------------------------------------------------------------------- /script/travis_build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | cd $1 4 | BUILD_NAME=$1 5 | 6 | echo Building $BUILD_NAME 7 | 8 | BUILD_NAME=$(sed 's/[\/]/_/g' <<< $BUILD_NAME) 9 | 10 | make >> log_$BUILD_NAME.log 11 | -------------------------------------------------------------------------------- /volume/.gitignore: -------------------------------------------------------------------------------- 1 | ad/* 2 | video/* 3 | -------------------------------------------------------------------------------- /volume/ad/archive/car6.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/volume/ad/archive/car6.mp4 -------------------------------------------------------------------------------- /volume/ad/archive/catfood.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/volume/ad/archive/catfood.mp4 -------------------------------------------------------------------------------- /volume/ad/archive/travel6.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/volume/ad/archive/travel6.mp4 -------------------------------------------------------------------------------- /volume/video/archive/.gitignore: -------------------------------------------------------------------------------- 1 | *.mp4 2 | *.png 3 | 

--------------------------------------------------------------------------------
/script/build.sh:
--------------------------------------------------------------------------------
#!/bin/bash -e

if test -z "${DIR}"; then
    echo "This script should not be called directly."
    exit -1
fi

PLATFORM="${1:-Xeon}"
FRAMEWORK="${2:-gst}"
# optional registry prefix; passed as the 7th argument by the cmake build targets
REGISTRY="$7"

build_docker() {
    docker_file="$1"
    shift
    image_name="$1"
    shift
    if test -f "$docker_file.m4"; then
        m4 -I "$(dirname $docker_file)" "$docker_file.m4" > "$docker_file"
    fi
    (cd "$DIR"; docker build --network host --file="$docker_file" "$@" -t "$image_name" "$DIR" $(env | cut -f1 -d= | grep -E '_(proxy|REPO|VER)$' | sed 's/^/--build-arg /') --build-arg UID=$(id -u) --build-arg GID=$(id -g))

    # if REGISTRY is specified, push image to the private registry
    if [ -n "$REGISTRY" ]; then
        docker tag "$image_name" "$REGISTRY$image_name"
        docker push "$REGISTRY$image_name"
    fi
}

# build image(s) in order (to satisfy dependencies)
for dep in '.5.*' '.4.*' '.3.*' '.2.*' '.1.*' '.0.*' ''; do
    dirs=("$DIR/$PLATFORM/$FRAMEWORK" "$DIR/$PLATFORM" "$DIR")
    for dockerfile in $(find "${dirs[@]}" -maxdepth 1 -name "Dockerfile$dep" -print 2>/dev/null); do
        # the image name comes from the "# <name>" comment on the Dockerfile's first line; fall back to $IMAGE
        image=$(head -n 1 "$dockerfile" | grep '# ' | cut -d' ' -f2)
        if test -z "$image"; then image="$IMAGE"; fi
        build_docker "$dockerfile" "$image"
    done
done
--------------------------------------------------------------------------------
/script/deployment.cmake:
--------------------------------------------------------------------------------
add_custom_target(start_${service} "${CMAKE_CURRENT_SOURCE_DIR}/start.sh" "${service}" "${PLATFORM}" "${FRAMEWORK}" "${NANALYTICS}" "${NTRANSCODES}" "${MINRESOLUTION}" "${NETWORK}" "${REGISTRY}")
add_custom_target(stop_${service} "${CMAKE_CURRENT_SOURCE_DIR}/stop.sh" "${service}")
--------------------------------------------------------------------------------
/script/mk-dist.sh:
--------------------------------------------------------------------------------
#!/bin/bash -e

case "$0" in
*restore*)
    # invoked as restore.sh on the target system: load the saved images and unpack the directories
    for tarfile in dist/*.tar; do
        docker load -i "$tarfile"
    done
    tar xvfzm dist/dirs.tgz
    ;;
*)
    # invoked as mk-dist.sh: save the sample images and package the directories under dist/
    DIR=$(dirname $(readlink -f "$0"))
    YML="$DIR/../deployment/docker-swarm/docker-compose.yml"
    rm -rf "$DIR/../dist"
    if test -e "$YML"; then
        mkdir -p "$DIR/../dist/dist"
        for image in `awk -v 'labels=*' -f "$DIR/scan-yaml.awk" "$YML"` ssai_content_provider_archive:latest ssai_self_certificate:latest ssai_ad_insertion_ad_static:latest ssai_ad_content_archive:latest; do
            imagefile=${image//\//-}
            imagefile=${imagefile//:/-}
            echo "archiving $image => $imagefile"
            (docker image save "$image" > "$DIR/../dist/dist/${imagefile}.tar") || (docker pull "$image" && (docker image save "$image" > "$DIR/../dist/dist/${imagefile}.tar"))
        done
        (cd "$DIR/.."; tar cfz "$DIR/../dist/dist/dirs.tgz" script deployment doc CMakeLists.txt README.md volume --exclude=doc/asset)
        cp "$0" "$DIR/../dist/restore.sh"
        cp "$DIR/../LICENSE" "$DIR/../dist"
    else
        echo "Missing $YML"
        echo "Sample not fully built? (Please run 'make')"
    fi
    ;;
esac
--------------------------------------------------------------------------------
/script/scan-all.cmake:
--------------------------------------------------------------------------------
file(GLOB dirs "*")
foreach (dir ${dirs})
    if(EXISTS ${dir}/CMakeLists.txt)
        add_subdirectory(${dir})
    endif()
endforeach()
--------------------------------------------------------------------------------
/script/service.cmake:
--------------------------------------------------------------------------------
if(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/build.sh")
    add_custom_target(build_${service} ALL "${CMAKE_CURRENT_SOURCE_DIR}/build.sh" "${PLATFORM}" "${FRAMEWORK}" "${NANALYTICS}" "${NTRANSCODES}" "${MINRESOLUTION}" "${NETWORK}" "${REGISTRY}")
endif()
--------------------------------------------------------------------------------
/script/shell.sh:
--------------------------------------------------------------------------------
#!/bin/bash -e

if test -z "${DIR}"; then
    echo "This script should not be called directly."
    exit -1
fi

# exec into a running container of this image when one exists and at most one command argument is given
pid="$(docker ps -f ancestor=$IMAGE --format='{{.ID}}' | head -n 1)"
if [ -n "$pid" ] && [ "$#" -le "1" ]; then
    echo "bash into running container...$IMAGE"
    docker exec -it $pid ${*-/bin/bash}
else
    echo "bash into new container...$IMAGE"
    args=("$@")
    docker run --rm ${OPTIONS[@]} $(env | cut -f1 -d= | grep -E '_(proxy|REPO|VER)$' | sed 's/^/-e /') --entrypoint ${1:-/bin/bash} -it "${IMAGE}" ${args[@]:1}
fi
--------------------------------------------------------------------------------
/script/travis_build.sh:
--------------------------------------------------------------------------------
#!/bin/bash -e

cd $1
BUILD_NAME=$1

echo Building $BUILD_NAME

BUILD_NAME=$(sed 's/[\/]/_/g' <<< $BUILD_NAME)

make >> log_$BUILD_NAME.log
--------------------------------------------------------------------------------
/volume/.gitignore:
--------------------------------------------------------------------------------
ad/*
video/*
--------------------------------------------------------------------------------
/volume/ad/archive/car6.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/volume/ad/archive/car6.mp4
--------------------------------------------------------------------------------
/volume/ad/archive/catfood.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/volume/ad/archive/catfood.mp4
--------------------------------------------------------------------------------
/volume/ad/archive/travel6.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenVisualCloud/Ad-Insertion-Sample/77dc65ede7ad05641a12971b4b4dc579cc0f01f8/volume/ad/archive/travel6.mp4
--------------------------------------------------------------------------------
/volume/video/archive/.gitignore:
--------------------------------------------------------------------------------
*.mp4
*.png
--------------------------------------------------------------------------------