├── .gitignore ├── .travis.yml ├── ATTRIBUTION.md ├── CMakeLists.txt ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── benchmark ├── .dockerignore ├── CMakeLists.txt ├── Dockerfile ├── build.sh ├── main.py ├── shell.sh └── transcoding.json ├── cdn-server ├── .dockerignore ├── CMakeLists.txt ├── Dockerfile ├── build.sh ├── html │ ├── css │ │ ├── app.css │ │ ├── foundation.css │ │ └── foundation.min.css │ ├── favicon.ico │ ├── header.shtml │ ├── icon │ │ ├── foundation-icons.css │ │ ├── foundation-icons.eot │ │ ├── foundation-icons.svg │ │ ├── foundation-icons.ttf │ │ └── foundation-icons.woff │ ├── image │ │ ├── grafana │ │ │ ├── Kubernetes-Monitoring-Arch.png │ │ │ ├── alertmanager.png │ │ │ ├── dashboards_list.png │ │ │ ├── home.png │ │ │ ├── login.png │ │ │ ├── navigation.png │ │ │ ├── nodes_dashboard.png │ │ │ ├── promQL.png │ │ │ └── prometheus.png │ │ └── kibana │ │ │ ├── Kubernetes-Logging-Arch.png │ │ │ ├── Kubernetes-Logging-Arch.vsdx │ │ │ ├── choose_source.png │ │ │ ├── ffmpeg_log.png │ │ │ ├── filter_log.png │ │ │ ├── generate_report.png │ │ │ ├── logs.png │ │ │ ├── navigation.png │ │ │ ├── save_log.png │ │ │ ├── step_1.png │ │ │ ├── step_2.png │ │ │ ├── visualization_show.png │ │ │ └── visualization_type.png │ ├── index.html │ ├── js │ │ ├── app-api.js │ │ ├── app-header.js │ │ ├── app-player.js │ │ ├── app.js │ │ └── vendor │ │ │ ├── dash.all.min.js │ │ │ ├── foundation.min.js │ │ │ ├── hls.min.js │ │ │ ├── jquery-3.2.1.min.js │ │ │ └── what-input.js │ └── player.shtml ├── main.py ├── messaging.py ├── nginx.conf ├── playlist.py ├── schedule.py ├── shell.sh ├── stat.xsl ├── tasks.py └── upload.py ├── client ├── killvlc.bat └── vlc_playback.bat ├── common ├── CMakeLists.txt ├── Dockerfile ├── build.sh ├── ffmpegcmd.py ├── messaging.py ├── shell.sh └── zkstate.py ├── content-provider ├── CMakeLists.txt ├── Dockerfile ├── build.sh └── shell.sh ├── deployment ├── CMakeLists.txt ├── certificate │ ├── CMakeLists.txt │ ├── Dockerfile │ ├── build.sh │ ├── 
self-sign.sh │ └── shell.sh └── kubernetes │ ├── CMakeLists.txt │ ├── README.md │ ├── build.sh │ ├── helm │ ├── .gitignore │ ├── CMakeLists.txt │ ├── build.sh │ ├── cdn-transcode │ │ ├── Chart.yaml │ │ ├── README.md │ │ ├── templates │ │ │ ├── cdn.yaml │ │ │ ├── kafka.yaml │ │ │ ├── live.yaml │ │ │ ├── redis.yaml │ │ │ ├── video-archive-pvc.yaml │ │ │ ├── video-cache-pvc.yaml │ │ │ ├── xcode.yaml │ │ │ └── zookeeper.yaml │ │ └── values.yaml.m4 │ ├── start.sh │ └── stop.sh │ ├── mkvolume.sh │ ├── video-archive-pv.yaml.m4 │ ├── video-cache-pv.yaml.m4 │ ├── volume-info.sh │ └── yaml │ ├── CMakeLists.txt │ ├── build.sh │ ├── cdn.yaml.m4 │ ├── configure.m4 │ ├── kafka.yaml.m4 │ ├── live.yaml.m4 │ ├── platform.m4 │ ├── redis.yaml.m4 │ ├── start.sh │ ├── stop.sh │ ├── video-archive-pvc.yaml.m4 │ ├── video-cache-pvc.yaml.m4 │ ├── xcode.yaml.m4 │ └── zookeeper.yaml.m4 ├── doc ├── CDN-Transcode-Sample-Arch.png └── cmake.md ├── kafka ├── CMakeLists.txt ├── Dockerfile ├── build.sh └── shell.sh ├── script ├── Kubernetes_remove.sh ├── Kubernetes_setup_master.sh ├── Kubernetes_setup_node.sh ├── README.md ├── build.sh ├── cadvisor.sh ├── deployment.cmake ├── enable_gpu_plugin.sh ├── enable_nat.sh ├── install_dependency.sh ├── loop.m4 ├── nfs_setup.sh ├── scan-all.cmake ├── scan-yaml.awk ├── service.cmake ├── shell.sh └── update-image.sh ├── streaming-server ├── .dockerignore ├── CMakeLists.txt ├── Dockerfile ├── build.sh ├── nginx.conf └── shell.sh └── xcode-server ├── .dockerignore ├── CMakeLists.txt ├── SG1 └── Dockerfile ├── Xeon └── Dockerfile ├── XeonE3 └── Dockerfile ├── build.sh ├── main.py ├── models ├── vmaf_4k_v0.6.1.json └── vmaf_v0.6.1.json └── shell.sh /.gitignore: -------------------------------------------------------------------------------- 1 | build/* 2 | volume/video/* 3 | deployment/docker-swarm/dhparam.pem 4 | deployment/docker-swarm/self.crt 5 | deployment/docker-swarm/self.key 6 | deployment/kubernetes/*.yaml 7 | deployment/kubernetes/yaml/*.yaml 8 | 
deployment/certificate/self.crt 9 | deployment/certificate/self.key 10 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: bash 2 | dist: xenial 3 | services: docker 4 | #cache: 5 | # directories: 6 | # - /var/lib/apt/lists 7 | 8 | before_install: 9 | - curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add - 10 | - sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" 11 | - sudo apt-get update 12 | - sudo apt-get -y install docker-ce 13 | - docker version 14 | 15 | jobs: 16 | include: 17 | - name: xcode-server 18 | script: 19 | - mkdir build 20 | - cd build 21 | - cmake .. 22 | - cd xcode-server 23 | - make 24 | - name: cdn-server 25 | script: 26 | - mkdir build 27 | - cd build 28 | - cmake .. 29 | - cd cdn-server 30 | - make 31 | - name: content-provider 32 | script: 33 | - mkdir build 34 | - cd build 35 | - cmake .. 36 | - cd content-provider 37 | - make 38 | - name: self-certificates 39 | script: 40 | - mkdir build 41 | - cd build 42 | - cmake .. 
43 | - cd deployment/certificate 44 | - make 45 | -------------------------------------------------------------------------------- /ATTRIBUTION.md: -------------------------------------------------------------------------------- 1 | Component: Apache python3-kazoo 2 | Repository: https://github.com/python-zk/kazoo 3 | License: Apache License 2.0 (https://github.com/python-zk/kazoo/blob/master/LICENSE) 4 | 5 | Component: BeautifulSoup4 6 | Repository: https://github.com/il-vladislav/BeautifulSoup4/ 7 | License: MIT (https://pypi.org/project/beautifulsoup4/) 8 | 9 | Component: confluentinc/cp-docker-images 10 | Repository: https://github.com/confluentinc/cp-docker-images 11 | License: Apache License 2.0 (https://github.com/confluentinc/cp-docker-images/blob/5.3.3-post/LICENSE) 12 | 13 | Component: dashjs-tmp-nicky 14 | Repository: https://github.com/Dash-Industry-Forum/dash.js/ 15 | License: BSD 3-clause "New" or "Revised" License (https://github.com/Dash-Industry-Forum/dash.js/blob/development/LICENSE.md) 16 | 17 | Component: jQuery (New Wave JavaScript) 18 | Repository: https://github.com/jquery/jquery 19 | License: MIT (https://github.com/jquery/jquery/blob/master/LICENSE.txt) 20 | 21 | Component: kafka-python 22 | Repository: https://github.com/dpkp/kafka-python/ 23 | License: MIT (https://github.com/dpkp/kafka-python/blob/master/LICENSE) 24 | 25 | Component: What Input 26 | Repository: https://github.com/ten1seven/what-input 27 | License: MIT (https://github.com/ten1seven/what-input/blob/master/LICENSE) 28 | 29 | Component: Tornado Tornado Web Server 30 | Repository: https://github.com/tornadoweb/tornado 31 | License: Apache License 2.0 (https://github.com/tornadoweb/tornado/blob/master/LICENSE) 32 | 33 | Component: video-dev hls.js 34 | Repository: https://github.com/video-dev/hls.js 35 | License: Apache License 2.0 (https://github.com/video-dev/hls.js/blob/master/LICENSE) 36 | 37 | Component: Foundation for Sites 38 | Repository: 
https://github.com/zurb/foundation-sites-6 39 | License: MIT (https://github.com/zurb/foundation-sites-6/blob/develop/LICENSE) 40 | 41 | Component: Netflix/vmaf 42 | Repository: https://github.com/Netflix/vmaf 43 | License: BSD-2-Clause-Patent (https://github.com/Netflix/vmaf/blob/master/LICENSE) 44 | 45 | Component: Jquery-Magnific-Popup 46 | Repository: https://github.com/dimsemenov/Magnific-Popup 47 | License: MIT (https://github.com/dimsemenov/Magnific-Popup/blob/master/LICENSE) 48 | 49 | Component: bootstrap.min.js 50 | Repository: https://github.com/twbs/bootstrap 51 | License: MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE) 52 | -------------------------------------------------------------------------------- /CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 2.8) 2 | 3 | Project(CDNTranscodeSample NONE) 4 | 5 | if (NOT DEFINED REGISTRY) 6 | set(REGISTRY "") 7 | elseif (NOT ${REGISTRY} MATCHES "/$") 8 | set(REGISTRY "${REGISTRY}/") 9 | endif() 10 | if (NOT DEFINED NVODS) 11 | set(NVODS "2") 12 | endif() 13 | if (NOT DEFINED NLIVES) 14 | set(NLIVES "1") 15 | endif() 16 | 17 | if (NOT DEFINED PLATFORM) 18 | set(PLATFORM "Xeon") 19 | endif() 20 | 21 | if (NOT DEFINED SCENARIO) 22 | set(SCENARIO "transcode") 23 | endif() 24 | 25 | if (NOT (SCENARIO STREQUAL "cdn")) 26 | set(NLIVES "0") 27 | endif() 28 | 29 | file(GLOB dirs "deployment" "*") 30 | list(REMOVE_DUPLICATES dirs) 31 | foreach(dir ${dirs}) 32 | if(EXISTS ${dir}/CMakeLists.txt) 33 | add_subdirectory(${dir}) 34 | endif() 35 | endforeach() 36 | 37 | # legal message 38 | execute_process(COMMAND printf "\nThis script will build third party components licensed under various open source licenses into your container images. The terms under which those components may be used and distributed can be found with the license document that is provided with those components. 
Please familiarize yourself with those terms to ensure your distribution of those components complies with the terms of those licenses.\n\n") 39 | execute_process(COMMAND printf "\n-- Setting: PLATFORM=${PLATFORM} SCENARIO=${SCENARIO}\n") 40 | execute_process(COMMAND printf "-- Setting: NVODS=${NVODS}, NLIVES=${NLIVES}\n") 41 | execute_process(COMMAND printf "-- Setting: REGISTRY=${REGISTRY}\n") 42 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | We welcome community contributions to the CDN Transcode Sample. Thank you for your time! By contributing to the project, you agree to the license and copyright terms therein and to the release of your contribution under these terms. 2 | 3 | ## Contribution process 4 | - Validate that your changes do not break a build 5 | - Perform smoke tests and ensure they pass 6 | - Submit a pull request for review to the maintainer 7 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # PROJECT NOT UNDER ACTIVE MANAGEMENT # 2 | This project will no longer be maintained by Intel. 3 | Intel has ceased development and contributions including, but not limited to, maintenance, bug fixes, new releases, or updates, to this project. 4 | Intel no longer accepts patches to this project. 5 | If you have an ongoing need to use this project, are interested in independently developing it, or would like to maintain patches for the open source software community, please create your own fork of this project. 
6 | 7 | 8 | 9 | ### Open Visual Cloud CDN Transcode Sample 10 | 11 | [![Travis Build Status](https://travis-ci.com/OpenVisualCloud/CDN-Transcode-Sample.svg?branch=master)](https://travis-ci.com/OpenVisualCloud/CDN-Transcode-Sample) 12 | [![Stable release](https://img.shields.io/badge/latest_release-v1.0-green.svg)](https://github.com/OpenVisualCloud/CDN-Transcode-Sample/releases/tag/v1.0) 13 | [![License](https://img.shields.io/badge/license-BSD_3_Clause-green.svg)](https://github.com/OpenVisualCloud/CDN-Transcode-Sample/blob/master/LICENSE) 14 | [![Contributions](https://img.shields.io/badge/contributions-welcome-blue.svg)](https://github.com/OpenVisualCloud/CDN-Transcode-Sample/wiki) 15 | 16 | The CDN Transcode Sample is an Open Visual Cloud software stack with all required open source ingredients well integrated to provide out-of-box simple transcode or transcode+CDN service, including live streaming and video on demand. It also provides Docker-based media delivery software development environment upon which developer can easily build their specific applications. 17 | 18 | ### Architecture 19 | 20 | The sample implements a reference server-side transcode system over CDN infrastructure, which features `live streaming` and `VOD`. Among them, the `VOD` service can run independently to provide a simple transcode service. 21 | 22 | 23 | 24 | ### Software Stacks 25 | 26 | The sample is powered by the following Open Visual Cloud software stacks: 27 | 28 | - Media transcoding software stack: 29 | 30 | The FFmpeg-based media transcoding stack is used to transcode media content from a higher resolution/quality to a lower resolution/quality. The software stack is optimized for Intel Xeon Scalable Processors and Intel XeonE3 Scalable Processors. 31 | 32 | - Media streaming and Web Hosting software stack: 33 | 34 | The NGINX-based software stack is used to host web services, video content and provide video streaming services. 
The software stack is optimized for Intel Xeon Scalable Processors. 35 | 36 | ### Install Prerequisites: 37 | 38 | - **Time Zone**: Check that the timezone setting of your host machine is correctly configured. Timezone is used during build. If you plan to run the sample on a cluster of machines, please make sure to synchronize time among the controller node and worker nodes. 39 | 40 | - **Build Tools**: Install `cmake`, `make`, `m4`, `wget` and `gawk` if they are not available on your system. 41 | 42 | - **Docker Engine**: 43 | 44 | - Install [docker engine](https://docs.docker.com/get-docker). Minimum version required: `17.05`. Make sure you setup docker to run as a regular user. 45 | - Setup `Kubernetes`. See [Kubernetes Setup](deployment/kubernetes/README.md) for additional setup details. 46 | - Setup docker proxy as follows if you are behind a firewall: 47 | 48 | ``` 49 | sudo mkdir -p /etc/systemd/system/docker.service.d 50 | printf "[Service]\nEnvironment=\"HTTPS_PROXY=$https_proxy\" \"NO_PROXY=$no_proxy\"\n" | sudo tee /etc/systemd/system/docker.service.d/proxy.conf 51 | sudo systemctl daemon-reload 52 | sudo systemctl restart docker 53 | ``` 54 | 55 | ### Build the Sample 56 | 57 | Run the following command to run the sample as a simple transcoder: 58 | ``` 59 | mkdir build 60 | cd build 61 | cmake .. 62 | make 63 | ``` 64 | 65 | Run the following command to run the sample as transcode+CDN: 66 | ``` 67 | mkdir build 68 | cd build 69 | cmake -DSCENARIO=cdn .. 70 | make 71 | ``` 72 | 73 | --- 74 | 75 | If you deploy the sample to a cluster, please configure the sample, as `cmake -DREGISTRY= ..`, to push the sample images to the private docker registry after each build. 76 | 77 | To deploy without a private registry, run `make update` after each build to push the sample images to the cluster nodes (which requires passwordless access from the master node to the worker nodes.) 
78 | 79 | --- 80 | 81 | ### Deploy the Sample 82 | 83 | Start/stop the sample with Kubernetes [yaml configurations](deployment/kubernetes/yaml): 84 | 85 | ``` 86 | make volume 87 | make start_kubernetes 88 | ... 89 | make stop_kubernetes 90 | ``` 91 | 92 | Start/stop the sample with Kubernetes [Helm charts](deployment/kubernetes/helm): 93 | 94 | ``` 95 | make volume 96 | make start_helm 97 | ... 98 | make stop_helm 99 | ``` 100 | 101 | For the `transcode` scenario, look at the logs of the `benchmark` pod for the batch transcoding summary. For the `cdn` scenario, point your browser to `https://` to watch the list of video clips via `DASH` or `HLS`. 102 | 103 | # See Also 104 | 105 | - [Kubernetes Setup](deployment/kubernetes/README.md) 106 | - [Build Options](doc/cmake.md) 107 | - [Reference Architecture](https://networkbuilders.intel.com/solutionslibrary/container-bare-metal-for-2nd-generation-intel-xeon-scalable-processor) 108 | 109 | -------------------------------------------------------------------------------- /benchmark/.dockerignore: -------------------------------------------------------------------------------- 1 | CMakeLists.txt 2 | *.m4 3 | test/* 4 | -------------------------------------------------------------------------------- /benchmark/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | if (SCENARIO STREQUAL "transcode") 2 | set(service "tc_benchmark_service") 3 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 4 | add_dependencies(build_${service} build_tc_common) 5 | endif() 6 | -------------------------------------------------------------------------------- /benchmark/Dockerfile: -------------------------------------------------------------------------------- 1 | 2 | FROM tc_common 3 | 4 | Run apt-get update -q && apt-get install -y -q python3-kafka python3-kazoo && rm -rf /var/lib/apt/lists/* 5 | 6 | COPY *.py /home/ 7 | COPY *.json /home/ 8 | CMD ["/bin/bash","-c","/home/main.py"] 9 | 
WORKDIR /home 10 | 11 | #### 12 | ARG UID 13 | RUN mkdir -p /var/www/archive 14 | USER ${UID} 15 | #### 16 | -------------------------------------------------------------------------------- /benchmark/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="tc_benchmark_service" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . "${DIR}/../script/build.sh" 7 | -------------------------------------------------------------------------------- /benchmark/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from messaging import Producer, Consumer 4 | from os import listdir, walk 5 | from os.path import getsize 6 | import time 7 | import json 8 | import psutil 9 | import re 10 | 11 | KAFKA_TOPIC = "content_provider_sched" 12 | KAFKA_WORKLOAD_TOPIC = "transcoding" 13 | 14 | ARCHIVE_ROOT = "/var/www/archive" 15 | TARGET_ROOT = "/var/www/video" 16 | log_file = TARGET_ROOT+"/log.txt" 17 | 18 | config_file="/home/transcoding.json" 19 | 20 | streams = [s for s in listdir(ARCHIVE_ROOT) if s.endswith((".mp4", ".avi"))] 21 | 22 | jobs=[] 23 | with open(config_file,"rt") as fd: 24 | jobs=json.load(fd) 25 | 26 | print("Submit jobs:", flush=True) 27 | # ingest jobs to start transcoding 28 | producer = Producer() 29 | idx=0 30 | for idx1,msg in enumerate(jobs): 31 | # schedule producing the stream 32 | name_pattern=msg["name"] 33 | for stream1 in streams: 34 | if re.search(name_pattern, stream1): 35 | msg.update({"idx": str(idx), "name": stream1}) 36 | print(msg,flush=True) 37 | idx=idx+1 38 | 39 | while True: 40 | try: 41 | producer.send(KAFKA_TOPIC, json.dumps(msg)) 42 | break 43 | except Exception as e: 44 | print("Exception: {}".format(e)) 45 | time.sleep(5) 46 | 47 | # show transcoding statistics 48 | 49 | def stats_fileinfo(root): 50 | nfiles=0 51 | size=0 52 | for path, dirs, files in walk(root): 53 | for stream1 in files: 54 | if 
stream1.endswith((".mp4", ".avi", ".ts")): 55 | nfiles=nfiles+1 56 | size=size+getsize(path+"/"+stream1) 57 | return (nfiles, size) 58 | 59 | c = Consumer(None) 60 | 61 | info={ 62 | "summary":{ 63 | "cpu": round(psutil.cpu_percent(),2), 64 | "mem": round(int(psutil.virtual_memory().total - psutil.virtual_memory().free) / float(psutil.virtual_memory().total), 2), 65 | "active":0, 66 | "completed":0, 67 | "aborted":0 68 | }, 69 | "active_task":[], 70 | "completed_task":[], 71 | "aborted_task":[] 72 | } 73 | 74 | def status_check(item, status): 75 | return isinstance(item,dict) and "status" in item.keys() and item["status"] == status 76 | 77 | def process_message(msg,sinfo): 78 | msg=json.loads(message) 79 | sinfo.update({msg["id"]:msg}) 80 | active=[ item["id"] for k,item in sinfo.items() if status_check(item, "active")] 81 | completed=[ item["id"] for k,item in sinfo.items() if status_check(item, "completed")] 82 | aborted=[ item["id"] for k,item in sinfo.items() if status_check(item, "aborted")] 83 | sinfo.update({ 84 | "summary":{ 85 | "cpu": round(psutil.cpu_percent(),2), 86 | "mem": round(int(psutil.virtual_memory().total - psutil.virtual_memory().free) / float(psutil.virtual_memory().total), 2), 87 | "active": len(active), 88 | "completed": len(completed), 89 | "aborted": len(aborted) 90 | }, 91 | "active_task":active, 92 | "completed_task":completed, 93 | "aborted_task":aborted 94 | }) 95 | return active,completed,aborted 96 | 97 | def log_info(sinfo): 98 | with open(log_file, "w") as f: 99 | for k,v in sinfo.items(): 100 | f.write(str(k)+": "+json.dumps(v)) 101 | f.write("\n") 102 | 103 | def format_info(sinfo,task_list): 104 | print("\n", flush=True) 105 | for k,v in sinfo.items(): 106 | if k in task_list + ["summary","active_task","completed_task","aborted_task"]: 107 | print(k,v, flush=True) 108 | 109 | while True: 110 | try: 111 | print("Waiting...",flush=True) 112 | for message in c.messages(KAFKA_WORKLOAD_TOPIC): 113 | active,completed,aborted = 
process_message(message,info) 114 | log_info(info) 115 | format_info(info,active) 116 | except Exception as e: 117 | print("Exception: {}".format(e)) 118 | time.sleep(2) 119 | -------------------------------------------------------------------------------- /benchmark/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="tc_benchmark_service" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | OPTIONS=("--volume=${DIR}/../../volume/video/archive:/var/www/archive:ro" "--volume=${DIR}/../../volume/video/dash:/var/www/dash:ro" "--volume=${DIR}/../../volume/video/hls:/var/www/hls:ro") 6 | 7 | . "${DIR}/../script/shell.sh" 8 | -------------------------------------------------------------------------------- /benchmark/transcoding.json: -------------------------------------------------------------------------------- 1 | [{ 2 | "name": ".mp4$", 3 | "parameters": { 4 | "renditions":[[1920, 1080, 3280000, 192000]], 5 | "codec_type":"HEVC", 6 | "gop_size": "100", 7 | "framerate": "30", 8 | "bframe": "2", 9 | "preset": "9", 10 | "profile": "main", 11 | "refs": "2", 12 | "forced_idr": "1" 13 | }, 14 | "loop": 0, 15 | "output": { 16 | "target": "file", 17 | "type": "mp4" 18 | }, 19 | "live_vod": "live" 20 | },{ 21 | "name": ".mp4$", 22 | "parameters": { 23 | "renditions":[[1920, 1080, 4100000, 192000]], 24 | "codec_type":"AVC", 25 | "gop_size": "100", 26 | "framerate": "30", 27 | "bframe": "2", 28 | "preset": "veryfast", 29 | "profile": "main", 30 | "refs": "2", 31 | "forced_idr": "1" 32 | }, 33 | "loop": 0, 34 | "output": { 35 | "target": "file", 36 | "type": "mp4" 37 | }, 38 | "live_vod": "live" 39 | },{ 40 | "name": ".mp4$", 41 | "parameters": { 42 | "renditions":[[1280, 720, 1680000, 192000]], 43 | "codec_type":"HEVC", 44 | "gop_size": "100", 45 | "framerate": "30", 46 | "bframe": "2", 47 | "preset": "9", 48 | "profile": "main", 49 | "refs": "2", 50 | "forced_idr": "1" 51 | }, 52 | "loop": 0, 53 | "output": { 54 | 
"target": "file", 55 | "type": "mp4" 56 | }, 57 | "live_vod": "live" 58 | },{ 59 | "name": ".mp4$", 60 | "parameters": { 61 | "renditions":[[1280, 720, 2100000, 192000]], 62 | "codec_type":"AVC", 63 | "gop_size": "100", 64 | "framerate": "30", 65 | "bframe": "2", 66 | "preset": "veryfast", 67 | "profile": "main", 68 | "refs": "2", 69 | "forced_idr": "1" 70 | }, 71 | "loop": 0, 72 | "output": { 73 | "target": "file", 74 | "type": "mp4" 75 | }, 76 | "live_vod": "live" 77 | },{ 78 | "name": ".mp4$", 79 | "parameters": { 80 | "renditions":[[640, 480, 960000, 128000]], 81 | "codec_type":"HEVC", 82 | "gop_size": "100", 83 | "framerate": "30", 84 | "bframe": "2", 85 | "preset": "9", 86 | "profile": "main", 87 | "refs": "2", 88 | "forced_idr": "1" 89 | }, 90 | "loop": 0, 91 | "output": { 92 | "target": "file", 93 | "type": "mp4" 94 | }, 95 | "live_vod": "live" 96 | },{ 97 | "name": ".mp4$", 98 | "parameters": { 99 | "renditions":[[640, 480, 1200000, 128000]], 100 | "codec_type":"AVC", 101 | "gop_size": "100", 102 | "framerate": "30", 103 | "bframe": "2", 104 | "preset": "veryfast", 105 | "profile": "main", 106 | "refs": "2", 107 | "forced_idr": "1" 108 | }, 109 | "loop": 0, 110 | "output": { 111 | "target": "file", 112 | "type": "mp4" 113 | }, 114 | "live_vod": "live" 115 | },{ 116 | "name": ".mp4$", 117 | "parameters": { 118 | "renditions":[[480, 360, 440000, 128000]], 119 | "codec_type":"HEVC", 120 | "gop_size": "100", 121 | "framerate": "30", 122 | "bframe": "2", 123 | "preset": "9", 124 | "profile": "main", 125 | "refs": "2", 126 | "forced_idr": "1" 127 | }, 128 | "loop": 0, 129 | "output": { 130 | "target": "file", 131 | "type": "mp4" 132 | }, 133 | "live_vod": "live" 134 | },{ 135 | "name": ".mp4$", 136 | "parameters": { 137 | "renditions":[[480, 360, 550000, 128000]], 138 | "codec_type":"AVC", 139 | "gop_size": "100", 140 | "framerate": "30", 141 | "bframe": "2", 142 | "preset": "veryfast", 143 | "profile": "main", 144 | "refs": "2", 145 | "forced_idr": "1" 146 | }, 
147 | "loop": 0, 148 | "output": { 149 | "target": "file", 150 | "type": "mp4" 151 | }, 152 | "live_vod": "live" 153 | }] 154 | -------------------------------------------------------------------------------- /cdn-server/.dockerignore: -------------------------------------------------------------------------------- 1 | CMakeLists.txt 2 | *.m4 3 | test/* 4 | -------------------------------------------------------------------------------- /cdn-server/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | if (SCENARIO STREQUAL "cdn") 2 | set(service "tc_cdn_service") 3 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 4 | endif() 5 | -------------------------------------------------------------------------------- /cdn-server/Dockerfile: -------------------------------------------------------------------------------- 1 | 2 | FROM openvisualcloud/xeon-ubuntu1804-media-nginx:21.3 3 | 4 | Run DEBIAN_FRONTEND=noninteractive apt-get update && apt-get install -y -q --no-install-recommends python3-setuptools python3-redis python-celery-common python3-tornado python3-kafka python3-kazoo openssh-server && rm -rf /var/lib/apt/lists/* 5 | 6 | COPY *.xsl /etc/nginx/ 7 | COPY *.conf /etc/nginx/ 8 | COPY html /var/www/html 9 | COPY *.py /home/ 10 | CMD ["/bin/bash","-c","/home/main.py&/usr/local/sbin/nginx"] 11 | WORKDIR /home 12 | 13 | #### 14 | ARG UID 15 | ARG GID 16 | ## must use ; here to ignore user exist status code 17 | RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} docker; \ 18 | [ ${UID} -gt 0 ] && useradd -d /home/docker -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} docker; \ 19 | touch /var/run/nginx.pid && \ 20 | mkdir -p /var/log/nginx /var/lib/nginx /var/www/video /var/www/archive && \ 21 | chown -R ${UID}:${GID} /var/run/nginx.pid /var/www /var/log/nginx /var/lib/nginx 22 | USER ${UID} 23 | #### 24 | -------------------------------------------------------------------------------- /cdn-server/build.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="tc_cdn_service" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . "${DIR}/../script/build.sh" 7 | -------------------------------------------------------------------------------- /cdn-server/html/css/app.css: -------------------------------------------------------------------------------- 1 | 2 | form .form-icons { 3 | text-align: center; 4 | } 5 | 6 | form .form-icons h4 { 7 | margin-bottom: 1rem; 8 | } 9 | 10 | form .form-icons .input-group-label { 11 | background-color: #1779ba; 12 | border-color: #1779ba; 13 | } 14 | 15 | form .form-icons .input-group-field { 16 | border-color: #1779ba; 17 | } 18 | 19 | form .form-icons .fa { 20 | color: white; 21 | width: 1rem; 22 | } 23 | 24 | .input-group-0 { 25 | display: none; 26 | } 27 | 28 | .flex-center{ 29 | width: 100%; 30 | height: 50px; 31 | display: none; 32 | } 33 | 34 | .flex-center .input-group-bar { 35 | border-style: solid; 36 | border-width: 1px; 37 | border-color: #cccccc; 38 | width: 80%; 39 | height: 10px; 40 | border-radius: 2px; 41 | margin: 0 auto; 42 | } 43 | .flex-center .bar{ 44 | background-color: #ff4c4c; 45 | width: 0%; 46 | height: 100%; 47 | 48 | } 49 | 50 | -------------------------------------------------------------------------------- /cdn-server/html/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/favicon.ico -------------------------------------------------------------------------------- /cdn-server/html/header.shtml: -------------------------------------------------------------------------------- 1 |
2 |
3 | 8 |
9 |
10 | 18 |
19 |
20 | 21 |
22 |
23 |

Setting

24 | 25 |
26 | User Name: 27 |
28 |
29 | 30 | 31 |
32 |
33 | 34 |
35 |
36 |

Upload

37 | 38 |
39 | 40 |
41 | 42 |
43 | Choose File: 44 |
45 |
46 | 47 | 48 |
49 |
Upload 0%
50 |
51 |
52 |
53 |
54 | 55 | 56 |
57 |
58 | -------------------------------------------------------------------------------- /cdn-server/html/icon/foundation-icons.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/icon/foundation-icons.eot -------------------------------------------------------------------------------- /cdn-server/html/icon/foundation-icons.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/icon/foundation-icons.ttf -------------------------------------------------------------------------------- /cdn-server/html/icon/foundation-icons.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/icon/foundation-icons.woff -------------------------------------------------------------------------------- /cdn-server/html/image/grafana/Kubernetes-Monitoring-Arch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/grafana/Kubernetes-Monitoring-Arch.png -------------------------------------------------------------------------------- /cdn-server/html/image/grafana/alertmanager.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/grafana/alertmanager.png -------------------------------------------------------------------------------- /cdn-server/html/image/grafana/dashboards_list.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/grafana/dashboards_list.png -------------------------------------------------------------------------------- /cdn-server/html/image/grafana/home.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/grafana/home.png -------------------------------------------------------------------------------- /cdn-server/html/image/grafana/login.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/grafana/login.png -------------------------------------------------------------------------------- /cdn-server/html/image/grafana/navigation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/grafana/navigation.png -------------------------------------------------------------------------------- /cdn-server/html/image/grafana/nodes_dashboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/grafana/nodes_dashboard.png -------------------------------------------------------------------------------- /cdn-server/html/image/grafana/promQL.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/grafana/promQL.png -------------------------------------------------------------------------------- /cdn-server/html/image/grafana/prometheus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/grafana/prometheus.png -------------------------------------------------------------------------------- /cdn-server/html/image/kibana/Kubernetes-Logging-Arch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/kibana/Kubernetes-Logging-Arch.png -------------------------------------------------------------------------------- /cdn-server/html/image/kibana/Kubernetes-Logging-Arch.vsdx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/kibana/Kubernetes-Logging-Arch.vsdx -------------------------------------------------------------------------------- /cdn-server/html/image/kibana/choose_source.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/kibana/choose_source.png -------------------------------------------------------------------------------- /cdn-server/html/image/kibana/ffmpeg_log.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/kibana/ffmpeg_log.png -------------------------------------------------------------------------------- /cdn-server/html/image/kibana/filter_log.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/kibana/filter_log.png -------------------------------------------------------------------------------- /cdn-server/html/image/kibana/generate_report.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/kibana/generate_report.png -------------------------------------------------------------------------------- /cdn-server/html/image/kibana/logs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/kibana/logs.png -------------------------------------------------------------------------------- /cdn-server/html/image/kibana/navigation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/kibana/navigation.png -------------------------------------------------------------------------------- /cdn-server/html/image/kibana/save_log.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/kibana/save_log.png 
-------------------------------------------------------------------------------- /cdn-server/html/image/kibana/step_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/kibana/step_1.png -------------------------------------------------------------------------------- /cdn-server/html/image/kibana/step_2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/kibana/step_2.png -------------------------------------------------------------------------------- /cdn-server/html/image/kibana/visualization_show.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/kibana/visualization_show.png -------------------------------------------------------------------------------- /cdn-server/html/image/kibana/visualization_type.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/cdn-server/html/image/kibana/visualization_type.png -------------------------------------------------------------------------------- /cdn-server/html/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | CDN Transcode Sample 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | -------------------------------------------------------------------------------- /cdn-server/html/js/app-api.js: 
-------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var apiHost={ 4 | playList: function (name) { 5 | var url="api/playlist"; 6 | var args= { name: name }; 7 | console.log("GET "+url+"?"+JSON.stringify(args)); 8 | return $.get(url, args); 9 | }, 10 | click: function (name, x, y, t) { 11 | var url="api/click"; 12 | var args= { x: x, y:y, name: name, t:t } 13 | console.log("POST "+url+"?"+JSON.stringify(args)); 14 | return $.post(url, args); 15 | }, 16 | }; 17 | -------------------------------------------------------------------------------- /cdn-server/html/js/app-header.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | $(".top-bar").on(":initpage", function(e) { 4 | $("#setting").find("[ui-header-setting-user] input").val(settings.user()); 5 | $(this).find("[user-name-menu]").text(settings.user()); 6 | }); 7 | 8 | $("#setting").find("form").submit(function() { 9 | var page=$(this); 10 | 11 | var user=page.find("[ui-header-setting-user] input").val().toLowerCase(); 12 | settings.user(user); 13 | $(".top-bar").find("[user-name-menu]").text(user); 14 | $("#player").trigger(":update"); 15 | return false; 16 | }); 17 | 18 | var settings={ 19 | user: function (name) { 20 | if (typeof name != "undefined") localStorage.user=name; 21 | return typeof localStorage.user!="undefined"?localStorage.user:"guest"; 22 | }, 23 | } 24 | 25 | 26 | //upload 27 | $("#upload .choose-file").click( 28 | function() { 29 | $("#upload .input_file").trigger('click') 30 | } 31 | ) 32 | 33 | $("#upload .input_file").change( 34 | function() { 35 | var filePath = $("#upload .input_file").val() 36 | var pos=filePath.lastIndexOf("\\"); 37 | var fileName = filePath.substring(pos+1); 38 | $("#upload .choose-file .input-group-field").val(fileName) 39 | } 40 | ) 41 | 42 | $("#upload").find("form .button").click(function() { 43 | if(!$("#upload .input_file").val()){ 44 | return false 45 | } 
46 | if (!(document.querySelector("#upload .input_file").files[0].name.endsWith('.mp4'))) { 47 | $(".flex-center").show() 48 | $(".flex-center h6").html("Please choose mp4 file") 49 | $(".flex-center .input-group-bar").hide() 50 | return false 51 | } else { 52 | upload() 53 | } 54 | }); 55 | 56 | function upload() { 57 | $(".flex-center").show() 58 | $(".flex-center .input-group-bar").show() 59 | const LENGTH = 1024 * 1024 * 10; 60 | var timeStamp = new Date().getTime(); 61 | var fileName=$("#upload .choose-file .input-group-field").val(); 62 | var file = document.querySelector('#upload .input_file').files[0]; 63 | var totalSize = file.size; 64 | var start = 0; 65 | var end = start + LENGTH; 66 | var fd = null; 67 | var blob = null; 68 | var xhr = null; 69 | var sum = Math.ceil(totalSize/LENGTH) 70 | var count = 0 71 | var timer = setInterval(function () { 72 | if (start < totalSize) { 73 | fd = new FormData(); 74 | xhr = new XMLHttpRequest(); 75 | xhr.open('POST', '/upload/', false); 76 | blob = file.slice(start, end); 77 | fd.append('file', blob); 78 | fd.append('fileName', fileName); 79 | fd.append('timeStamp', timeStamp); 80 | fd.append('count', String(count)); 81 | if (end >= totalSize) { 82 | fd.append('uploadStatus','end') 83 | } 84 | xhr.send(fd); 85 | console.log(xhr.status) 86 | if (xhr.status != 200) { 87 | console.log("error" + xhr.status) 88 | $(".flex-center h6").html("Error, Please try again") 89 | $(".flex-center .input-group-bar").hide() 90 | clearInterval(timer); 91 | return false 92 | } 93 | count += 1 94 | $(".flex-center h6").html("Upload " + parseInt(count * 100 / sum) + "%") 95 | $(".flex-center .bar").width(parseInt(count * 100 / sum) + "%") 96 | start = end; 97 | end = start + LENGTH; 98 | } else { 99 | $(".flex-center h6").html("Upload success") 100 | setTimeout(function () { 101 | $(".flex-center h6").html("Upload 0%") 102 | $(".flex-center .bar").width("0%") 103 | $(".flex-center").hide() 104 | $("#upload .input_file").val('') 105 | 
$("#upload .choose-file .input-group-field").val('') 106 | $("#setting").find("form").submit(); 107 | $(".reveal-overlay").trigger('click'); 108 | },1000) 109 | clearInterval(timer); 110 | } 111 | },100) 112 | } 113 | -------------------------------------------------------------------------------- /cdn-server/html/js/app-player.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | function hls_play(page, video, url) { 4 | if (Hls.isSupported()) { 5 | var config = { 6 | xhrSetup: function(xhr, url) { 7 | xhr.setRequestHeader("X-USER", settings.user()); 8 | } 9 | }; 10 | var player=new Hls(config); 11 | player.loadSource(url); 12 | player.attachMedia(video[0]); 13 | player.on(Hls.Events.MANIFEST_PARSED,function () { 14 | video[0].play(); 15 | }); 16 | } else if (video[0].canPlayType('application/vnd.apple.mpegurl')) { 17 | video[0].src= url; 18 | video[0].addEventListener('canplay', function() { 19 | video[0].play(); 20 | }); 21 | } 22 | 23 | page.unbind(":close").on(":close", function (e) { 24 | video[0].src=''; 25 | if (typeof(video[0].pause)==='function') video[0].pause(); 26 | }); 27 | } 28 | 29 | function dash_play(page, video, url) { 30 | var player=dashjs.MediaPlayer().create(); 31 | player.extend("RequestModifier", function () { 32 | return { 33 | modifyRequestHeader: function (xhr) { 34 | xhr.setRequestHeader("X-USER",settings.user()); 35 | return xhr; 36 | }, 37 | modifyRequestURL: function (url) { 38 | return url; 39 | } 40 | }; 41 | },true); 42 | 43 | player.initialize(); 44 | player.attachView(video[0]); 45 | player.attachSource(url); 46 | 47 | page.unbind(":close").on(":close", function (e) { 48 | player.attachSource(null); 49 | }); 50 | } 51 | 52 | $("#player").on(":play", function (e, url) { 53 | var page=$(this); 54 | var video=page.find("video"); 55 | 56 | page.trigger(":close"); 57 | if (url.endsWith(".m3u8") || url.endsWith(".M3U8")) { 58 | hls_play(page, video, url); 59 | } 60 | if 
(url.endsWith(".mpd") || url.endsWith(".MPD")) { 61 | dash_play(page, video, url); 62 | } 63 | }).on(":update", function (e) { 64 | var plist=$(this).find("[play-list]"); 65 | plist.empty(); 66 | apiHost.playList(settings.user()).then(function (data) { 67 | $.each(data, function (k,v) { 68 | var line=$(''+v.name+'
'+v.name+'
'); 69 | line.find("a").click(function () { 70 | var e = $.Event("keydown", { keyCode: 13 }); 71 | $("#player input").val(v.url).trigger(e); 72 | }); 73 | plist.append(line); 74 | }); 75 | }); 76 | }).find("input").keydown(function (e) { 77 | if (e.keyCode!=13) return; 78 | $("#player").trigger(":close").trigger(":play", [$(this).val()]); 79 | }); 80 | 81 | $("#player video").click(function (e) { 82 | var rect=e.target.getBoundingClientRect(); 83 | var x=(e.clientX-rect.left)/rect.width; 84 | var y=(e.clientY-rect.top)/rect.height; 85 | apiHost.click(settings.user(), x, y, this.currentTime); 86 | }); 87 | -------------------------------------------------------------------------------- /cdn-server/html/js/app.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | $(document).foundation(); 4 | $(window).bind("load", function () { 5 | $(".top-bar").trigger(":initpage"); 6 | $("#player").trigger(":update"); 7 | }); 8 | -------------------------------------------------------------------------------- /cdn-server/html/js/vendor/what-input.js: -------------------------------------------------------------------------------- 1 | /** 2 | * what-input - A global utility for tracking the current input method (mouse, keyboard or touch). 
3 | * @version v4.2.0 4 | * @link https://github.com/ten1seven/what-input 5 | * @license MIT 6 | */ 7 | "use strict"; 8 | 9 | (function webpackUniversalModuleDefinition(root, factory) { 10 | if(typeof exports === 'object' && typeof module === 'object') 11 | module.exports = factory(); 12 | else if(typeof define === 'function' && define.amd) 13 | define("whatInput", [], factory); 14 | else if(typeof exports === 'object') 15 | exports["whatInput"] = factory(); 16 | else 17 | root["whatInput"] = factory(); 18 | })(this, function() { 19 | return /******/ (function(modules) { // webpackBootstrap 20 | /******/ // The module cache 21 | /******/ var installedModules = {}; 22 | 23 | /******/ // The require function 24 | /******/ function __webpack_require__(moduleId) { 25 | 26 | /******/ // Check if module is in cache 27 | /******/ if(installedModules[moduleId]) 28 | /******/ return installedModules[moduleId].exports; 29 | 30 | /******/ // Create a new module (and put it into the cache) 31 | /******/ var module = installedModules[moduleId] = { 32 | /******/ exports: {}, 33 | /******/ id: moduleId, 34 | /******/ loaded: false 35 | /******/ }; 36 | 37 | /******/ // Execute the module function 38 | /******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); 39 | 40 | /******/ // Flag the module as loaded 41 | /******/ module.loaded = true; 42 | 43 | /******/ // Return the exports of the module 44 | /******/ return module.exports; 45 | /******/ } 46 | 47 | 48 | /******/ // expose the modules object (__webpack_modules__) 49 | /******/ __webpack_require__.m = modules; 50 | 51 | /******/ // expose the module cache 52 | /******/ __webpack_require__.c = installedModules; 53 | 54 | /******/ // __webpack_public_path__ 55 | /******/ __webpack_require__.p = ""; 56 | 57 | /******/ // Load entry module and return exports 58 | /******/ return __webpack_require__(0); 59 | /******/ }) 60 | 
/************************************************************************/ 61 | /******/ ([ 62 | /* 0 */ 63 | /***/ (function(module, exports) { 64 | 65 | 'use strict'; 66 | 67 | module.exports = function () { 68 | /* 69 | * variables 70 | */ 71 | 72 | // last used input type 73 | var currentInput = 'initial'; 74 | 75 | // last used input intent 76 | var currentIntent = null; 77 | 78 | // cache document.documentElement 79 | var doc = document.documentElement; 80 | 81 | // form input types 82 | var formInputs = ['input', 'select', 'textarea']; 83 | 84 | var functionList = []; 85 | 86 | // list of modifier keys commonly used with the mouse and 87 | // can be safely ignored to prevent false keyboard detection 88 | var ignoreMap = [16, // shift 89 | 17, // control 90 | 18, // alt 91 | 91, // Windows key / left Apple cmd 92 | 93 // Windows menu / right Apple cmd 93 | ]; 94 | 95 | // list of keys for which we change intent even for form inputs 96 | var changeIntentMap = [9 // tab 97 | ]; 98 | 99 | // mapping of events to input types 100 | var inputMap = { 101 | keydown: 'keyboard', 102 | mousedown: 'mouse', 103 | mousemove: 'mouse', 104 | MSPointerDown: 'pointer', 105 | MSPointerMove: 'pointer', 106 | pointerdown: 'pointer', 107 | pointermove: 'pointer', 108 | touchstart: 'touch' 109 | }; 110 | 111 | // array of all used input types 112 | var inputTypes = []; 113 | 114 | // boolean: true if touch buffer is active 115 | var isBuffering = false; 116 | 117 | // boolean: true if the page is being scrolled 118 | var isScrolling = false; 119 | 120 | // store current mouse position 121 | var mousePos = { 122 | x: null, 123 | y: null 124 | }; 125 | 126 | // map of IE 10 pointer events 127 | var pointerMap = { 128 | 2: 'touch', 129 | 3: 'touch', // treat pen like touch 130 | 4: 'mouse' 131 | }; 132 | 133 | var supportsPassive = false; 134 | 135 | try { 136 | var opts = Object.defineProperty({}, 'passive', { 137 | get: function get() { 138 | supportsPassive = true; 139 | } 140 | 
}); 141 | 142 | window.addEventListener('test', null, opts); 143 | } catch (e) {} 144 | 145 | /* 146 | * set up 147 | */ 148 | 149 | var setUp = function setUp() { 150 | // add correct mouse wheel event mapping to `inputMap` 151 | inputMap[detectWheel()] = 'mouse'; 152 | 153 | addListeners(); 154 | setInput(); 155 | }; 156 | 157 | /* 158 | * events 159 | */ 160 | 161 | var addListeners = function addListeners() { 162 | // `pointermove`, `MSPointerMove`, `mousemove` and mouse wheel event binding 163 | // can only demonstrate potential, but not actual, interaction 164 | // and are treated separately 165 | 166 | // pointer events (mouse, pen, touch) 167 | if (window.PointerEvent) { 168 | doc.addEventListener('pointerdown', updateInput); 169 | doc.addEventListener('pointermove', setIntent); 170 | } else if (window.MSPointerEvent) { 171 | doc.addEventListener('MSPointerDown', updateInput); 172 | doc.addEventListener('MSPointerMove', setIntent); 173 | } else { 174 | // mouse events 175 | doc.addEventListener('mousedown', updateInput); 176 | doc.addEventListener('mousemove', setIntent); 177 | 178 | // touch events 179 | if ('ontouchstart' in window) { 180 | doc.addEventListener('touchstart', touchBuffer); 181 | doc.addEventListener('touchend', touchBuffer); 182 | } 183 | } 184 | 185 | // mouse wheel 186 | doc.addEventListener(detectWheel(), setIntent, supportsPassive ? 
{ passive: true } : false); 187 | 188 | // keyboard events 189 | doc.addEventListener('keydown', updateInput); 190 | }; 191 | 192 | // checks conditions before updating new input 193 | var updateInput = function updateInput(event) { 194 | // only execute if the touch buffer timer isn't running 195 | if (!isBuffering) { 196 | var eventKey = event.which; 197 | var value = inputMap[event.type]; 198 | if (value === 'pointer') value = pointerType(event); 199 | 200 | if (currentInput !== value || currentIntent !== value) { 201 | var activeElem = document.activeElement; 202 | var activeInput = false; 203 | var notFormInput = activeElem && activeElem.nodeName && formInputs.indexOf(activeElem.nodeName.toLowerCase()) === -1; 204 | 205 | if (notFormInput || changeIntentMap.indexOf(eventKey) !== -1) { 206 | activeInput = true; 207 | } 208 | 209 | if (value === 'touch' || 210 | // ignore mouse modifier keys 211 | value === 'mouse' || 212 | // don't switch if the current element is a form input 213 | value === 'keyboard' && eventKey && activeInput && ignoreMap.indexOf(eventKey) === -1) { 214 | // set the current and catch-all variable 215 | currentInput = currentIntent = value; 216 | 217 | setInput(); 218 | } 219 | } 220 | } 221 | }; 222 | 223 | // updates the doc and `inputTypes` array with new input 224 | var setInput = function setInput() { 225 | doc.setAttribute('data-whatinput', currentInput); 226 | doc.setAttribute('data-whatintent', currentInput); 227 | 228 | if (inputTypes.indexOf(currentInput) === -1) { 229 | inputTypes.push(currentInput); 230 | doc.className += ' whatinput-types-' + currentInput; 231 | } 232 | 233 | fireFunctions('input'); 234 | }; 235 | 236 | // updates input intent for `mousemove` and `pointermove` 237 | var setIntent = function setIntent(event) { 238 | // test to see if `mousemove` happened relative to the screen 239 | // to detect scrolling versus mousemove 240 | if (mousePos['x'] !== event.screenX || mousePos['y'] !== event.screenY) { 241 | 
isScrolling = false; 242 | 243 | mousePos['x'] = event.screenX; 244 | mousePos['y'] = event.screenY; 245 | } else { 246 | isScrolling = true; 247 | } 248 | 249 | // only execute if the touch buffer timer isn't running 250 | // or scrolling isn't happening 251 | if (!isBuffering && !isScrolling) { 252 | var value = inputMap[event.type]; 253 | if (value === 'pointer') value = pointerType(event); 254 | 255 | if (currentIntent !== value) { 256 | currentIntent = value; 257 | 258 | doc.setAttribute('data-whatintent', currentIntent); 259 | 260 | fireFunctions('intent'); 261 | } 262 | } 263 | }; 264 | 265 | // buffers touch events because they frequently also fire mouse events 266 | var touchBuffer = function touchBuffer(event) { 267 | if (event.type === 'touchstart') { 268 | isBuffering = false; 269 | 270 | // set the current input 271 | updateInput(event); 272 | } else { 273 | isBuffering = true; 274 | } 275 | }; 276 | 277 | var fireFunctions = function fireFunctions(type) { 278 | for (var i = 0, len = functionList.length; i < len; i++) { 279 | if (functionList[i].type === type) { 280 | functionList[i].function.call(undefined, currentIntent); 281 | } 282 | } 283 | }; 284 | 285 | /* 286 | * utilities 287 | */ 288 | 289 | var pointerType = function pointerType(event) { 290 | if (typeof event.pointerType === 'number') { 291 | return pointerMap[event.pointerType]; 292 | } else { 293 | // treat pen like touch 294 | return event.pointerType === 'pen' ? 
'touch' : event.pointerType; 295 | } 296 | }; 297 | 298 | // detect version of mouse wheel event to use 299 | // via https://developer.mozilla.org/en-US/docs/Web/Events/wheel 300 | var detectWheel = function detectWheel() { 301 | var wheelType = void 0; 302 | 303 | // Modern browsers support "wheel" 304 | if ('onwheel' in document.createElement('div')) { 305 | wheelType = 'wheel'; 306 | } else { 307 | // Webkit and IE support at least "mousewheel" 308 | // or assume that remaining browsers are older Firefox 309 | wheelType = document.onmousewheel !== undefined ? 'mousewheel' : 'DOMMouseScroll'; 310 | } 311 | 312 | return wheelType; 313 | }; 314 | 315 | /* 316 | * init 317 | */ 318 | 319 | // don't start script unless browser cuts the mustard 320 | // (also passes if polyfills are used) 321 | if ('addEventListener' in window && Array.prototype.indexOf) { 322 | setUp(); 323 | } 324 | 325 | /* 326 | * api 327 | */ 328 | 329 | return { 330 | // returns string: the current input type 331 | // opt: 'loose'|'strict' 332 | // 'strict' (default): returns the same value as the `data-whatinput` attribute 333 | // 'loose': includes `data-whatintent` value if it's more current than `data-whatinput` 334 | ask: function ask(opt) { 335 | return opt === 'loose' ? 
currentIntent : currentInput; 336 | }, 337 | 338 | // returns array: all the detected input types 339 | types: function types() { 340 | return inputTypes; 341 | }, 342 | 343 | // overwrites ignored keys with provided array 344 | ignoreKeys: function ignoreKeys(arr) { 345 | ignoreMap = arr; 346 | }, 347 | 348 | // attach functions to input and intent "events" 349 | // funct: function to fire on change 350 | // eventType: 'input'|'intent' 351 | onChange: function onChange(funct, eventType) { 352 | functionList.push({ 353 | function: funct, 354 | type: eventType 355 | }); 356 | } 357 | }; 358 | }(); 359 | 360 | /***/ }) 361 | /******/ ]) 362 | }); 363 | ; 364 | -------------------------------------------------------------------------------- /cdn-server/html/player.shtml: -------------------------------------------------------------------------------- 1 |
2 | 3 | 4 | 5 | 20 | 21 |
6 |
7 | Video URL: 8 | 9 |
10 |
11 | 12 |
13 |
14 | 15 | 16 | 17 | 18 |
Playlist:
19 |
22 | 23 |
24 | -------------------------------------------------------------------------------- /cdn-server/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from tornado import ioloop, web 4 | from tornado.options import define, options, parse_command_line 5 | from playlist import PlayListHandler 6 | from schedule import ScheduleHandler 7 | from upload import UploadHandler 8 | import os 9 | 10 | APP = web.Application([ 11 | (r'/playlist', PlayListHandler), 12 | (r'/schedule/.*', ScheduleHandler), 13 | (r'/upload/', UploadHandler), 14 | ]) 15 | 16 | if __name__ == "__main__": 17 | define("port", default=2222, help="the binding port", type=int) 18 | define("ip", default="127.0.0.1", help="the binding ip") 19 | parse_command_line() 20 | os.popen('celery multi start w1 -A tasks -l info --logfile=/var/www/log/celery.log --pidfile=/var/www/celery.pid') 21 | print("Listening to " + options.ip + ":" + str(options.port)) 22 | APP.listen(options.port, address=options.ip) 23 | ioloop.IOLoop.instance().start() 24 | -------------------------------------------------------------------------------- /cdn-server/messaging.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import socket 4 | from kafka import KafkaProducer, KafkaConsumer, TopicPartition 5 | import traceback 6 | import socket 7 | import time 8 | 9 | KAFKA_HOSTS = ["kafka-service:9092"] 10 | 11 | class Producer(object): 12 | def __init__(self): 13 | super(Producer, self).__init__() 14 | self._client_id = socket.gethostname() 15 | self._producer = None 16 | 17 | def send(self, topic, message): 18 | if not self._producer: 19 | try: 20 | self._producer = KafkaProducer(bootstrap_servers=KAFKA_HOSTS, 21 | client_id=self._client_id, 22 | api_version=(0, 10), acks=0) 23 | except: 24 | print(traceback.format_exc(), flush=True) 25 | self._producer = None 26 | 27 | try: 28 | self._producer.send(topic, 
message.encode('utf-8')) 29 | except: 30 | print(traceback.format_exc(), flush=True) 31 | 32 | def flush(self): 33 | if self._producer: 34 | self._producer.flush() 35 | 36 | def close(self): 37 | if self._producer: 38 | self._producer.close() 39 | self._producer=None 40 | 41 | class Consumer(object): 42 | def __init__(self, group=None): 43 | super(Consumer, self).__init__() 44 | self._client_id = socket.gethostname() 45 | self._group = group 46 | 47 | def messages(self, topic, timeout=None): 48 | c = KafkaConsumer(topic, bootstrap_servers=KAFKA_HOSTS, client_id=self._client_id, 49 | group_id=self._group, auto_offset_reset="earliest", api_version=(0, 10)) 50 | 51 | for msg in c: 52 | yield msg.value.decode('utf-8') 53 | c.close() 54 | 55 | def debug(self, topic): 56 | c = KafkaConsumer(bootstrap_servers=KAFKA_HOSTS, client_id=self._client_id, 57 | group_id=None, api_version=(0, 10)) 58 | 59 | # assign/subscribe topic 60 | partitions = c.partitions_for_topic(topic) 61 | if not partitions: 62 | raise Exception("Topic "+topic+" not exist") 63 | c.assign([TopicPartition(topic, p) for p in partitions]) 64 | 65 | # seek to beginning if needed 66 | c.seek_to_beginning() 67 | 68 | # fetch messages 69 | while True: 70 | partitions = c.poll(100) 71 | if partitions: 72 | for p in partitions: 73 | for msg in partitions[p]: 74 | yield msg.value.decode('utf-8') 75 | yield "" 76 | 77 | c.close() 78 | -------------------------------------------------------------------------------- /cdn-server/nginx.conf: -------------------------------------------------------------------------------- 1 | 2 | worker_processes auto; 3 | worker_rlimit_nofile 8192; 4 | daemon off; 5 | error_log /var/www/log/error.log warn; 6 | 7 | events { 8 | worker_connections 4096; 9 | } 10 | 11 | rtmp { 12 | server { 13 | listen 1935; 14 | chunk_size 4000; 15 | 16 | application stream { 17 | live on; 18 | } 19 | 20 | application hls { 21 | live on; 22 | hls on; 23 | hls_path /var/www/video/hls; 24 | hls_nested on; 
25 | hls_fragment 3; 26 | hls_playlist_length 60; 27 | hls_variant _low BANDWIDTH=2048000 RESOLUTION=854x480; 28 | hls_variant _mid BANDWIDTH=4096000 RESOLUTION=1280x720; 29 | hls_variant _hi BANDWIDTH=8192000 RESOLUTION=1920x1080; 30 | } 31 | 32 | application dash { 33 | live on; 34 | dash on; 35 | dash_path /var/www/video/dash; 36 | dash_fragment 4; 37 | dash_playlist_length 120; 38 | dash_nested on; 39 | dash_repetition on; 40 | dash_cleanup on; 41 | dash_variant _low bandwidth="2048000" width="854" height="480"; 42 | dash_variant _med bandwidth="4096000" width="1280" height="720"; 43 | dash_variant _hi bandwidth="8192000" width="1920" height="1080" max; 44 | } 45 | } 46 | } 47 | 48 | http { 49 | server_tokens off; 50 | include mime.types; 51 | default_type application/octet-stream; 52 | directio 512; 53 | sendfile on; 54 | tcp_nopush on; 55 | tcp_nodelay on; 56 | keepalive_timeout 65; 57 | aio on; 58 | 59 | log_format upstream_time '$remote_addr - $remote_user [$time_local] ' 60 | '"$request" $status $body_bytes_sent ' 61 | '"$http_referer" "$http_user_agent"' 62 | 'rt=$request_time uct="$upstream_connect_time" uht="$upstream_header_time" urt="$upstream_response_time"'; 63 | 64 | proxy_cache_path /var/www/cache levels=1:2 keys_zone=one:10m use_temp_path=off; 65 | limit_conn_zone $binary_remote_addr zone=perip:10m; 66 | limit_conn_zone $server_name zone=perserver:10m; 67 | limit_req_zone $binary_remote_addr zone=allips:10m rate=200r/s; 68 | 69 | server { 70 | listen 8443 ssl; 71 | server_name _; 72 | 73 | ssl_certificate /var/run/secrets/self.crt; 74 | ssl_certificate_key /var/run/secrets/self.key; 75 | 76 | ssl_protocols TLSv1 TLSv1.1 TLSv1.2; 77 | ssl_ciphers 
"ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA:ECDHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES128-SHA256:DHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES256-GCM-SHA384:AES128-GCM-SHA256:AES256-SHA256:AES128-SHA256:AES256-SHA:AES128-SHA:DES-CBC3-SHA:HIGH:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4"; 78 | ssl_prefer_server_ciphers on; 79 | ssl_ecdh_curve secp384r1; 80 | ssl_session_cache shared:SSL:10m; 81 | ssl_session_tickets off; 82 | ssl_stapling off; 83 | ssl_stapling_verify off; 84 | 85 | location / { 86 | root /var/www/html; 87 | sendfile on; 88 | 89 | # proxy cache settings 90 | proxy_cache one; 91 | proxy_no_cache $http_pragma $http_authorization; 92 | proxy_cache_bypass $cookie_nocache $arg_nocache$arg_comment; 93 | proxy_cache_valid 200 302 10m; 94 | proxy_cache_valid 303 1m; 95 | 96 | add_header 'Access-Control-Allow-Origin' '*' always; 97 | add_header 'Access-Control-Expose-Headers' 'Content-Length'; 98 | add_header 'X-Frame-Options' 'deny' always; 99 | add_header 'X-XSS-Protection' '1' always; 100 | add_header 'X-Content-Type-Options' 'nosniff' always; 101 | ssi on; 102 | } 103 | 104 | location /api/playlist { 105 | add_header Cache-Control no-cache; 106 | rewrite ^/api(/playlist.*) $1 break; 107 | proxy_pass http://localhost:2222; 108 | } 109 | 110 | location /upload/ { 111 | add_header Cache-Control no-cache; 112 | client_max_body_size 200M; 113 | limit_conn perip 2000; 114 | limit_req zone=allips burst=20; 115 | proxy_pass http://localhost:2222; 116 | } 117 | 118 | location /hls/ { 119 | root /var/www/video; 120 | add_header Cache-Control no-cache; 121 | add_header 'Access-Control-Allow-Origin' '*' always; 122 | add_header 'Access-Control-Expose-Headers' 'Content-Length'; 123 | types { 124 | application/vnd.apple.mpegurl m3u8; 125 | video/mp2t ts; 126 | } 127 | } 128 | 129 | 
#!/usr/bin/python3

from os import listdir
import json
from tornado import web, gen

# Directory holding the archived source clips (mounted read-only in the CDN container).
ARCHIVE_ROOT = "/var/www/archive"

class PlayListHandler(web.RequestHandler):
    """Serve the JSON playlist of archived clips.

    For every archived .mp4/.avi clip, emits one entry per streaming type
    (HLS and DASH) with the manifest URL and a thumbnail image path.
    """

    def __init__(self, app, request, **kwargs):
        super(PlayListHandler, self).__init__(app, request, **kwargs)
        # NOTE(review): never read or written elsewhere in this handler — confirm
        # whether it can be removed.
        self._cache = {}

    @gen.coroutine
    def get(self):
        # Only filesystem failures should map to 404; a bare `except:` here
        # previously also swallowed SystemExit/KeyboardInterrupt.
        try:
            streams = [s for s in listdir(ARCHIVE_ROOT) if s.endswith((".mp4", ".avi"))]
        except OSError:
            self.set_status(404, "VIDEO NOT FOUND")
            return

        self.set_status(200, "OK")
        self.set_header("Content-Type", "application/json")
        # One playlist entry per (streaming type, clip) pair.
        types = [("hls", ".m3u8"), ("dash", ".mpd")]
        self.write(json.dumps([{"name": t[0] + "-" + s, "url": t[0] + "/" + s + "/index" + t[1],
                                "img": "thumbnail/" + s + ".png"} for t in types for s in streams]))
#!/usr/bin/python3

from os.path import isfile
from tornado import web, gen
from messaging import Producer
import time
import json

# Kafka topic the transcode workers consume scheduling requests from.
KAFKA_TOPIC = "content_provider_sched"
# Root where the transcoders write HLS/DASH output.
DASHLS_ROOT = "/var/www/video"

class ScheduleHandler(web.RequestHandler):
    """Schedule an on-demand transcode and wait for its manifest.

    Requests arrive as /schedule/<type>/<stream>/... via the nginx @dashls
    rewrite; this handler publishes a transcode request to Kafka, then polls
    for the produced manifest and hands it back to nginx via X-Accel-Redirect.
    """

    @gen.coroutine
    def get(self):
        # Strip the /schedule/ prefix: what remains is "<type>/<stream>/...".
        stream = self.request.uri.replace("/schedule/", "")

        # Publish the transcode request for this stream.
        print("request received to process stream: " + stream, flush=True)
        producer = Producer()
        # Build the message directly as a literal (was: empty dict + update()).
        msg = {
            "name": stream.split("/")[1],
            "parameters": {
                "renditions": [],
                "codec_type": "AVC"
            },
            "output": {
                "target": "file",
                "type": stream.split("/")[0]
            },
            "live_vod": "vod",
            "loop": 0
        }
        producer.send(KAFKA_TOPIC, json.dumps(msg))
        producer.close()

        # Poll up to 60s for the transcoder to produce the file, then let
        # nginx serve it internally via X-Accel-Redirect.
        start_time = time.time()
        while time.time() - start_time < 60:
            if isfile(DASHLS_ROOT + "/" + stream):
                self.set_header('X-Accel-Redirect', '/' + stream)
                self.set_status(200, "OK")
                return
            yield gen.sleep(0.5)

        # Transcode did not finish in time; the client should retry later.
        self.set_status(503, "Request scheduled")
21 | Generated by 22 | nginx-http-flv-module , 23 | nginx , 24 | pid , 25 | built   26 | 27 | 28 |
29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 62 | 67 | 74 | 81 | 87 | 88 | 89 |
HTTP-FLV#clientsServerVideoAudioIn bytesOut bytesIn bits/sOut bits/sStateTime
Accepted: portindexcodecbits/ssizefpscodecbits/sfreqchan 58 | 59 | 60 | 61 | 63 | 64 | 65 | 66 | 68 | 69 | 70 | 71 | 72 | 73 | 75 | 76 | 77 | 78 | 79 | 80 | 82 | 83 | 84 | 85 | 86 |
90 |
91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | live streams 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | vod streams 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | #cccccc 135 | #dddddd 136 | 137 | 138 | 139 | 140 | 141 | var d=document.getElementById('---'); 142 | d.style.display=d.style.display=='none'?'':'none'; 143 | return false 144 | 145 | 146 | 147 | [EMPTY] 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 |    156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 |   172 | 173 | 174 | 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | 208 | 209 | 210 | 211 | 212 | 213 | 214 | 215 | 216 | 217 | 218 | 219 | --- 220 | 221 | 222 | 223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | 233 | 234 | 235 | 236 |
IdStateAddressFlash versionPage URLSWF URLDroppedTimestampA-VTime
237 | 238 | 239 |
240 | 241 | 242 | 243 | 244 | 245 | 246 | 247 | 248 | 249 | 250 | d 251 | 252 | 253 | 254 | h 255 | 256 | 257 | 258 | m 259 | 260 | 261 | s 262 | 263 | 264 | 265 | 266 | 267 | 268 | 269 | 270 | 271 | 272 | 273 | 274 | T 275 | 276 | 277 | G 278 | 279 | 280 | M 281 | 282 | K 283 | 284 | 285 | 286 | b 287 | B 288 | 289 | /s 290 | 291 | 292 | 293 | 294 | 295 | active 296 | idle 297 | 298 | 299 | 300 | 301 | 302 | 303 | publishing 304 | playing 305 | 306 | 307 | 308 | 309 | 310 | 311 | 312 | 313 | #cccccc 314 | #eeeeee 315 | 316 | 317 | 318 | 319 | 320 | 321 | 322 | http://apps.db.ripe.net/search/query.html?searchtext= 323 | 324 | whois 325 | 326 | 327 | 328 | 329 | 330 | 331 | 332 | 333 | 334 | 335 | 336 | 337 | 338 | 339 | 340 | 341 | 342 | 343 | 344 | 345 | 346 | 347 | 348 | 349 | 350 | publishing 351 | 352 | 353 | 354 | active 355 | 356 | 357 | 358 | x 359 | 360 | 361 |
import os
import shutil
import subprocess
from time import sleep
from celery import Celery

def del_file(path):
    # Remove `path` whether it is a directory tree or a single file; a
    # non-existent path is silently ignored.
    if os.path.isdir(path):
        shutil.rmtree(path)
    if os.path.exists(path):
        os.remove(path)

# Celery app backed by redis for both the message broker and the result store.
# fix: the result-store kwarg was misspelled 'brckend', so task results were
# never persisted to redis.
celery = Celery('tasks',
                broker='redis://redis-service:6379',
                backend='redis://redis-service:6379')

@celery.task
def in_out(proPath, srcPath, fileName, count):
    """Assemble uploaded chunks 0..count from proPath into srcPath/fileName.

    Waits up to ~20s for all chunks to arrive, concatenates them, generates a
    thumbnail with ffmpeg, then removes the temporary chunk directory.
    Returns a short status string, or the exception on failure.
    """
    retries = 0
    while retries < 20:
        if len(os.listdir(proPath)) == int(count) + 1:
            try:
                # Concatenate chunk files 0..count into the final archive file.
                with open(os.path.join(srcPath, fileName), "wb") as upload_file:
                    for part in range(0, int(count) + 1):
                        with open(os.path.join(proPath, str(part)), "rb") as data:
                            upload_file.write(data.read())
                # Generate a 640x360 thumbnail. Use an argument list (not a
                # shell string) because fileName originates from an HTTP upload
                # and must not be interpreted by a shell.
                cmd = ["ffmpeg", "-i", os.path.join(srcPath, fileName),
                       "-vf", "thumbnail,scale=640:360", "-frames:v", "1", "-y",
                       srcPath + "/" + fileName + ".png"]
                res = subprocess.call(cmd)
                if not res == 0:
                    raise Exception("image error")
            except Exception as e:
                # NOTE(review): joining '0' under the output file path treats
                # the file as a directory — confirm this path is intended.
                del_file(os.path.join(srcPath, fileName, '0'))
                del_file(os.path.join(srcPath, fileName))
                del_file(proPath)
                return (e)
            else:
                del_file(proPath)
                return ('delete success')
        else:
            sleep(1)
            retries += 1
    del_file(proPath)
    return ("can't find file")
self.request.files.get('file', None) 14 | uploadStatus = self.get_body_argument('uploadStatus', None) 15 | timeStamp = self.get_body_argument('timeStamp', None) 16 | count = self.get_body_argument('count', None) 17 | fileName = timeStamp + "-" + fileName 18 | proPath = os.path.join(TEMP_ROOT, fileName) 19 | if not os.path.isdir(proPath): 20 | os.makedirs(proPath) 21 | try: 22 | with open(os.path.join(proPath, count), 'wb') as f: 23 | f.write(file[0]['body']) 24 | self.set_status(200) 25 | if uploadStatus == 'end': 26 | res = in_out.delay(proPath, ARCHIVE_ROOT, fileName, count) 27 | except: 28 | self.set_status(401) 29 | print(traceback.format_exc(), flush=True) 30 | -------------------------------------------------------------------------------- /client/killvlc.bat: -------------------------------------------------------------------------------- 1 | rem 2 | rem This script will help to kill all the VLC processes on Windows 3 | rem 4 | 5 | taskkill /IM vlc.exe /F /T 6 | -------------------------------------------------------------------------------- /client/vlc_playback.bat: -------------------------------------------------------------------------------- 1 | rem 2 | rem This is an example to playback 4 channels of video streams with 2x2 layout on Windows using VLC. 3 | rem It will launch 4 VLC process with each process playing back 1 channel of video stream, while keep 4 | rem in a well designed layout. The VLC player will have no border, menu bar and tool bar, and also with 5 | rem proper Windows position. In this example it works for 1920x1080 resolution. You can reference this 6 | rem example to design other layout and also for other resolution. 
7 | rem 8 | rem Below is how the windows will look like: 9 | rem 10 | rem +---------+---------+ 11 | rem | | | 12 | rem | | | 13 | rem +---------+---------+ 14 | rem | | | 15 | rem | | | 16 | rem +---------+---------+ 17 | rem 18 | 19 | rem Please change the path to the actual VLC executable path in your setup 20 | C: 21 | cd "C:\Program Files (x86)\VideoLAN\VLC" 22 | 23 | rem Please change the IP address to actual CDN-Transcoder IP address in your setup 24 | set IP_Addr=192.168.1.107 25 | 26 | start vlc https://%IP_Addr%/hls/big_buck_bunny_2560x1440/index.m3u8 --no-video-deco --no-embedded-video --video-x=1 --video-y=1 --qt-start-minimized --zoom=0.375 -L 27 | timeout /t 3 /NOBREAK 28 | 29 | start vlc https://%IP_Addr%/hls/big_buck_bunny_1920x1080/index.m3u8 --no-video-deco --no-embedded-video --video-x=960 --video-y=1 --qt-start-minimized --zoom=0.5 -L 30 | timeout /t 3 /NOBREAK 31 | 32 | start vlc https://%IP_Addr%/hls/big_buck_bunny_1280x720/index.m3u8 --no-video-deco --no-embedded-video --video-x=1 --video-y=540 --qt-start-minimized --zoom=0.75 -L 33 | timeout /t 3 /NOBREAK 34 | 35 | start vlc https://%IP_Addr%/hls/big_buck_bunny_854x480/index.m3u8 --no-video-deco --no-embedded-video --video-x=960 --video-y=540 --qt-start-minimized --zoom=1.12 -L 36 | timeout /t 3 /NOBREAK 37 | 38 | -------------------------------------------------------------------------------- /common/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "tc_common") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | -------------------------------------------------------------------------------- /common/Dockerfile: -------------------------------------------------------------------------------- 1 | 2 | FROM ubuntu:18.04 3 | 4 | RUN apt-get update -q && apt-get install -y -q python3-requests python3-ply python3-psutil && rm -rf /var/lib/apt/lists/* 5 | 6 | COPY *.py /home/ 7 | ENV PYTHONIOENCODING=UTF-8 8 | 9 | #### 10 | ARG 
USER=docker 11 | ARG GROUP=docker 12 | ARG UID 13 | ARG GID 14 | ## must use ; here to ignore user exist status code 15 | RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} ${GROUP}; \ 16 | [ ${UID} -gt 0 ] && useradd -d /home -M -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} ${USER}; \ 17 | chown -R ${UID}:${GID} /home 18 | #### 19 | 20 | -------------------------------------------------------------------------------- /common/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="tc_common" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | . "$DIR/../script/build.sh" 6 | -------------------------------------------------------------------------------- /common/ffmpegcmd.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import subprocess 4 | import json 5 | 6 | RENDITIONS_SAMPLE = ( 7 | # resolution bitrate(kbps) audio-rate(kbps) 8 | [3840, 2160, 14000000, 192000], 9 | [2560, 1440, 10000000, 192000], 10 | [1920, 1080, 5000000, 192000], 11 | [1280, 720, 2800000, 192000], 12 | [842, 480, 1400000, 128000], 13 | [640, 360, 800000, 128000] 14 | ) 15 | 16 | default_params={ 17 | "hls_dash_params": { 18 | "duration": 2, 19 | "segment_num": 0 20 | }, 21 | "tc_params": { 22 | "renditions":[[1920, 1080, 5000000, 192000]], 23 | "codec_type":"AVC", 24 | "gop_size": "100", 25 | "framerate": "30", 26 | "bframe": "2", 27 | "preset": "veryfast", 28 | "profile": "578", 29 | "level": "30", 30 | "refs": "2", 31 | "forced_idr": "1", 32 | "target_type": "mp4" 33 | } 34 | } 35 | 36 | codec_setting={ 37 | "sw": { 38 | "AVC": "libx264", 39 | "HEVC": "libsvt_hevc" 40 | }, 41 | "vaapi": { 42 | "AVC": "h264_vaapi", 43 | "HEVC": "hevc_vaapi" 44 | }, 45 | "qsv": { 46 | "AVC": "h264_qsv", 47 | "HEVC": "hevc_qsv" 48 | } 49 | } 50 | class FFMpegCmd: 51 | def __init__(self, in_params, out_params, streaming_type, params, loop=0, acc_type="sw", device=None): 52 | 
self._in_file=in_params 53 | self._target=out_params 54 | self._tc_params=params if params else default_params["tc_params"] 55 | self._hls_dash_params=params["hls_dash_params"] if "hls_dash_params" in params.keys() else default_params["hls_dash_params"] 56 | self._acc_type=acc_type 57 | 58 | self._segment_num=self._hls_dash_params["segment_num"] 59 | self._duration=self._hls_dash_params["duration"] 60 | 61 | self._stream_info=None 62 | self._streaming_type=streaming_type 63 | 64 | self._renditions=self._tc_params["renditions"] if self._tc_params["renditions"] else RENDITIONS_SAMPLE 65 | 66 | self._codec_type = self._tc_params["codec_type"] 67 | 68 | self._cmd_base=["ffmpeg", "-hide_banner", "-y"] 69 | if loop: 70 | self._cmd_base = self._cmd_base + ["-stream_loop", "-1"] 71 | 72 | self._device=device 73 | if not device and self._acc_type != "sw": 74 | self._device = "/dev/dri/renderD128" 75 | 76 | if self._acc_type == "vaapi": 77 | self._cmd_base = self._cmd_base + ["-hwaccel", "vaapi", "-hwaccel_device", self._device, "-hwaccel_output_format", "vaapi"] 78 | elif self._acc_type == "qsv": 79 | self._cmd_base = self._cmd_base + ["-hwaccel", "qsv", "-qsv_device", self._device, "-c:v", "h264_qsv"] 80 | 81 | self._cmd_base = self._cmd_base + ["-i", self._in_file] 82 | 83 | self._keyframe_interval = 0 84 | self._frame_height = 0 85 | self._clip_v_duration = 0 86 | self._clip_a_duration = 0 87 | 88 | self._segment_target_duration = self._duration # try to create a new segment every X seconds 89 | self._max_bitrate_ratio = 1.07 # maximum accepted bitrate fluctuations 90 | self._rate_monitor_buffer_ratio = 1.5 # maximum buffer size between bitrate conformance checks 91 | 92 | self._default_threshold = 4 93 | self.stream_info(self._in_file) 94 | self._codec = self._get_codec() 95 | # hls and dash 96 | self._cmd_static = ["-c:v", self._codec, "-profile:v", "main", "-sc_threshold", "0", "-strict", "-2"] 97 | if self._acc_type != "sw": 98 | self._cmd_static = ["-profile:v", 
"main", "-c:v", self._codec] 99 | self._cmd_static += ["-g", str(self._keyframe_interval)] 100 | 101 | 102 | def _to_kps(self, bitrate): 103 | return str(int(bitrate/1000))+"k" 104 | 105 | def _get_codec(self): 106 | return codec_setting[self._acc_type][self._codec_type] 107 | 108 | def stream_info(self, in_file): 109 | ffprobe_cmd = ["ffprobe", "-v", "quiet", "-print_format", "json", "-show_streams", in_file] 110 | p = subprocess.Popen(ffprobe_cmd, stdout=subprocess.PIPE) 111 | p.wait() 112 | clip_info = json.loads(p.stdout.read().decode("utf-8")) 113 | 114 | for item in clip_info["streams"]: 115 | if item["codec_type"] == "video": 116 | self._keyframe_interval = int(eval(item["avg_frame_rate"])+0.5) 117 | self._frame_height = item["height"] 118 | self._clip_v_duration = eval(item["duration"]) 119 | if item["codec_type"] == "audio": 120 | self._clip_a_duration = eval(item["duration"]) 121 | 122 | if self._segment_num != 0: 123 | segment_duration = (int)((self._clip_v_duration+2.0)/self._segment_num) 124 | if segment_duration < self._segment_target_duration: 125 | self._segment_target_duration = segment_duration 126 | 127 | def _hls(self): 128 | cmd_hls = ["-hls_time", str(self._segment_target_duration), "-hls_list_size", "0"] 129 | cmd_fade_in_out = ["-an"] 130 | cmd_abr=[] 131 | master_playlist = "#EXTM3U" + "\n" + "#EXT-X-VERSION:3" +"\n" + "#" + "\n" 132 | count = 0 133 | for item in self._renditions: 134 | width = item[0] 135 | height = item[1] 136 | v_bitrate = self._to_kps(item[2]) 137 | a_bitrate = self._to_kps(item[3]) 138 | maxrate = self._to_kps(item[2] * self._max_bitrate_ratio) 139 | name = str(height) + "p" 140 | if self._frame_height < height: 141 | continue 142 | 143 | cmd_1 = ["-vf", "scale=w="+str(width)+":"+"h="+str(height)] 144 | if self._acc_type == "vaapi": 145 | cmd_1 = ["-vf", "scale_vaapi=w="+str(width)+":"+"h="+str(height)+":format=nv12"] 146 | elif self._acc_type == "qsv": 147 | cmd_1 = ["-vf", 
"scale_qsv=w="+str(width)+":"+"h="+str(height)+":format=nv12"] 148 | 149 | cmd_2 = ["-b:v", v_bitrate, "-maxrate", maxrate] 150 | cmd_3 = ["-f", self._streaming_type] 151 | cmd_4 = ["-hls_segment_filename", self._target+"/"+name+"_"+"%03d.ts", self._target+"/"+name+".m3u8"] 152 | master_playlist += "#EXT-X-STREAM-INF:BANDWIDTH="+str(item[2])+","+"RESOLUTION="+str(width)+"x"+str(height)+"\n"+name+".m3u8"+"\n" 153 | cmd_abr += cmd_1 + self._cmd_static + cmd_2 + cmd_fade_in_out + cmd_3 + cmd_hls + cmd_4 154 | 155 | count += 1 156 | if count > self._default_threshold: 157 | break 158 | with open(self._target+"/"+"index.m3u8", "w", encoding='utf-8') as f: 159 | f.write(master_playlist) 160 | return cmd_abr 161 | 162 | def _dash(self): 163 | cmd_dash = ["-use_timeline", "1", "-use_template", "1", "-seg_duration", str(self._segment_target_duration), "-adaptation_sets", "id=0,streams=v"] 164 | cmd_abr=[] 165 | cmd_scale=[] 166 | 167 | count = 0 168 | for item in self._renditions: 169 | width = item[0] 170 | height = item[1] 171 | v_bitrate = self._to_kps(item[2]) 172 | a_bitrate = self._to_kps(item[3]) 173 | maxrate = self._to_kps(item[2] * self._max_bitrate_ratio) 174 | if self._frame_height < height: 175 | continue 176 | cmd_1 = ["-map", "[out"+str(count) +"]", "-b:v"+":"+str(count), v_bitrate, "-maxrate"+":"+str(count), maxrate] 177 | if self._acc_type == "vaapi": 178 | cmd_scale += [";", "[mid"+str(count) +"]", "scale_vaapi=w="+str(width)+":"+"h="+str(height)+":format=nv12","[out"+str(count) +"]"] 179 | elif self._acc_type == "qsv": 180 | cmd_scale += [";", "[mid"+str(count) +"]", "scale_qsv=w="+str(width)+":"+"h="+str(height)+":format=nv12","[out"+str(count) +"]"] 181 | else: 182 | cmd_scale += [";", "[mid"+str(count) +"]", "scale=w="+str(width)+":"+"h="+str(height),"[out"+str(count) +"]"] 183 | cmd_abr += cmd_1 184 | count += 1 185 | if count > self._default_threshold: 186 | break 187 | cmd_scale = ["[0:v]split="+str(count)]+["[mid"+str(_id) +"]" for _id in 
range(count)]+cmd_scale 188 | return ["-filter_complex"] +["".join(cmd_scale)]+ self._cmd_static + cmd_abr +["-f", "dash"] + cmd_dash + ["-y", self._target+"/"+"index.mpd"] 189 | 190 | def _tc(self): 191 | cmd_1 = [] 192 | params = self._tc_params 193 | stream_name = self._target.split("/")[-1].split(".")[0] 194 | for item in self._renditions: 195 | width = item[0] 196 | height = item[1] 197 | v_bitrate = self._to_kps(item[2]) 198 | a_bitrate = self._to_kps(item[3]) 199 | maxrate = self._to_kps(item[2] * self._max_bitrate_ratio) 200 | name= self._target+"/"+stream_name+self._codec_type+"_"+str(height)+"p."+self._streaming_type if self._streaming_type == "mp4" else self._target+"_"+self._codec_type+str(height)+"p" 201 | 202 | if self._acc_type == "vaapi": 203 | cmd_1 += ["-vf", "scale_vaapi=w="+str(width)+":"+"h="+str(height)+":format=nv12", "-c:v", self._codec] 204 | cmd_1 += ["-profile:v", "main", "-b:v", v_bitrate, "-maxrate", v_bitrate, "-r", params["framerate"],"-g", params["gop_size"], "-bf", params["bframe"], "-an", "-f", self._streaming_type, name] 205 | elif self._acc_type == "qsv": 206 | cmd_1 += ["-vf", "scale_qsv=w="+str(width)+":"+"h="+str(height)+":format=nv12", "-c:v", self._codec] 207 | cmd_1 += ["-profile:v", "main", "-b:v", v_bitrate, "-maxrate", v_bitrate, "-r", params["framerate"],"-g", params["gop_size"], "-bf", params["bframe"], "-an", "-f", self._streaming_type, name] 208 | else: 209 | cmd_1 += ["-vf", "scale=w="+str(width)+":"+"h="+str(height),"-c:v", self._codec, "-b:v", v_bitrate] 210 | cmd_1 += ["-r", params["framerate"],"-g", params["gop_size"], "-bf", params["bframe"], "-refs", params["refs"], "-preset", params["preset"], "-forced-idr", params["forced_idr"], "-an", "-f", self._streaming_type, name] 211 | 212 | return cmd_1 213 | 214 | def cmd(self): 215 | cmd = [] 216 | if self._streaming_type == "hls": 217 | cmd = self._cmd_base + self._hls() 218 | if self._streaming_type == "dash": 219 | cmd = self._cmd_base + self._dash() 220 | elif 
class Producer(object):
    """Best-effort Kafka producer: connects lazily on first send.

    Failures are logged and swallowed — sending is fire-and-forget (acks=0),
    so callers never see Kafka errors.
    """

    def __init__(self):
        super(Producer, self).__init__()
        # Use the container hostname as the Kafka client id.
        self._client_id = socket.gethostname()
        self._producer = None

    def send(self, topic, message):
        """Encode `message` as UTF-8 and publish it to `topic` (best effort)."""
        if not self._producer:
            try:
                self._producer = KafkaProducer(bootstrap_servers=KAFKA_HOSTS,
                                               client_id=self._client_id,
                                               api_version=(0, 10), acks=0)
            except Exception:
                print(traceback.format_exc(), flush=True)
                self._producer = None
                # fix: previously fell through and called .send() on None,
                # raising (and then swallowing) an AttributeError.
                return

        try:
            self._producer.send(topic, message.encode('utf-8'))
        except Exception:
            print(traceback.format_exc(), flush=True)

    def flush(self):
        # No-op when no connection was ever established.
        if self._producer:
            self._producer.flush()

    def close(self):
        # Safe to call repeatedly; resets so a later send() reconnects.
        if self._producer:
            self._producer.close()
            self._producer = None
#!/usr/bin/python3

from kazoo.client import KazooClient
from kazoo.exceptions import NoNodeError, NodeExistsError
from kazoo.protocol.states import KazooState
import traceback
import time

ZK_HOSTS = 'zookeeper-service:2181'

class ZKState(object):
    """Coordinates per-item processing via ZooKeeper marker nodes.

    Under `path`, a "<name.>processing" ephemeral node marks work in
    progress (auto-removed if the worker dies) and a "<name.>complete"
    persistent node marks finished work.
    """

    def __init__(self, path, name=None):
        super(ZKState, self).__init__()
        # Retry forever with capped delay; ride out session expirations.
        retry = {"max_tries": -1, "max_delay": 5, "ignore_expire": True}
        self._zk = KazooClient(hosts=ZK_HOSTS, connection_retry=retry)
        try:
            self._zk.start(timeout=3600)
        except:
            print(traceback.format_exc(), flush=True)
        self._path = path
        self._name = "" if name is None else name + "."
        self._zk.ensure_path(path)

    def _node(self, suffix):
        # Full znode path for one of this item's state markers.
        return self._path + "/" + self._name + suffix

    def processed(self):
        """Truthy when the completion marker exists."""
        return self._zk.exists(self._node("complete"))

    def process_start(self):
        """Try to claim this item; False if done or another worker owns it."""
        if self.processed():
            return False
        try:
            # Ephemeral: vanishes with this session, letting another retry.
            self._zk.create(self._node("processing"), ephemeral=True)
            return True
        except NodeExistsError:
            # Another process won the claim.
            return False

    def process_end(self):
        """Persistently mark this item complete (idempotent)."""
        try:
            self._zk.create(self._node("complete"))
        except NodeExistsError:
            pass

    def process_abort(self):
        # Nothing to do: the ephemeral node is deleted when the session closes.
        pass

    def close(self):
        self._zk.stop()
        self._zk.close()
https://www.pexels.com/video/3115738/download 8 | https://www.pexels.com/video/1110140/download 9 | https://www.pexels.com/video/2644023/download 10 | https://www.pexels.com/video/2257025/download 11 | https://www.pexels.com/video/3743056/download 12 | https://www.pexels.com/video/5419496/download 13 | https://www.pexels.com/video/2324293/download 14 | https://www.pexels.com/video/5413799/download 15 | https://www.pexels.com/video/3063911/download 16 | ) 17 | 18 | case "$(cat /proc/1/sched | head -n 1)" in 19 | *build.sh*) 20 | cd /mnt 21 | mkdir -p /mnt/raw 22 | for clip in "${clips[@]}"; do 23 | clip_name="$(echo $clip | cut -f5 -d/).mp4" 24 | if test ! -f "archive/$clip_name"; then 25 | if test "$reply" == ""; then 26 | printf "\n\n\nThe sample requires you to have a set of video clips as the transcoding and streaming source. Please accept the license terms from $LICENSE to start downloading the video clips.\n\nThe terms and conditions of the license apply. Intel does not grant any rights to the video files.\n\n\nPlease type \"accept\" or anything else to skip the download.\n" 27 | read reply 28 | fi 29 | if test "$reply" == "accept"; then 30 | echo "Downloading $clip..." 31 | wget -q -U "XXX YYY" -O "archive/$clip_name" "$clip" 32 | fi 33 | fi 34 | done 35 | for clip in `find archive -name "*.mp4" -print`; do 36 | clip_name="${clip/*\//}" 37 | if test ! -f "archive/$clip_name".png; then 38 | ffmpeg -i "archive/$clip_name" -vf "thumbnail,scale=640:360" -frames:v 1 -y "archive/$clip_name".png 39 | fi 40 | done 41 | for clip in `find archive -name "*.mp4" -print`; do 42 | clip_name="${clip/*\//}" 43 | if test ! -f "raw/$clip_name".yuv; then 44 | ffmpeg -i "archive/$clip_name" -vcodec rawvideo -an -frames:v 600 -y "raw/$clip_name".yuv 45 | fi 46 | done 47 | wait 48 | ;; 49 | *) 50 | mkdir -p "$DIR/../volume/video/archive" 51 | . "$DIR/../script/build.sh" 52 | . 
"$DIR/shell.sh" /home/build.sh $@ 53 | ;; 54 | esac 55 | -------------------------------------------------------------------------------- /content-provider/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="tc_content_provider_archive" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | OPTIONS=("--volume=$DIR/../volume/video:/mnt:rw" "--volume=$DIR:/home:ro") 6 | 7 | . "$DIR/../script/shell.sh" 8 | -------------------------------------------------------------------------------- /deployment/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | include("${CMAKE_SOURCE_DIR}/script/scan-all.cmake") 2 | 3 | if(NOT REGISTRY) 4 | add_custom_target(update ${CMAKE_HOME_DIRECTORY}/script/update-image.sh) 5 | endif() 6 | -------------------------------------------------------------------------------- /deployment/certificate/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "tc_self_certificate") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | -------------------------------------------------------------------------------- /deployment/certificate/Dockerfile: -------------------------------------------------------------------------------- 1 | 2 | FROM ubuntu:18.04 3 | RUN apt-get update && apt-get install -y openssh-server 4 | 5 | #### 6 | ARG UID 7 | ARG GID 8 | ## must use ; here to ignore user exist status code 9 | RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} docker; \ 10 | [ ${UID} -gt 0 ] && useradd -d /home/docker -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} docker; \ 11 | echo 12 | USER ${UID} 13 | #### 14 | -------------------------------------------------------------------------------- /deployment/certificate/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="tc_self_certificate" 4 | DIR=$(dirname $(readlink 
-f "$0")) 5 | 6 | . "$DIR/../../script/build.sh" 7 | -------------------------------------------------------------------------------- /deployment/certificate/self-sign.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="tc_self_certificate" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | case "$(cat /proc/1/sched | head -n 1)" in 7 | *self-sign*) 8 | openssl req -x509 -nodes -days 30 -newkey rsa:4096 -keyout /home/self.key -out /home/self.crt << EOL 9 | CN 10 | SH 11 | Shanghai 12 | Zizhu 13 | Data Center Group 14 | Intel Corporation 15 | $1 16 | nobody@intel.com 17 | EOL 18 | chmod 640 "/home/self.key" 19 | chmod 644 "/home/self.crt" 20 | ;; 21 | *) 22 | OPTIONS=("--volume=${DIR}:/home:rw") 23 | . "$DIR/../../script/shell.sh" /home/self-sign.sh $(hostname -f) 24 | ;; 25 | esac 26 | -------------------------------------------------------------------------------- /deployment/certificate/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="tc_self_certificate" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . 
"$DIR/../../script/shell.sh" 7 | -------------------------------------------------------------------------------- /deployment/kubernetes/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "pv") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | include("${CMAKE_SOURCE_DIR}/script/scan-all.cmake") 4 | add_custom_target(volume ${CMAKE_CURRENT_SOURCE_DIR}/mkvolume.sh) 5 | 6 | # add cleanup files 7 | file(GLOB m4files "${CMAKE_CURRENT_SOURCE_DIR}/*.yaml.m4") 8 | foreach(m4file ${m4files}) 9 | string(REPLACE ".yaml.m4" ".yaml" yamlfile "${m4file}") 10 | set_property(DIRECTORY APPEND PROPERTY ADDITIONAL_MAKE_CLEAN_FILES "${yamlfile}") 11 | endforeach(m4file) 12 | -------------------------------------------------------------------------------- /deployment/kubernetes/README.md: -------------------------------------------------------------------------------- 1 | 2 | The CDN-Transcode sample can be deployed with Kubernetes. 3 | 4 | ### Kubernetes Setup 5 | 6 | - Follow the [instructions](https://kubernetes.io/docs/setup) to setup your Kubernetes cluster. 7 | 8 | - Optional: setup password-less access from the Kubernetes controller to each worker node (required by `make update`): 9 | 10 | ``` 11 | ssh-keygen 12 | ssh-copy-id 13 | ``` 14 | 15 | - Start/stop services as follows: 16 | 17 | ``` 18 | mkdir build 19 | cd build 20 | cmake .. 21 | make 22 | make update # optional for private docker registry 23 | make volume 24 | make start_kubernetes 25 | make stop_kubernetes 26 | ``` 27 | 28 | --- 29 | 30 | The command ```make update``` uploads the sample images to each worker node. If you prefer to use a private docker registry, configure the sample, `cmake -DREGISTRY= ..`, to push images to the private registry after each build. 31 | - The `make volume` command creates local persistent volumes under the `/tmp` directory of the first two Kubernetes workers. 
This is a temporary solution for quick sample deployment. For scalability beyond a two-node cluster, consider rewriting the persistent volume scripts. 32 | 33 | --- 34 | 35 | ### See Also 36 | 37 | - [Helm Charts](helm/cdn-transcode/README.md) 38 | - [CMake Options](../../doc/cmake.md) 39 | - [Reference Architecture](https://networkbuilders.intel.com/solutionslibrary/container-bare-metal-for-2nd-generation-intel-xeon-scalable-processor) 40 | 41 | 42 | -------------------------------------------------------------------------------- /deployment/kubernetes/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | 5 | rm -rf "$DIR/../../volume/video/cache" 6 | mkdir -p "$DIR/../../volume/video/cache/hls" "$DIR/../../volume/video/cache/dash" 7 | 8 | # make sure kubectl is functional 9 | kubectl get node >/dev/null 2>/dev/null || exit 0 10 | 11 | hosts=($(kubectl get node -l vcac-zone!=yes -o custom-columns=NAME:metadata.name,STATUS:status.conditions[-1].type,TAINT:spec.taints | grep " Ready " | grep -v "NoSchedule" | cut -f1 -d' ')) 12 | 13 | if test ${#hosts[@]} -eq 0; then 14 | printf "\nFailed to locate worker node(s) for shared storage\n\n" 15 | exit -1 16 | elif test ${#hosts[@]} -lt 2; then 17 | hosts=(${hosts[0]} ${hosts[0]}) 18 | fi 19 | 20 | . 
"$DIR/volume-info.sh" "${hosts[@]}" 21 | for pv in $(find "${DIR}" -maxdepth 1 -name "*-pv.yaml.m4" -print); do 22 | m4 $(env | grep _VOLUME_ | sed 's/^/-D/') -I "${DIR}" "${pv}" > "${pv/.m4/}" 23 | done 24 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/.gitignore: -------------------------------------------------------------------------------- 1 | cdn-transcode/values.yaml 2 | *-pv.yaml 3 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "helm") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | include("${CMAKE_SOURCE_DIR}/script/deployment.cmake") 4 | add_dependencies(build_${service} build_pv) 5 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | NVODS="${1:-1}" 5 | NLIVES="${2:-1}" 6 | SCENARIO="${3:-transcode}" 7 | PLATFORM="${4:-Xeon}" 8 | REGISTRY="$5" 9 | HOSTIP=$(ip route get 8.8.8.8 | awk '/ src /{split(substr($0,index($0," src ")),f);print f[2];exit}') 10 | 11 | # make sure helm is functional 12 | helm version >/dev/null 2>/dev/null || exit 0 13 | 14 | echo "Generating helm chart" 15 | . 
"${DIR}/../volume-info.sh" 16 | m4 -DREGISTRY_PREFIX=${REGISTRY} -DNVODS=${NVODS} -DNLIVES=${NLIVES} -DSCENARIO=${SCENARIO} -DPLATFORM=${PLATFORM} -DUSERID=$(id -u) -DGROUPID=$(id -g) -DHOSTIP=${HOSTIP} $(env | grep _VOLUME_ | sed 's/^/-D/') -I "${DIR}/cdn-transcode" "$DIR/cdn-transcode/values.yaml.m4" > "$DIR/cdn-transcode/values.yaml" 17 | 18 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/cdn-transcode/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v2 2 | appVersion: 0.1.0 3 | description: A Helm chart for the CDN Transcode sample 4 | home: https://github.com/OpenVisualCloud/CDN-Transocde-Sample 5 | icon: https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/master/volume/html/favicon.ico 6 | name: cdn-transcode-sample 7 | sources: 8 | - https://github.com/OpenVisualCloud/CDN-Transcode-Sample 9 | type: application 10 | version: 0.1.0 11 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/cdn-transcode/README.md: -------------------------------------------------------------------------------- 1 | 2 | The CDN Transcode Sample is an Open Visual Cloud software stack with all required open source ingredients well integrated to provide out-of-box simple transcode or CDN media transcode service, including live streaming and video on demand. It also provides docker-based media delivery software development environment upon which developer can easily build their specific applications. 3 | 4 | ### Prerequisites: 5 | 6 | The Sample assumes that you have a ready-to-use Kubernetes cluster environment with `helm` to manage the applicatoin deployment. 7 | 8 | ### Build: 9 | 10 | ```bash 11 | mkdir build 12 | cd build 13 | cmake .. 
14 | make 15 | ``` 16 | 17 | --- 18 | 19 | If you deploy the sample to a cluster, please configure the sample, as `cmake -DREGISTRY= ..`, to push the sample images to the private docker registry after each build. 20 | 21 | --- 22 | 23 | ### Create Shared Volumes: 24 | 25 | ```bash 26 | make volume 27 | ``` 28 | 29 | The `make volume` command creates local persistent volumes under the /tmp directory of the first two Kubernetes workers. This is a temporary solution for quick sample deployment. For scalability beyond a two-node cluster, consider rewriting the `mkvolume.sh` script. 30 | 31 | `make volume` uses `scp` to copy volumes to the Kubernetes workers, assuming that the Kubernetes master has password-less access to the Kubernetes workers. 32 | 33 | ### Start/Stop Sample: 34 | 35 | ```bash 36 | make start_helm 37 | make stop_helm 38 | ``` 39 | 40 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/cdn-transcode/templates/cdn.yaml: -------------------------------------------------------------------------------- 1 | 2 | {{- if eq "cdn" $.Values.scenario }} 3 | 4 | apiVersion: v1 5 | kind: Service 6 | metadata: 7 | labels: 8 | app: cdn-service 9 | name: cdn-service 10 | spec: 11 | ports: 12 | - name: "443" 13 | port: 443 14 | targetPort: 8443 15 | - name: "1935" 16 | port: 1935 17 | targetPort: 1935 18 | externalIPs: 19 | - "{{ .Values.cdn.hostIP }}" 20 | selector: 21 | app: cdn-service 22 | 23 | --- 24 | 25 | apiVersion: apps/v1 26 | kind: Deployment 27 | metadata: 28 | labels: 29 | app: cdn-service 30 | name: cdn-service 31 | spec: 32 | selector: 33 | matchLabels: 34 | app: cdn-service 35 | replicas: 1 36 | template: 37 | metadata: 38 | creationTimestamp: null 39 | labels: 40 | app: cdn-service 41 | spec: 42 | containers: 43 | - args: 44 | - bash 45 | - -c 46 | - /home/main.py&/usr/local/sbin/nginx 47 | image: {{ $.Values.registryPrefix }}tc_{{ $.Values.scenario }}_service:latest 48 | imagePullPolicy:
IfNotPresent 49 | name: cdn-service 50 | ports: 51 | - containerPort: 8443 52 | - containerPort: 1935 53 | volumeMounts: 54 | - mountPath: /var/run/secrets 55 | name: secrets 56 | readOnly: true 57 | - mountPath: /var/www/archive 58 | name: archive 59 | - mountPath: /var/www/video 60 | name: cache 61 | volumes: 62 | - name: secrets 63 | secret: 64 | secretName: self-signed-certificate 65 | - name: archive 66 | persistentVolumeClaim: 67 | claimName: video-archive 68 | - name: cache 69 | persistentVolumeClaim: 70 | claimName: video-cache 71 | restartPolicy: Always 72 | 73 | {{- end }} 74 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/cdn-transcode/templates/kafka.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | labels: 6 | app: kafka-service 7 | name: kafka-service 8 | spec: 9 | ports: 10 | - name: "9092" 11 | port: 9092 12 | targetPort: 9092 13 | selector: 14 | app: kafka-service 15 | 16 | --- 17 | 18 | apiVersion: apps/v1 19 | kind: Deployment 20 | metadata: 21 | labels: 22 | app: kafka-service 23 | name: kafka-service 24 | spec: 25 | selector: 26 | matchLabels: 27 | app: kafka-service 28 | replicas: 1 29 | template: 30 | metadata: 31 | labels: 32 | app: kafka-service 33 | spec: 34 | containers: 35 | - env: 36 | - name: KAFKA_ADVERTISED_HOST_NAME 37 | value: kafka-service 38 | - name: KAFKA_ADVERTISED_LISTENERS 39 | value: PLAINTEXT://kafka-service:9092 40 | - name: KAFKA_ADVERTISED_PORT 41 | value: "9092" 42 | - name: KAFKA_AUTO_CREATE_TOPICS_ENABLE 43 | value: "true" 44 | - name: KAFKA_BROKER_ID 45 | value: "1" 46 | - name: KAFKA_CREATE_TOPICS 47 | value: content_provider_sched:16:1 48 | - name: KAFKA_DEFAULT_REPLICATION_FACTOR 49 | value: "1" 50 | - name: KAFKA_HEAP_OPTS 51 | value: -Xmx{{ .Values.kafka.heapSize }} -Xms{{ .Values.kafka.heapSize }} 52 | - name: KAFKA_INTER_BROKER_LISTENER_NAME 53 | value: 
PLAINTEXT 54 | - name: KAFKA_LISTENER_SECURITY_PROTOCOL_MAP 55 | value: PLAINTEXT:PLAINTEXT 56 | - name: KAFKA_LOG4J_LOGGERS 57 | value: kafka=ERROR,kafka.controller=ERROR,state.change.logger=ERROR,org.apache.kafka=ERROR 58 | - name: KAFKA_LOG4J_ROOT_LOGLEVEL 59 | value: ERROR 60 | - name: KAFKA_LOG_RETENTION_HOURS 61 | value: "8" 62 | - name: KAFKA_NUM_PARTITIONS 63 | value: "16" 64 | - name: KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR 65 | value: "1" 66 | - name: KAFKA_ZOOKEEPER_CONNECT 67 | value: zookeeper-service:2181 68 | image: {{ $.Values.registryPrefix }}tc_kafka_service:latest 69 | imagePullPolicy: IfNotPresent 70 | name: kafka-service 71 | ports: 72 | - containerPort: 9092 73 | securityContext: 74 | runAsUser: 1000 75 | restartPolicy: Always 76 | 77 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/cdn-transcode/templates/live.yaml: -------------------------------------------------------------------------------- 1 | 2 | {{- if eq "cdn" $.Values.scenario }} 3 | 4 | {{- range $i,$v1 := .Values.liveTranscode.streams }} 5 | 6 | apiVersion: apps/v1 7 | kind: Deployment 8 | metadata: 9 | labels: 10 | app: live-service-{{ $i }} 11 | name: live-service-{{ $i }} 12 | spec: 13 | selector: 14 | matchLabels: 15 | app: live-service-{{ $i }} 16 | replicas: {{if lt (int $i) (int $.Values.liveTranscode.replicas)}}1{{else}}0{{end}} 17 | template: 18 | metadata: 19 | creationTimestamp: null 20 | labels: 21 | app: live-service-{{ $i }} 22 | spec: 23 | containers: 24 | - image: {{ $.Values.registryPrefix }}tc_xcode_{{ lower $.Values.platform }}:latest 25 | imagePullPolicy: IfNotPresent 26 | command: ["/usr/local/bin/ffmpeg","-re","-stream_loop","-1", 27 | "-i","{{ .name }}", 28 | {{- range $k,$v2 := .transcode }} 29 | "-vf","scale={{ .scale }}", 30 | "-c:v","{{ .encoderType }}", 31 | "-b:v","{{ .bitrate }}", 32 | "-r","{{ .framerate }}", 33 | "-g","{{ .gop }}", 34 | "-bf","{{ .maxbframes }}", 35 | "-refs","{{ .refsNum 
}}", 36 | "-preset","{{ .preset }}", 37 | "-forced-idr","1", 38 | {{- if eq ( hasPrefix "libsvt" .encoderType ) true }} 39 | "-thread_count","96", 40 | {{- end }} 41 | "-an", 42 | "-f","flv","rtmp://cdn-service/{{ .protocol }}/media_{{ $i }}_{{ $k }}", 43 | {{- end }} 44 | "-abr_pipeline"] 45 | lifecycle: 46 | preStop: 47 | exec: 48 | command: 49 | - rm 50 | - -rf 51 | {{- range $k,$v2 := .transcode }} 52 | - ' /var/www/video/{{ .protocol }}/media_{{ $i }}_{{ $k }}' 53 | {{- end }} 54 | name: live-service-{{ $i }} 55 | env: 56 | - name: NO_PROXY 57 | value: "cdn-service" 58 | - name: no_proxy 59 | value: "cdn-service" 60 | volumeMounts: 61 | - mountPath: /var/www/archive 62 | name: archive 63 | readOnly: true 64 | volumes: 65 | - name: archive 66 | persistentVolumeClaim: 67 | claimName: video-archive 68 | restartPolicy: Always 69 | 70 | --- 71 | {{- end }} 72 | {{- end }} 73 | 74 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/cdn-transcode/templates/redis.yaml: -------------------------------------------------------------------------------- 1 | {{- if eq "cdn" $.Values.scenario }} 2 | 3 | apiVersion: v1 4 | kind: Service 5 | metadata: 6 | labels: 7 | app: redis-service 8 | name: redis-service 9 | spec: 10 | ports: 11 | - name: "6379" 12 | port: 6379 13 | targetPort: 6379 14 | selector: 15 | app: redis-service 16 | 17 | --- 18 | 19 | apiVersion: apps/v1 20 | kind: Deployment 21 | metadata: 22 | labels: 23 | app: redis-service 24 | name: redis-service 25 | spec: 26 | selector: 27 | matchLabels: 28 | app: redis-service 29 | replicas: 1 30 | template: 31 | metadata: 32 | creationTimestamp: null 33 | labels: 34 | app: redis-service 35 | spec: 36 | containers: 37 | - args: 38 | - redis-server 39 | image: redis:latest 40 | imagePullPolicy: IfNotPresent 41 | name: redis-service 42 | ports: 43 | - containerPort: 6379 44 | securityContext: 45 | runAsUser: 999 46 | restartPolicy: Always 47 | 48 | {{- end }} 49 | 
-------------------------------------------------------------------------------- /deployment/kubernetes/helm/cdn-transcode/templates/video-archive-pvc.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: PersistentVolumeClaim 4 | metadata: 5 | name: video-archive 6 | spec: 7 | accessModes: 8 | - ReadWriteMany 9 | storageClassName: video-archive 10 | resources: 11 | requests: 12 | storage: "{{ .Values.volume.video.archive.size }}" 13 | 14 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/cdn-transcode/templates/video-cache-pvc.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: PersistentVolumeClaim 4 | metadata: 5 | name: video-cache 6 | spec: 7 | accessModes: 8 | - ReadWriteMany 9 | storageClassName: video-cache 10 | resources: 11 | requests: 12 | storage: "{{ .Values.volume.video.cache.size }}" 13 | 14 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/cdn-transcode/templates/xcode.yaml: -------------------------------------------------------------------------------- 1 | 2 | {{- range $deviceIdx := until ( int ( $.Values.hwDeviceNum ) ) }} 3 | 4 | apiVersion: apps/v1 5 | kind: Deployment 6 | metadata: 7 | labels: 8 | app: xcode-service-{{ $deviceIdx }} 9 | name: xcode-service-{{ $deviceIdx }} 10 | spec: 11 | selector: 12 | matchLabels: 13 | app: xcode-service-{{ $deviceIdx }} 14 | replicas: {{ $.Values.vodTranscode.replicas }} 15 | template: 16 | metadata: 17 | creationTimestamp: null 18 | labels: 19 | app: xcode-service-{{ $deviceIdx }} 20 | spec: 21 | containers: 22 | - args: 23 | - bash 24 | - -c 25 | - /home/main.py 26 | image: {{ $.Values.registryPrefix }}tc_xcode_{{ lower $.Values.platform }}:latest 27 | imagePullPolicy: IfNotPresent 28 | name: xcode-service-{{ $deviceIdx }} 29 | env: 30 | {{- if ne 
$.Values.platform "Xeon" }} 31 | - name: HW_ACC_TYPE 32 | value: {{ $.Values.hwAccType }} 33 | - name: HW_DEVICE 34 | value: /dev/dri/renderD{{ add $deviceIdx 128 }} 35 | {{- end }} 36 | - name: SCENARIO 37 | value: {{ $.Values.scenario | quote }} 38 | - name: NO_PROXY 39 | value: "*" 40 | - name: no_proxy 41 | value: "*" 42 | volumeMounts: 43 | - mountPath: /var/www/archive 44 | name: archive 45 | readOnly: true 46 | - mountPath: /var/www/video 47 | name: cache 48 | {{- if ne $.Values.platform "Xeon" }} 49 | # resources: 50 | # limits: 51 | # gpu.intel.com/i915: 1 52 | securityContext: 53 | privileged: true 54 | runAsUser: 0 55 | {{- end }} 56 | volumes: 57 | - name: archive 58 | persistentVolumeClaim: 59 | claimName: video-archive 60 | - name: cache 61 | persistentVolumeClaim: 62 | claimName: video-cache 63 | restartPolicy: Always 64 | 65 | --- 66 | {{- end }} 67 | 68 | 69 | {{- if ne "cdn" $.Values.scenario }} 70 | --- 71 | 72 | apiVersion: batch/v1 73 | kind: Job 74 | metadata: 75 | name: benchmark 76 | spec: 77 | template: 78 | spec: 79 | enableServiceLinks: false 80 | containers: 81 | - name: benchmark 82 | image: {{ $.Values.registryPrefix }}tc_benchmark_service:latest 83 | imagePullPolicy: IfNotPresent 84 | env: 85 | - name: NO_PROXY 86 | value: "*" 87 | - name: no_proxy 88 | value: "*" 89 | volumeMounts: 90 | - mountPath: /var/www/archive 91 | name: video-archive 92 | readOnly: true 93 | - mountPath: /var/www/video 94 | name: video-cache 95 | volumes: 96 | - name: video-archive 97 | persistentVolumeClaim: 98 | claimName: video-archive 99 | - name: video-cache 100 | persistentVolumeClaim: 101 | claimName: video-cache 102 | restartPolicy: Never 103 | 104 | {{- end }} 105 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/cdn-transcode/templates/zookeeper.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | 
labels: 6 | app: zookeeper-service 7 | name: zookeeper-service 8 | spec: 9 | ports: 10 | - name: "2181" 11 | port: 2181 12 | targetPort: 2181 13 | selector: 14 | app: zookeeper-service 15 | 16 | --- 17 | 18 | apiVersion: apps/v1 19 | kind: Deployment 20 | metadata: 21 | labels: 22 | app: zookeeper-service 23 | name: zookeeper-service 24 | spec: 25 | selector: 26 | matchLabels: 27 | app: zookeeper-service 28 | replicas: 1 29 | template: 30 | metadata: 31 | creationTimestamp: null 32 | labels: 33 | app: zookeeper-service 34 | spec: 35 | containers: 36 | - env: 37 | - name: ZOOKEEPER_CLIENT_PORT 38 | value: "2181" 39 | - name: ZOOKEEPER_HEAP_OPTS 40 | value: -Xmx{{ $.Values.zookeeper.heapSize }} -Xms{{ $.Values.zookeeper.heapSize }} 41 | - name: ZOOKEEPER_LOG4J_LOGGERS 42 | value: zookeepr=ERROR 43 | - name: ZOOKEEPER_LOG4J_ROOT_LOGLEVEL 44 | value: ERROR 45 | - name: ZOOKEEPER_MAX_CLIENT_CNXNS 46 | value: "20000" 47 | - name: ZOOKEEPER_SERVER_ID 48 | value: "1" 49 | - name: ZOOKEEPER_TICK_TIME 50 | value: "2000" 51 | image: zookeeper:3.5.6 52 | imagePullPolicy: IfNotPresent 53 | name: zookeeper-service 54 | ports: 55 | - containerPort: 2181 56 | securityContext: 57 | runAsUser: 1000 58 | restartPolicy: Always 59 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/cdn-transcode/values.yaml.m4: -------------------------------------------------------------------------------- 1 | 2 | registryPrefix: "defn(`REGISTRY_PREFIX')" 3 | 4 | # platform specifies the target platform: Xeon or XeonE3. 5 | platform: "defn(`PLATFORM')" 6 | 7 | # transcoding with HW QSV or VAAPI: qsv or vaapi. 8 | hwAccType: "qsv" 9 | hwDeviceNum: ifelse(defn(`PLATFORM'),`SG1',4,1) 10 | 11 | # scenario specifies the mode: cdn or batch. 
12 | scenario: "defn(`SCENARIO')" 13 | 14 | zookeeper: 15 | heapSize: 1024m 16 | 17 | kafka: 18 | heapSize: 1024m 19 | 20 | liveTranscode: 21 | replicas: defn(`NLIVES') 22 | streams: 23 | - name: "/var/www/archive/3115738.mp4" 24 | transcode: 25 | - protocol: dash 26 | scale: "856:480" 27 | bitrate: "8000000" 28 | framerate: 25 29 | gop: 100 30 | maxbframes: 2 31 | refsNum: 2 32 | preset: veryfast 33 | encoderType: libx264 34 | - name: "/var/www/archive/3115738.mp4" 35 | transcode: 36 | - protocol: hls 37 | scale: "856:480" 38 | bitrate: "8000000" 39 | framerate: 25 40 | gop: 100 41 | maxbframes: 2 42 | refsNum: 2 43 | preset: 9 44 | encoderType: libsvt_hevc 45 | 46 | vodTranscode: 47 | replicas: defn(`NVODS') 48 | 49 | cdn: 50 | hostIP: defn(`HOSTIP') 51 | 52 | volume: 53 | video: 54 | archive: 55 | size: defn(`VIDEO_ARCHIVE_VOLUME_SIZE') 56 | cache: 57 | size: defn(`VIDEO_CACHE_VOLUME_SIZE') 58 | 59 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | 5 | function create_secret { 6 | kubectl create secret generic self-signed-certificate "--from-file=${DIR}/../../certificate/self.crt" "--from-file=${DIR}/../../certificate/self.key" 7 | } 8 | 9 | # create secrets 10 | "$DIR/../../certificate/self-sign.sh" 11 | create_secret 2>/dev/null || (kubectl delete secret self-signed-certificate; create_secret) 12 | 13 | for yaml in $(find "$DIR/.." 
-maxdepth 1 -name "*-pv.yaml" -print); do 14 | kubectl apply -f "$yaml" 15 | done 16 | helm install cdn-transcode "$DIR/cdn-transcode" 17 | -------------------------------------------------------------------------------- /deployment/kubernetes/helm/stop.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | 5 | helm uninstall cdn-transcode 6 | 7 | # delete pvs and scs 8 | for yaml in $(find "${DIR}/.." -maxdepth 1 -name "*-pv.yaml" -print); do 9 | kubectl delete --wait=false -f "$yaml" --ignore-not-found=true 2>/dev/null 10 | done 11 | 12 | kubectl delete secret self-signed-certificate 2> /dev/null || echo -n "" 13 | -------------------------------------------------------------------------------- /deployment/kubernetes/mkvolume.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | 5 | echo "Making volumes..." 
6 | HOSTS=$(kubectl get node -o 'custom-columns=NAME:.status.addresses[?(@.type=="Hostname")].address,IP:.status.addresses[?(@.type=="InternalIP")].address' | awk '!/NAME/{print $1":"$2}') 7 | awk -v DIR="$DIR" -v HOSTS="$HOSTS" ' 8 | BEGIN{ 9 | split(HOSTS,tmp1," "); 10 | for (i in tmp1) { 11 | split(tmp1[i],tmp2,":"); 12 | host2ip[tmp2[1]]=tmp2[2]; 13 | } 14 | } 15 | /name:/ { 16 | gsub("-","/",$2) 17 | content="\""DIR"/../../volume/"$2"\"" 18 | } 19 | /path:/ { 20 | path=$2 21 | } 22 | /- ".*"/ { 23 | host=host2ip[substr($2,2,length($2)-2)]; 24 | paths[host,path]=1; 25 | contents[host,path]=content 26 | } 27 | END { 28 | for (item in paths) { 29 | split(item,tmp,SUBSEP); 30 | host=tmp[1] 31 | path=tmp[2]; 32 | print host, path; 33 | system("ssh "host" \"mkdir -p "path";find "path" -mindepth 1 -maxdepth 1 -exec rm -rf {} \\\\;\""); 34 | system("scp -r "contents[host,path]"/* "host":"path); 35 | } 36 | } 37 | ' "$DIR"/*-pv.yaml 38 | -------------------------------------------------------------------------------- /deployment/kubernetes/video-archive-pv.yaml.m4: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: storage.k8s.io/v1 3 | kind: StorageClass 4 | metadata: 5 | name: video-archive 6 | provisioner: kubernetes.io/no-provisioner 7 | volumeBindingMode: WaitForFirstConsumer 8 | 9 | --- 10 | 11 | apiVersion: v1 12 | kind: PersistentVolume 13 | metadata: 14 | name: video-archive 15 | spec: 16 | capacity: 17 | storage: defn(`VIDEO_ARCHIVE_VOLUME_SIZE')Gi 18 | accessModes: 19 | - ReadWriteMany 20 | persistentVolumeReclaimPolicy: Retain 21 | storageClassName: video-archive 22 | local: 23 | path: defn(`VIDEO_ARCHIVE_VOLUME_PATH') 24 | nodeAffinity: 25 | required: 26 | nodeSelectorTerms: 27 | - matchExpressions: 28 | - key: kubernetes.io/hostname 29 | operator: In 30 | values: 31 | - "defn(`VIDEO_ARCHIVE_VOLUME_HOST')" 32 | 33 | -------------------------------------------------------------------------------- 
/deployment/kubernetes/video-cache-pv.yaml.m4: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: storage.k8s.io/v1 3 | kind: StorageClass 4 | metadata: 5 | name: video-cache 6 | provisioner: kubernetes.io/no-provisioner 7 | volumeBindingMode: WaitForFirstConsumer 8 | 9 | --- 10 | 11 | apiVersion: v1 12 | kind: PersistentVolume 13 | metadata: 14 | name: video-cache 15 | spec: 16 | capacity: 17 | storage: defn(`VIDEO_CACHE_VOLUME_SIZE')Gi 18 | accessModes: 19 | - ReadWriteMany 20 | persistentVolumeReclaimPolicy: Retain 21 | storageClassName: video-cache 22 | local: 23 | path: defn(`VIDEO_CACHE_VOLUME_PATH') 24 | nodeAffinity: 25 | required: 26 | nodeSelectorTerms: 27 | - matchExpressions: 28 | - key: kubernetes.io/hostname 29 | operator: In 30 | values: 31 | - "defn(`VIDEO_CACHE_VOLUME_HOST')" 32 | 33 | -------------------------------------------------------------------------------- /deployment/kubernetes/volume-info.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | export VIDEO_ARCHIVE_VOLUME_PATH=/home/${USER}/sdp/archive/video 4 | export VIDEO_ARCHIVE_VOLUME_SIZE=2 5 | export VIDEO_ARCHIVE_VOLUME_HOST=$1 6 | 7 | export VIDEO_CACHE_VOLUME_PATH=/home/${USER}/sdp/cache/video 8 | export VIDEO_CACHE_VOLUME_SIZE=2 9 | export VIDEO_CACHE_VOLUME_HOST=$2 10 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "kubernetes") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | include("${CMAKE_SOURCE_DIR}/script/deployment.cmake") 4 | 5 | # add cleanup files 6 | file(GLOB m4files "${CMAKE_CURRENT_SOURCE_DIR}/*.yaml.m4") 7 | foreach(m4file ${m4files}) 8 | string(REPLACE ".yaml.m4" ".yaml" yamlfile "${m4file}") 9 | set_property(DIRECTORY APPEND PROPERTY ADDITIONAL_MAKE_CLEAN_FILES 
"${yamlfile}") 10 | endforeach(m4file) 11 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | NVODS="${1:-1}" 5 | NLIVES="${2:-1}" 6 | SCENARIO="${3:-transcode}" 7 | PLATFORM="${4:-Xeon}" 8 | REGISTRY="$5" 9 | HOSTIP=$(ip route get 8.8.8.8 | awk '/ src /{split(substr($0,index($0," src ")),f);print f[2];exit}') 10 | 11 | . "${DIR}/../volume-info.sh" 12 | echo "NVODS=${NVODS} NLIVES=${NLIVES} SCENARIO=${SCENARIO} PLATFORM=${PLATFORM}" 13 | for template in $(find "${DIR}" -maxdepth 1 -name "*.yaml.m4" -print); do 14 | m4 -DNVODS=${NVODS} -DNLIVES=${NLIVES} -DSCENARIO=${SCENARIO} -DPLATFORM=${PLATFORM} -DUSERID=$(id -u) -DGROUPID=$(id -g) -DHOSTIP=${HOSTIP} -DREGISTRY_PREFIX=${REGISTRY} $(env | grep _VOLUME_ | sed 's/^/-D/') -I "${DIR}" "${template}" > "${template/.m4/}" 15 | done 16 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/cdn.yaml.m4: -------------------------------------------------------------------------------- 1 | include(platform.m4) 2 | include(configure.m4) 3 | 4 | ifelse(defn(`SCENARIO'),`cdn',` 5 | apiVersion: v1 6 | kind: Service 7 | metadata: 8 | name: cdn-service 9 | labels: 10 | app: cdn 11 | spec: 12 | ports: 13 | - port: 443 14 | targetPort: 8443 15 | name: https 16 | - port: 1935 17 | targetPort: 1935 18 | name: rtmp 19 | externalIPs: 20 | - defn(`HOSTIP') 21 | selector: 22 | app: cdn 23 | 24 | --- 25 | ') 26 | 27 | apiVersion: apps/v1 28 | kind: Deployment 29 | metadata: 30 | name: cdn 31 | labels: 32 | app: cdn 33 | spec: 34 | replicas: ifelse(defn(`SCENARIO'),`cdn',1,0) 35 | selector: 36 | matchLabels: 37 | app: cdn 38 | template: 39 | metadata: 40 | labels: 41 | app: cdn 42 | spec: 43 | enableServiceLinks: false 44 | containers: 45 | - name: cdn 46 | image: 
defn(`REGISTRY_PREFIX')`tc_'defn(`SCENARIO')_service:latest 47 | imagePullPolicy: IfNotPresent 48 | ports: 49 | - containerPort: 8443 50 | - containerPort: 1935 51 | resources: 52 | limits: 53 | cpu: eval(defn(`CDN_CPU')*2) 54 | memory: eval(defn(`CDN_MEMORY')*2)Mi 55 | requests: 56 | cpu: defn(`CDN_CPU') 57 | memory: defn(`CDN_MEMORY')Mi 58 | volumeMounts: 59 | - mountPath: /var/www/archive 60 | name: video-archive 61 | - mountPath: /var/www/video 62 | name: video-cache 63 | - mountPath: /var/run/secrets 64 | name: self-signed-certificate 65 | readOnly: true 66 | volumes: 67 | - name: video-archive 68 | persistentVolumeClaim: 69 | claimName: video-archive 70 | - name: video-cache 71 | persistentVolumeClaim: 72 | claimName: video-cache 73 | - name: self-signed-certificate 74 | secret: 75 | secretName: self-signed-certificate 76 | PLATFORM_NODE_SELECTOR(`Xeon')dnl 77 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/configure.m4: -------------------------------------------------------------------------------- 1 | 2 | define(`HW_ACC_PLUGIN_TYPE',`qsv') 3 | define(`HW_DEVICE_NUM',ifelse(defn(`PLATFORM'),`SG1',4,1)) 4 | 5 | define(`CDN_CPU',2) 6 | define(`CDN_MEMORY',2000) 7 | 8 | define(`REDIS_CPU',1) 9 | define(`REDIS_MEMORY',500) 10 | 11 | define(`ZOOKEEPER_CPU',1) 12 | define(`ZOOKEEPER_MEMORY',500) 13 | 14 | define(`KAFKA_CPU',1) 15 | define(`KAFKA_MEMORY',500) 16 | 17 | define(`VOD_CPU',1) 18 | define(`VOD_MEMORY',2000) 19 | 20 | define(`LIVE_0_CPU',1) 21 | define(`LIVE_0_MEMORY',2000) 22 | define(`LIVE_0_URL',3115738.mp4) 23 | 24 | define(`LIVE_0_0_PROTOCOL',hls) 25 | define(`LIVE_0_0_WIDTH',856) 26 | define(`LIVE_0_0_HEIGHT',480) 27 | define(`LIVE_0_0_BITRATE',8000000) 28 | define(`LIVE_0_0_FRAMERATE',30) 29 | define(`LIVE_0_0_GOP',100) 30 | define(`LIVE_0_0_MAXBFRAMES',2) 31 | define(`LIVE_0_0_REFSNUM',2) 32 | define(`LIVE_0_0_PRESET',veryfast) 33 | define(`LIVE_0_0_ENCODETYPE',libx264) 34 | 
define(`LIVE_0_0_HWACCEL',false) 35 | 36 | define(`LIVE_0_1_PROTOCOL',dash) 37 | define(`LIVE_0_1_WIDTH',856) 38 | define(`LIVE_0_1_HEIGHT',480) 39 | define(`LIVE_0_1_BITRATE',8000000) 40 | define(`LIVE_0_1_FRAMERATE',30) 41 | define(`LIVE_0_1_GOP',100) 42 | define(`LIVE_0_1_MAXBFRAMES',2) 43 | define(`LIVE_0_1_REFSNUM',2) 44 | define(`LIVE_0_1_PRESET',9) 45 | define(`LIVE_0_1_ENCODETYPE',libsvt_hevc) 46 | define(`LIVE_0_1_HWACCEL',false) 47 | 48 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/kafka.yaml.m4: -------------------------------------------------------------------------------- 1 | include(platform.m4) 2 | include(configure.m4) 3 | 4 | apiVersion: v1 5 | kind: Service 6 | metadata: 7 | name: kafka-service 8 | labels: 9 | app: kafka 10 | spec: 11 | ports: 12 | - port: 9092 13 | protocol: TCP 14 | selector: 15 | app: kafka 16 | 17 | --- 18 | 19 | apiVersion: apps/v1 20 | kind: Deployment 21 | metadata: 22 | name: kafka 23 | labels: 24 | app: kafka 25 | spec: 26 | replicas: 1 27 | selector: 28 | matchLabels: 29 | app: kafka 30 | template: 31 | metadata: 32 | labels: 33 | app: kafka 34 | spec: 35 | enableServiceLinks: false 36 | containers: 37 | - name: kafka 38 | image: defn(`REGISTRY_PREFIX')tc_kafka_service:latest 39 | imagePullPolicy: IfNotPresent 40 | ports: 41 | - containerPort: 9092 42 | env: 43 | - name: "KAFKA_BROKER_ID" 44 | value: "1" 45 | - name: "KAFKA_ZOOKEEPER_CONNECT" 46 | value: "zookeeper-service:2181" 47 | - name: "KAFKA_LISTENERS" 48 | value: "PLAINTEXT://:9092" 49 | - name: "KAFKA_ADVERTISED_LISTENERS" 50 | value: "PLAINTEXT://kafka-service:9092" 51 | - name: "KAFKA_LISTENER_SECURITY_PROTOCOL_MAP" 52 | value: "PLAINTEXT:PLAINTEXT" 53 | - name: "KAFKA_INTER_BROKER_LISTENER_NAME" 54 | value: "PLAINTEXT" 55 | - name: "KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR" 56 | value: "1" 57 | - name: "KAFKA_DEFAULT_REPLICATION_FACTOR" 58 | value: "1" 59 | - name: 
"KAFKA_AUTO_CREATE_TOPICS_ENABLE" 60 | value: "true" 61 | - name: KAFKA_CREATE_TOPICS 62 | value: content_provider_sched:16:1 63 | - name: "KAFKA_NUM_PARTITIONS" 64 | value: "16" 65 | - name: "KAFKA_LOG_RETENTION_MINUTES" 66 | value: "30" 67 | - name: "KAFKA_HEAP_OPTS" 68 | value: "`-Xmx'defn(`KAFKA_MEMORY')m -`Xms'defn(`KAFKA_MEMORY')m" 69 | - name: "KAFKA_LOG4J_ROOT_LOGLEVEL" 70 | value: "ERROR" 71 | securityContext: 72 | runAsUser: 1000 73 | resources: 74 | requests: 75 | cpu: defn(`KAFKA_CPU') 76 | memory: defn(`KAFKA_MEMORY')Mi 77 | limits: 78 | cpu: eval(defn(`KAFKA_CPU')*2) 79 | memory: defn(`KAFKA_MEMORY')Mi 80 | PLATFORM_NODE_SELECTOR(`Xeon')dnl 81 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/live.yaml.m4: -------------------------------------------------------------------------------- 1 | include(../../../script/loop.m4) 2 | include(configure.m4) 3 | include(platform.m4) 4 | 5 | loopifdef(LIDX,0,``LIVE_'defn(`LIDX')_CPU',` 6 | 7 | apiVersion: apps/v1 8 | kind: Deployment 9 | metadata: 10 | name: live-defn(`LIDX') 11 | labels: 12 | app: live-defn(`LIDX') 13 | spec: 14 | replicas: ifelse(defn(`SCENARIO'),`cdn',eval(defn(`LIDX')/dev/null || (kubectl delete secret self-signed-certificate; create_secret) 20 | 21 | for i in $(find "$DIR" "$DIR/.." -maxdepth 1 -name "*.yaml"); do 22 | kubectl apply -f "$i" 23 | done 24 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/stop.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | EXT=*.yaml 5 | 6 | # Set Bash color 7 | ECHO_PREFIX_INFO="\033[1;32;40mINFO...\033[0;0m" 8 | ECHO_PREFIX_ERROR="\033[1;31;40mError...\033[0;0m" 9 | 10 | # Try command for test command result. 11 | function try_command { 12 | "$@" 13 | status=$? 
14 | if [ $status -ne 0 ]; then 15 | echo -e $ECHO_PREFIX_ERROR "ERROR with \"$@\", Return status $status." 16 | exit $status 17 | fi 18 | return $status 19 | } 20 | 21 | try_command hash kubectl > /dev/null 22 | 23 | for i in $(find "$DIR" "$DIR/.." -maxdepth 1 -name "*.yaml"); do 24 | kubectl delete --wait=false -f "$i" 2> /dev/null 25 | done 26 | 27 | kubectl delete secret self-signed-certificate 2> /dev/null || echo -n "" 28 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/video-archive-pvc.yaml.m4: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: PersistentVolumeClaim 4 | metadata: 5 | name: video-archive 6 | spec: 7 | accessModes: 8 | - ReadWriteMany 9 | storageClassName: video-archive 10 | resources: 11 | requests: 12 | storage: defn(`VIDEO_ARCHIVE_VOLUME_SIZE')Gi 13 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/video-cache-pvc.yaml.m4: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: PersistentVolumeClaim 4 | metadata: 5 | name: video-cache 6 | spec: 7 | accessModes: 8 | - ReadWriteMany 9 | storageClassName: video-cache 10 | resources: 11 | requests: 12 | storage: defn(`VIDEO_CACHE_VOLUME_SIZE')Gi 13 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/xcode.yaml.m4: -------------------------------------------------------------------------------- 1 | include(../../../script/loop.m4) 2 | include(configure.m4) 3 | include(platform.m4) 4 | 5 | loop(DEVICEIDX,0,eval(defn(`HW_DEVICE_NUM')-1),` 6 | 7 | apiVersion: apps/v1 8 | kind: Deployment 9 | metadata: 10 | name: xcode-defn(`DEVICEIDX') 11 | labels: 12 | app: xcode-defn(`DEVICEIDX') 13 | spec: 14 | replicas: defn(`NVODS') 15 | selector: 16 | matchLabels: 17 | app: xcode-defn(`DEVICEIDX') 18 | 
template: 19 | metadata: 20 | labels: 21 | app: xcode-defn(`DEVICEIDX') 22 | spec: 23 | enableServiceLinks: false 24 | containers: 25 | - name: xcode-defn(`DEVICEIDX') 26 | image: defn(`REGISTRY_PREFIX')`tc_xcode_'defn(`PLATFORM_SUFFIX'):latest 27 | imagePullPolicy: IfNotPresent 28 | ifelse(defn(`SCENARIO'),`cdn',,`dnl 29 | resources: 30 | limits: 31 | cpu: eval(defn(`VOD_CPU')*4) 32 | memory: eval(defn(`VOD_MEMORY')*4)Mi 33 | requests: 34 | cpu: eval(defn(`VOD_CPU')*2) 35 | memory: eval(defn(`VOD_MEMORY')*2)Mi 36 | ')dnl 37 | env: 38 | - name: HW_ACC_TYPE 39 | value: ifelse(defn(`PLATFORM'),`Xeon',"sw","defn(`HW_ACC_PLUGIN_TYPE')") 40 | ifelse(defn(`PLATFORM'),`Xeon',,`dnl 41 | - name: HW_DEVICE 42 | value: "`/dev/dri/renderD'eval(defn(`DEVICEIDX')+128)" 43 | ')dnl 44 | - name: `SCENARIO' 45 | value: "defn(`SCENARIO')" 46 | - name: NO_PROXY 47 | value: "*" 48 | - name: no_proxy 49 | value: "*" 50 | volumeMounts: 51 | - mountPath: /var/www/video 52 | name: video-cache 53 | - mountPath: /var/www/archive 54 | name: video-archive 55 | readOnly: true 56 | defn(`PLATFORM_RESOURCES')dnl 57 | initContainers: 58 | - image: busybox:latest 59 | imagePullPolicy: IfNotPresent 60 | name: init 61 | command: ["sh", "-c", "chown -R 1000:1000 /var/www/video"] 62 | volumeMounts: 63 | - mountPath: /var/www/video 64 | name: video-cache 65 | volumes: 66 | - name: video-cache 67 | persistentVolumeClaim: 68 | claimName: video-cache 69 | - name: video-archive 70 | persistentVolumeClaim: 71 | claimName: video-archive 72 | PLATFORM_NODE_SELECTOR(`Xeon')dnl 73 | 74 | --- 75 | ') 76 | 77 | ifelse(defn(`SCENARIO'),`cdn',,` 78 | --- 79 | 80 | apiVersion: batch/v1 81 | kind: Job 82 | metadata: 83 | name: benchmark 84 | spec: 85 | template: 86 | spec: 87 | enableServiceLinks: false 88 | containers: 89 | - name: benchmark 90 | image: defn(`REGISTRY_PREFIX')tc_benchmark_service:latest 91 | imagePullPolicy: IfNotPresent 92 | env: 93 | - name: NO_PROXY 94 | value: "*" 95 | - name: no_proxy 96 | 
value: "*" 97 | volumeMounts: 98 | - mountPath: /var/www/archive 99 | name: video-archive 100 | readOnly: true 101 | - mountPath: /var/www/video 102 | name: video-cache 103 | volumes: 104 | - name: video-archive 105 | persistentVolumeClaim: 106 | claimName: video-archive 107 | - name: video-cache 108 | persistentVolumeClaim: 109 | claimName: video-cache 110 | restartPolicy: Never 111 | PLATFORM_NODE_SELECTOR(`Xeon')dnl 112 | ') 113 | -------------------------------------------------------------------------------- /deployment/kubernetes/yaml/zookeeper.yaml.m4: -------------------------------------------------------------------------------- 1 | include(platform.m4) 2 | include(configure.m4) 3 | 4 | apiVersion: v1 5 | kind: Service 6 | metadata: 7 | name: zookeeper-service 8 | labels: 9 | app: zookeeper 10 | spec: 11 | ports: 12 | - port: 2181 13 | protocol: TCP 14 | selector: 15 | app: zookeeper 16 | 17 | --- 18 | 19 | apiVersion: apps/v1 20 | kind: Deployment 21 | metadata: 22 | name: zookeeper 23 | labels: 24 | app: zookeeper 25 | spec: 26 | replicas: 1 27 | selector: 28 | matchLabels: 29 | app: zookeeper 30 | template: 31 | metadata: 32 | labels: 33 | app: zookeeper 34 | spec: 35 | enableServiceLinks: false 36 | containers: 37 | - name: zookeeper 38 | image: zookeeper:3.5.6 39 | imagePullPolicy: IfNotPresent 40 | ports: 41 | - containerPort: 2181 42 | env: 43 | - name: "ZOO_TICK_TIME" 44 | value: "10000" 45 | - name: "ZOO_MAX_CLIENT_CNXNS" 46 | value: "160000" 47 | - name: "ZOO_AUTOPURGE_PURGEINTERVAL" 48 | value: "1" 49 | - name: "ZOO_LOG4J_PROP" 50 | value: "ERROR" 51 | securityContext: 52 | runAsUser: 1000 53 | resources: 54 | requests: 55 | cpu: defn(`ZOOKEEPER_CPU') 56 | memory: defn(`ZOOKEEPER_MEMORY')Mi 57 | limits: 58 | cpu: eval(defn(`ZOOKEEPER_CPU')*2) 59 | memory: eval(defn(`ZOOKEEPER_MEMORY')*2)Mi 60 | PLATFORM_NODE_SELECTOR(`Xeon')dnl 61 | -------------------------------------------------------------------------------- 
/doc/CDN-Transcode-Sample-Arch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenVisualCloud/CDN-Transcode-Sample/093ecc7b4db5b8ef0e9fa93e70829b596ebb7cc2/doc/CDN-Transcode-Sample-Arch.png -------------------------------------------------------------------------------- /doc/cmake.md: -------------------------------------------------------------------------------- 1 | 2 | ### CMake Options: 3 | 4 | Use the following definitions to customize the building process: 5 | - **REGISTRY**: Specify the URL of the private docker registry. 6 | - **PLATFORM**: Specify the target platform: `Xeon` , `XeonE3` or `SG1`. 7 | - **SCENARIO**: Specify the sample scenario(s): `transcode` or `cdn`. 8 | - **NLIVES**: Specify the number of live streaming services in the deployment. 9 | - **NVODS**: Specify the number of vod transcoding services in the deployment. 10 | 11 | ### Examples: 12 | 13 | ``` 14 | cd build 15 | cmake -DPLATFORM=Xeon .. 16 | ``` 17 | 18 | ``` 19 | cd build 20 | cmake -DNVODS=1 -DPLATFORM=Xeon .. 21 | ``` 22 | 23 | ``` 24 | cd build 25 | cmake -DPLATFORM=Xeon -DSCENARIO=cdn .. 26 | ``` 27 | 28 | ### Make Commands: 29 | 30 | - **build**: Build the sample (docker) images. 31 | - **update**: Distribute the sample images to the worker nodes. 32 | - **start/stop_kubernetes**: Start/stop the sample orchestrated by Kubernetes. 33 | - **start/stop_helm**: Start/stop the sample orchestrated by Kubernetes Helm Charts. 
34 | 35 | -------------------------------------------------------------------------------- /kafka/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set(service "tc_kafka_service") 2 | include("${CMAKE_SOURCE_DIR}/script/service.cmake") 3 | -------------------------------------------------------------------------------- /kafka/Dockerfile: -------------------------------------------------------------------------------- 1 | 2 | FROM wurstmeister/kafka:2.12-2.4.0 3 | 4 | RUN sed -i 's/\/kafka\/kafka/\/opt\/kafka\/logs\/kafka/' /usr/bin/start-kafka.sh && \ 5 | mkdir /opt/kafka/logs 6 | 7 | RUN addgroup kafka && \ 8 | adduser -D -H -G kafka kafka && \ 9 | chown -R kafka:kafka /opt/kafka 10 | RUN chown -R kafka:kafka /opt/kafka/* 11 | 12 | USER kafka 13 | -------------------------------------------------------------------------------- /kafka/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="tc_kafka_service" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . "${DIR}/../script/build.sh" 7 | -------------------------------------------------------------------------------- /kafka/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | IMAGE="tc_kafka_service" 4 | DIR=$(dirname $(readlink -f "$0")) 5 | 6 | . "${DIR}/../script/shell.sh" 7 | -------------------------------------------------------------------------------- /script/Kubernetes_remove.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | # Set Bash color 4 | ECHO_PREFIX_INFO="\033[1;32;40mINFO...\033[0;0m" 5 | ECHO_PREFIX_ERROR="\033[1;31;40mError...\033[0;0m" 6 | 7 | # Try command for test command result. 8 | function try_command { 9 | "$@" 10 | status=$? 11 | if [ $status -ne 0 ]; then 12 | echo -e $ECHO_PREFIX_ERROR "ERROR with \"$@\", Return status $status." 
13 | exit $status 14 | fi 15 | return $status 16 | } 17 | 18 | 19 | # This script must be run as root 20 | if [[ $EUID -ne 0 ]]; then 21 | echo -e $ECHO_PREFIX_ERROR "This script must be run as root!" 1>&2 22 | exit 1 23 | fi 24 | 25 | 26 | #detect system arch. 27 | ULONG_MASK=`getconf ULONG_MAX` 28 | if [ $ULONG_MASK == 18446744073709551615 ]; then 29 | SYSARCH=64 30 | else 31 | echo -e $ECHO_PREFIX_ERROR "This package does not support 32-bit system.\n" 32 | exit 1 33 | fi 34 | 35 | # Kubeadm reset 36 | try_command kubeadm reset 37 | try_command iptables -F && iptables -t nat -F && iptables -t mangle -F && iptables -X 38 | 39 | # Remove Package 40 | try_command lsb_release -si > /dev/null 41 | 42 | LINUX_DISTRO=`lsb_release -si` 43 | 44 | if [ "$LINUX_DISTRO" == "Ubuntu" ]; then 45 | try_command apt-get purge kubelet kubeadm kubectl 46 | try_command apt -y autoremove 47 | elif [ "$LINUX_DISTRO" == "CentOS" ]; then 48 | try_command yum autoremove kubelet kubeadm kubectl 49 | else 50 | echo -e $ECHO_PREFIX_INFO "The removal will be cancelled." 51 | echo -e $ECHO_PREFIX_INFO "The CDN-Transcode-Sample does not support this OS, please use Ubuntu 18.04 or CentOS 7.6.\n" 52 | exit 1 53 | fi 54 | 55 | echo -e $ECHO_PREFIX_INFO "removal completed." 56 | -------------------------------------------------------------------------------- /script/Kubernetes_setup_master.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | # Set Bash color 4 | ECHO_PREFIX_INFO="\033[1;32;40mINFO...\033[0;0m" 5 | ECHO_PREFIX_ERROR="\033[1;31;40mError...\033[0;0m" 6 | 7 | # Try command for test command result. 8 | function try_command { 9 | "$@" 10 | status=$? 11 | if [ $status -ne 0 ]; then 12 | echo -e $ECHO_PREFIX_ERROR "ERROR with \"$@\", Return status $status." 
13 | exit $status 14 | fi 15 | return $status 16 | } 17 | 18 | 19 | # This script must be run as root 20 | if [[ $EUID -ne 0 ]]; then 21 | echo -e $ECHO_PREFIX_ERROR "This script must be run as root!" 1>&2 22 | exit 1 23 | fi 24 | 25 | 26 | #detect system arch. 27 | ULONG_MASK=`getconf ULONG_MAX` 28 | if [ $ULONG_MASK == 18446744073709551615 ]; then 29 | SYSARCH=64 30 | else 31 | echo -e $ECHO_PREFIX_ERROR "This package does not support 32-bit system.\n" 32 | exit 1 33 | fi 34 | 35 | # Kubeadm reset 36 | if [ -f /usr/bin/kubeadm ]; then 37 | try_command kubeadm reset 38 | try_command iptables -F && iptables -t nat -F && iptables -t mangle -F && iptables -X 39 | fi 40 | 41 | # Install packages 42 | # Set Proxy if need 43 | proxy_http=$http_proxy 44 | proxy_https=$https_proxy 45 | export http_proxy=$proxy_http 46 | export https_proxy=$proxy_https 47 | 48 | if [[ -z `grep "swapoff -a" "${HOME}/.bashrc"` ]]; then 49 | echo "swapoff -a" >> "${HOME}/.bashrc" 50 | fi 51 | 52 | try_command swapoff -a 53 | 54 | try_command lsb_release -si > /dev/null 55 | 56 | LINUX_DISTRO=`lsb_release -si` 57 | 58 | if [ "$LINUX_DISTRO" == "Ubuntu" ]; then 59 | try_command apt-get update && apt-get install -y apt-transport-https curl 60 | try_command curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add - 61 | try_command cat </etc/apt/sources.list.d/kubernetes.list 62 | deb https://apt.kubernetes.io/ kubernetes-xenial main 63 | EOF 64 | try_command apt-get update 65 | try_command apt-get install -y kubelet=1.18.2-00 kubeadm=1.18.2-00 kubectl=1.18.2-00 openssh-client fabric 66 | try_command apt-mark hold kubelet kubeadm kubectl 67 | elif [ "$LINUX_DISTRO" == "CentOS" ]; then 68 | cat < /etc/yum.repos.d/kubernetes.repo 69 | [kubernetes] 70 | name=Kubernetes 71 | baseurl=https://packages.cloud.google.com/yum/repos/kubernetes-el7-x86_64 72 | enabled=1 73 | gpgcheck=1 74 | repo_gpgcheck=1 75 | gpgkey=https://packages.cloud.google.com/yum/doc/yum-key.gpg 
https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg 76 | exclude=kube* 77 | EOF 78 | try_command yum install -y kubelet-1.18.2-0 kubeadm-1.18.2-0 kubectl-1.18.2-0 openssh-clients fabric --disableexcludes=kubernetes 79 | else 80 | echo -e $ECHO_PREFIX_INFO "The installation will be cancelled." 81 | echo -e $ECHO_PREFIX_INFO "The CDN-Transcode-Sample does not support this OS, please use Ubuntu 18.04 or CentOS 7.6.\n" 82 | exit 1 83 | fi 84 | 85 | try_command systemctl enable --now kubelet 86 | try_command systemctl start kubelet 87 | 88 | try_command modprobe br_netfilter 89 | 90 | try_command cat < /etc/sysctl.d/k8s.conf 91 | net.bridge.bridge-nf-call-ip6tables = 1 92 | net.bridge.bridge-nf-call-iptables = 1 93 | EOF 94 | try_command sysctl --system 95 | 96 | # Docker cgroupdriver 97 | try_command mkdir -p /etc/docker 98 | try_command cat > /etc/docker/daemon.json <&2 22 | exit 1 23 | fi 24 | 25 | 26 | #detect system arch. 27 | ULONG_MASK=`getconf ULONG_MAX` 28 | if [ $ULONG_MASK == 18446744073709551615 ]; then 29 | SYSARCH=64 30 | else 31 | echo -e $ECHO_PREFIX_ERROR "This package does not support 32-bit system.\n" 32 | exit 1 33 | fi 34 | 35 | # Kubeadm reset 36 | if [ -f /usr/bin/kubeadm ]; then 37 | try_command kubeadm reset 38 | try_command iptables -F && iptables -t nat -F && iptables -t mangle -F && iptables -X 39 | fi 40 | 41 | # Install packages 42 | # Set Proxy if need 43 | if [[ -z `grep "swapoff -a" "${HOME}/.bashrc"` ]]; then 44 | echo "swapoff -a" >> "${HOME}/.bashrc" 45 | fi 46 | 47 | try_command swapoff -a 48 | 49 | try_command lsb_release -si > /dev/null 50 | 51 | LINUX_DISTRO=`lsb_release -si` 52 | 53 | if [ "$LINUX_DISTRO" == "Ubuntu" ]; then 54 | try_command apt-get update && apt-get install -y apt-transport-https curl 55 | try_command curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add - 56 | try_command cat </etc/apt/sources.list.d/kubernetes.list 57 | deb https://apt.kubernetes.io/ kubernetes-xenial main 
58 | EOF 59 | try_command apt-get update 60 | try_command apt-get install -y kubelet=1.18.2-00 kubeadm=1.18.2-00 kubectl=1.18.2-00 61 | try_command apt-mark hold kubelet kubeadm kubectl 62 | elif [ "$LINUX_DISTRO" == "CentOS" ]; then 63 | cat < /etc/yum.repos.d/kubernetes.repo 64 | [kubernetes] 65 | name=Kubernetes 66 | baseurl=https://packages.cloud.google.com/yum/repos/kubernetes-el7-x86_64 67 | enabled=1 68 | gpgcheck=1 69 | repo_gpgcheck=1 70 | gpgkey=https://packages.cloud.google.com/yum/doc/yum-key.gpg https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg 71 | exclude=kube* 72 | EOF 73 | try_command yum install -y kubelet-1.18.2-0 kubeadm-1.18.2-0 kubectl-1.18.2-0 --disableexcludes=kubernetes 74 | else 75 | echo -e $ECHO_PREFIX_INFO "The installation will be cancelled." 76 | echo -e $ECHO_PREFIX_INFO "The CDN-Transcode-Sample does not support this OS, please use Ubuntu 18.04 or CentOS 7.6.\n" 77 | exit 1 78 | fi 79 | 80 | try_command systemctl enable --now kubelet 81 | try_command systemctl start kubelet 82 | 83 | try_command modprobe br_netfilter 84 | 85 | try_command cat < /etc/sysctl.d/k8s.conf 86 | net.bridge.bridge-nf-call-ip6tables = 1 87 | net.bridge.bridge-nf-call-iptables = 1 88 | EOF 89 | try_command sysctl --system 90 | 91 | # Docker cgroupdriver 92 | try_command mkdir -p /etc/docker 93 | try_command cat > /etc/docker/daemon.json < "$docker_file" 18 | fi 19 | (cd "$DIR"; docker build --network host --file="$docker_file" "$@" -t "$image_name" "$DIR" $(env | cut -f1 -d= | grep -E '_(proxy|REPO|VER)$' | sed 's/^/--build-arg /') --build-arg UID=$(id -u) --build-arg GID=$(id -g)) 20 | 21 | # if REGISTRY is specified, push image to the private registry 22 | if [ -n "$REGISTRY" ]; then 23 | docker tag "$image_name" "$REGISTRY$image_name" 24 | docker push "$REGISTRY$image_name" 25 | fi 26 | } 27 | 28 | # build image(s) in order (to satisfy dependencies) 29 | #for dep in .8 .7 .6 .5 .4 .3 .2 .1 ''; do 30 | for dep in '.5.*' '.4.*' '.3.*' '.2.*' 
'.1.*' '.0.*' ''; do 31 | dirs=("$DIR/$PLATFORM" "$DIR") 32 | for dockerfile in $(find "${dirs[@]}" -maxdepth 1 -name "Dockerfile$dep" -print 2>/dev/null); do 33 | echo ${dirs[@]} 34 | image=$(head -n 1 "$dockerfile" | grep '# ' | cut -d' ' -f2) 35 | if test -z "$image"; then image="$IMAGE"; fi 36 | build_docker "$dockerfile" "$image" 37 | done 38 | done 39 | -------------------------------------------------------------------------------- /script/cadvisor.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | docker run \ 4 | --volume=/:/rootfs:ro \ 5 | --volume=/var/run:/var/run:rw \ 6 | --volume=/sys:/sys:ro \ 7 | --volume=/var/lib/docker/:/var/lib/docker:ro \ 8 | --publish=8087:8080 \ 9 | --detach=true \ 10 | --name=cadvisor \ 11 | google/cadvisor:latest 12 | -------------------------------------------------------------------------------- /script/deployment.cmake: -------------------------------------------------------------------------------- 1 | add_custom_target(start_${service} "${CMAKE_CURRENT_SOURCE_DIR}/start.sh" "${service}" "${NVODS}" "${NLIVES}" "${SCENARIO}" "${PLATFORM}" "${REGISTRY}") 2 | add_custom_target(stop_${service} "${CMAKE_CURRENT_SOURCE_DIR}/stop.sh" "${service}") 3 | -------------------------------------------------------------------------------- /script/enable_gpu_plugin.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | # Set Bash color 4 | ECHO_PREFIX_INFO="\033[1;32;40mINFO...\033[0;0m" 5 | ECHO_PREFIX_ERROR="\033[1;31;40mError...\033[0;0m" 6 | 7 | # Try command for test command result. 8 | function try_command { 9 | "$@" 10 | status=$? 11 | if [ $status -ne 0 ]; then 12 | echo -e $ECHO_PREFIX_ERROR "ERROR with \"$@\", Return status $status." 
13 | exit $status 14 | fi 15 | return $status 16 | } 17 | 18 | 19 | # This script must be run as root 20 | if [[ $EUID -ne 0 ]]; then 21 | echo -e $ECHO_PREFIX_ERROR "This script must be run as root!" 1>&2 22 | exit 1 23 | fi 24 | 25 | 26 | #detect system arch. 27 | ULONG_MASK=`getconf ULONG_MAX` 28 | if [ $ULONG_MASK == 18446744073709551615 ]; then 29 | SYSARCH=64 30 | else 31 | echo -e $ECHO_PREFIX_ERROR "This package does not support 32-bit system.\n" 32 | exit 1 33 | fi 34 | 35 | # Build gpu plugin 36 | if [ -f go1.11.2.linux-amd64.tar.gz ]; then 37 | try_command rm go1.11.2.linux-amd64.tar.gz 38 | fi 39 | try_command wget https://dl.google.com/go/go1.11.2.linux-amd64.tar.gz 40 | try_command tar -C /usr/local -xzf go1.11.2.linux-amd64.tar.gz 41 | try_command rm go1.11.2.linux-amd64.tar.gz 42 | try_command export PATH=$PATH:/usr/local/go/bin 43 | try_command go version 44 | try_command mkdir -p /usr/local/go/src/github.com/intel 45 | try_command cd /usr/local/go/src/github.com/intel 46 | if [ -d intel-device-plugins-for-kubernetes ]; then 47 | try_command rm -rf intel-device-plugins-for-kubernetes 48 | fi 49 | try_command git clone https://github.com/intel/intel-device-plugins-for-kubernetes.git 50 | try_command cd intel-device-plugins-for-kubernetes 51 | try_command make 52 | -------------------------------------------------------------------------------- /script/enable_nat.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | sysctl -w net.ipv4.ip_forward=1 3 | [ ${UID} -ne 0 ] && echo "Please run as root!!" 
&& exit 1 4 | for (( i=1; i<=6; i++)) 5 | do 6 | iptables -t nat -A POSTROUTING -s 172.31.$i.0/24 -d 0/0 -j MASQUERADE 7 | iptables -I FORWARD -j ACCEPT -i eth$(($i - 1)) 8 | iptables -I FORWARD -j ACCEPT -o eth$(($i - 1)) 9 | done 10 | echo "Done" 11 | -------------------------------------------------------------------------------- /script/install_dependency.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | # Set Bash color 4 | ECHO_PREFIX_INFO="\033[1;32;40mINFO...\033[0;0m" 5 | ECHO_PREFIX_ERROR="\033[1;31;40mError...\033[0;0m" 6 | 7 | # Try command for test command result. 8 | function try_command { 9 | "$@" 10 | status=$? 11 | if [ $status -ne 0 ]; then 12 | echo -e $ECHO_PREFIX_ERROR "ERROR with \"$@\", Return status $status." 13 | exit $status 14 | fi 15 | return $status 16 | } 17 | 18 | 19 | # This script must be run as root 20 | if [[ $EUID -ne 0 ]]; then 21 | echo -e $ECHO_PREFIX_ERROR "This script must be run as root!" 1>&2 22 | exit 1 23 | fi 24 | 25 | 26 | # Detect system arch. 
27 | ULONG_MASK=`getconf ULONG_MAX` 28 | if [ $ULONG_MASK == 18446744073709551615 ]; then 29 | SYSARCH=64 30 | else 31 | echo -e $ECHO_PREFIX_ERROR "This package does not support 32-bit system.\n" 32 | exit 1 33 | fi 34 | 35 | 36 | if [ `cat /etc/os-release | grep -E "CentOS" | wc -l` -ne 0 ]; then 37 | try_command yum -y install redhat-lsb-core 38 | elif [ `cat /etc/os-release | grep -E "Ubuntu" | wc -l` -ne 0 ]; then 39 | try_command apt-get update 40 | try_command apt-get install -y lsb-release 41 | fi 42 | 43 | try_command lsb_release -si > /dev/null 44 | 45 | LINUX_DISTRO=`lsb_release -si` 46 | 47 | if [ "$LINUX_DISTRO" == "Ubuntu" ]; then 48 | try_command apt-get install -y curl gnupg software-properties-common cmake 49 | apt-get remove -y docker docker-engine docker.io containerd runc 50 | try_command curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add - 51 | try_command apt-key fingerprint 0EBFCD88 52 | try_command add-apt-repository \ 53 | "deb [arch=amd64] https://download.docker.com/linux/ubuntu \ 54 | $(lsb_release -cs) \ 55 | stable" 56 | try_command apt-get update 57 | try_command apt-get install -y docker-ce docker-ce-cli containerd.io 58 | try_command curl -L "https://github.com/docker/compose/releases/download/1.24.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose 59 | try_command apt-get install -y python3-pip 60 | try_command pip3 install ruamel.yaml fabric3 61 | elif [ "$LINUX_DISTRO" == "CentOS" ]; then 62 | try_command yum install -y curl cmake 63 | yum remove docker docker-client docker-client-latest docker-common docker-latest docker-latest-logrotate docker-logrotate docker-engine 64 | try_command yum install -y yum-utils device-mapper-persistent-data lvm2 65 | try_command yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo 66 | try_command yum install -y docker-ce docker-ce-cli containerd.io 67 | try_command curl -L 
"https://github.com/docker/compose/releases/download/1.24.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose 68 | try_command yum install -y epel-release 69 | try_command yum install -y python36 python36-pip python3-devel 70 | try_command pip3 install ruamel.yaml fabric3 71 | else 72 | echo -e $ECHO_PREFIX_INFO "The installation will be cancelled." 73 | echo -e $ECHO_PREFIX_INFO "The CDN-Transcode-Sample does not support this OS, please use Ubuntu 18.04 or CentOS 7.6.\n" 74 | exit 1 75 | fi 76 | 77 | echo -e $ECHO_PREFIX_INFO "Installation completed." 78 | -------------------------------------------------------------------------------- /script/loop.m4: -------------------------------------------------------------------------------- 1 | define(`loop',`ifelse(eval($2<=$3),1,`pushdef(`$1',$2)$4`'loop(`$1',incr($2),$3,`$4')popdef(`$1')')')dnl 2 | define(`loopifdef',`pushdef(`$1',$2)ifdef($3,`$4loopifdef(`$1',incr($2),`$3',`$4')')popdef(`$1')')dnl 3 | define(`looplist',`pushdef(`$1',regexp($2,`\(\w+\)',`\1'))ifelse(regexp($2,`\w+'),-1,,`$3looplist(`$1',regexp($2,`\w+[/ ]*\(.*\)',`\1'),`$3')')popdef(`$1')')dnl 4 | -------------------------------------------------------------------------------- /script/nfs_setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | # Set Bash color 4 | ECHO_PREFIX_INFO="\033[1;32;40mINFO...\033[0;0m" 5 | ECHO_PREFIX_ERROR="\033[1;31;40mError...\033[0;0m" 6 | 7 | # Try command for test command result. 8 | function try_command { 9 | "$@" 10 | status=$? 11 | if [ $status -ne 0 ]; then 12 | echo -e $ECHO_PREFIX_ERROR "ERROR with \"$@\", Return status $status." 13 | exit $status 14 | fi 15 | return $status 16 | } 17 | 18 | 19 | # This script must be run as root 20 | if [[ $EUID -ne 0 ]]; then 21 | echo -e $ECHO_PREFIX_ERROR "This script must be run as root!" 1>&2 22 | exit 1 23 | fi 24 | 25 | 26 | #detect system arch. 
27 | ULONG_MASK=`getconf ULONG_MAX` 28 | if [ $ULONG_MASK == 18446744073709551615 ]; then 29 | SYSARCH=64 30 | else 31 | echo -e $ECHO_PREFIX_ERROR "This package does not support 32-bit system.\n" 32 | exit 1 33 | fi 34 | 35 | # Set up NFS 36 | try_command echo -e "$PWD/../volume/video/archive *(ro,sync,no_root_squash,no_all_squash,no_subtree_check)" > /etc/exports 37 | try_command echo -e "$PWD/../volume/video/cache *(rw,sync,no_root_squash,no_all_squash,no_subtree_check)" >> /etc/exports 38 | 39 | try_command lsb_release -si > /dev/null 40 | 41 | LINUX_DISTRO=`lsb_release -si` 42 | 43 | if [ "$LINUX_DISTRO" == "Ubuntu" ]; then 44 | DEBIAN_FRONTEND=noninteractive apt-get install -y nfs-kernel-server openssh-server 45 | try_command /etc/init.d/nfs-kernel-server restart 46 | elif [ "$LINUX_DISTRO" == "CentOS" ]; then 47 | try_command yum install -y rpcbind nfs-utils openssh-server 48 | try_command systemctl start rpcbind nfs-server sshd 49 | else 50 | echo -e $ECHO_PREFIX_INFO "The installation will be cancelled." 
#!/usr/bin/awk
# scan-yaml.awk
# Scan docker-compose / Kubernetes YAML streamed on input and print the set of
# container images that would be scheduled on a node whose labels are supplied
# via the awk variable "labels" (-v labels="k1:v1 k2:v2 ..."), or every image
# when labels=="*".  update-image.sh drives this script per cluster node.

BEGIN {
    im="";              # comma-separated images collected for the current YAML document
    n_space=c_space=0;  # indent anchors: >0 while inside a nodeAffinity: / containers: section
    matched=1;          # 1 while the current document's node constraints match "labels"
}

# Flush the images collected for the current document into the "images" set,
# keeping them only when the document's constraints matched (or labels=="*").
function saveim() {
    split(im,ims,",");
    for (i in ims) {
        if (ims[i]!="" && (matched || labels=="*")) {
            images[ims[i]]=1;
        }
    }
    im="";
    matched=1;
}

# Remember the column where a containers:/initContainers: section starts so
# that subsequent "image:" lines are treated as part of the same document.
/containers:/ {
    c_space=index($0,"containers:");
}

/initContainers:/ {
    c_space=index($0,"initContainers:");
}

# "image:" outside a containers section starts a new collection (compose style).
/image:/ && c_space==0 {
    saveim();
    im=$2;
}

# "image:" inside a containers section appends to the current document's list.
/image:/ && c_space>0 {
    im=im","$2
}

# VCAC_IMAGE env entries name an extra image to transfer as well.
/VCAC_IMAGE:/ {
    im=im","$2
}

# docker-swarm placement constraint "node.X==Y": drop the document when the
# node's labels do not contain the constraint.
/- node\..*==.*/ && labels!="*" {
    gsub(/[\" ]/,"",$2);
    if (index(labels,$2)==0) {
        im="";
        matched=0;
    }
}

# docker-swarm "node.X!=Y": rewrite to "==" and drop when the labels DO match.
/- node\..*!=.*/ && labels!="*" {
    gsub(/[\" ]/,"",$2);
    gsub(/!=/,"==",$2);
    if (index(labels,$2)!=0) {
        im="";
        matched=0;
    }
}

# Document separator (--- or blank line): reset section anchors and flush.
/^\s*---\s*$/ || /^\s*$/ {
    n_space=c_space=0;
    saveim();
}

# Kubernetes nodeAffinity matchExpressions: capture key/operator, which are
# combined with the value lines below.  The RLENGTH check ensures the line is
# nested deeper than the nodeAffinity: anchor.
/- key:/ && n_space>0 {
    match($0, /^ */);
    if (RLENGTH > n_space) key=$3
}

/operator:/ && n_space>0 {
    match($0, /^ */);
    if (RLENGTH > n_space) operator=$2
}

# A quoted value line under matchExpressions: build key:value and test it
# against the node labels according to the In/NotIn operator.
/- ".*"/ && n_space>0 {
    match($0, /^ */);
    if (RLENGTH > n_space) {
        label_eqn=key":"$2
        gsub(/[\" ]/,"",label_eqn);
        i=index(labels,label_eqn);
        if ((operator=="In" && i==0) || (operator=="NotIn" && i!=0)) {
            im=im2="";  # NOTE(review): im2 is never read anywhere else — looks vestigial
            matched=0;
        }
    }
}

/nodeAffinity:/ {
    n_space=index($0,"nodeAffinity:");
}

# Flush the last document and emit the de-duplicated image set.
END {
    saveim();
    for (im in images)
        print(im);
}
#!/bin/bash -e
# Common "shell into the service container" helper.  Service-specific shell.sh
# wrappers set DIR (their own directory) and IMAGE (the image/tag) and then
# source this script.  Optional: DOCKERFILE (defaults to ${DIR}/Dockerfile)
# and OPTIONS (extra docker run flags).

if test -z "${DIR}"; then
    echo "This script should not be called directly."
    # FIX: was "exit -1" — POSIX exit status must be 0-255 ("-1" is an
    # invalid operand in sh; bash truncates it to 255).  Use 1.
    exit 1
fi

# If a container of this image is already running and no explicit command was
# given, exec into it instead of starting a new one.
pid="$(docker ps -f ancestor=$IMAGE --format='{{.ID}}' | head -n 1)"
if [ -n "$pid" ] && [ "$#" -le "1" ]; then
    echo "bash into running container...$IMAGE"
    docker exec -it "$pid" ${*-/bin/bash}
else
    echo "bash into new container...$IMAGE"
    if test -z "$DOCKERFILE"; then
        DOCKERFILE="${DIR}/Dockerfile"
    fi
    args=("$@")
    # Forward proxy environment variables and the Dockerfile's ARG defaults
    # into the container as -e options; first CLI arg (if any) overrides the
    # entrypoint, the rest become its arguments.
    docker run --network=host ${OPTIONS[@]} $(env | grep -E '_(proxy)=' | sed 's/^/-e /') $(grep '^ARG .*=' "$DOCKERFILE" | sed 's/^ARG /-e /') --entrypoint ${1:-/bin/bash} -it "${IMAGE}" ${args[@]:1}
fi
#!/bin/bash -e
# Push locally-built images to every worker node of a docker-swarm or
# Kubernetes cluster, transferring only when the remote image ID differs.
# Uses scan-yaml.awk to work out which images each node actually needs.

# transfer_image IMAGE NODEID NODEIP LABELS
# Compares the local image ID with the one on the worker (directly, or via a
# jump host looked up in ~/.vcac-hosts or /etc/vcac-hosts) and streams the
# image over ssh with "docker save | docker load" when they differ.
function transfer_image {
    image="$1"
    nodeid="$2"
    nodeip="$3"

    # overwrite vcac username: VCAC accelerator nodes are reached as root
    case "$4" in
    *vcac-zone:yes*|*vcac_zone==yes*)
        worker="root@$nodeip";;
    *)
        worker="$nodeip";;
    esac

    echo "Update image: $image to $worker"
    # Local image ID; pull the image first if it is not present locally.
    sig1=$((docker image inspect -f {{.ID}} $image || ((docker pull $image 1>&2) && docker image inspect -f {{.ID}} $image)) | grep .)
    echo "	local: $sig1"

    # Optional "nodeid/nodeip -> jump-host" mapping file.
    hostfile="$HOME/.vcac-hosts"
    if [ ! -f "$hostfile" ]; then hostfile="/etc/vcac-hosts"; fi
    host=$(awk -v node="$nodeid/$nodeip" '$1==node{print$2}' "$hostfile" 2>/dev/null || true)
    if [ -z "$host" ]; then host=$(hostname); fi

    CONNECTION_TIMEOUT=1
    case "$(hostname -f)" in
    $host | $host.*) # direct access
        sig2=$(ssh -o ConnectTimeout=$CONNECTION_TIMEOUT $worker "docker image inspect -f {{.ID}} $image 2> /dev/null || echo" || true)
        echo "	remote: $sig2"

        if test "$sig1" != "$sig2"; then
            echo "Transfering image..."
            # Remove the stale remote image first, then stream the new one.
            (docker save $image | ssh -o ConnectTimeout=$CONNECTION_TIMEOUT $worker "docker image rm -f $image 2>/dev/null; docker load") || true
        fi;;
    *) # access via jump host
        sig2=$(ssh -o ConnectTimeout=$CONNECTION_TIMEOUT $host "ssh -o ConnectTimeout=$CONNECTION_TIMEOUT $worker \"docker image inspect -f {{.ID}} $image 2> /dev/null || echo\"" || true)
        echo "	remote: $sig2"

        if test "$sig1" != "$sig2"; then
            echo "Transfering image..."
            (docker save $image | ssh -o ConnectTimeout=$CONNECTION_TIMEOUT $host "ssh -o ConnectTimeout=$CONNECTION_TIMEOUT $worker \"docker image rm -f $image 2>/dev/null; docker load\"") || true
        fi;;
    esac
    echo ""
}

DIR=$(dirname $(readlink -f "$0"))

# --- docker-swarm: iterate ready+active worker nodes (the || at the end keeps
# set -e from aborting when no swarm is configured).
docker node ls > /dev/null 2> /dev/null && (
    echo "Updating docker-swarm nodes..."
    for id in $(docker node ls -q 2> /dev/null); do
        ready="$(docker node inspect -f {{.Status.State}} $id)"
        active="$(docker node inspect -f {{.Spec.Availability}} $id)"
        nodeip="$(docker node inspect -f {{.Status.Addr}} $id)"
        # Rewrite the Go map dump "map[k:v ...]" into "node.labels.k==v ..."
        # so scan-yaml.awk can match placement constraints against it.
        labels="$(docker node inspect -f {{.Spec.Labels}} $id | sed 's/map\[/node.labels./' | sed 's/\]$//' | sed 's/ / node.labels./g' | sed 's/:/==/g')"
        role="$(docker node inspect -f {{.Spec.Role}} $id)"

        if test "$ready" = "ready"; then
            if test "$active" = "active"; then
                # skip unavailable or manager node
                if test -z "$(hostname -I | grep --fixed-strings $nodeip)"; then
                    for image in $(awk -v labels="$labels node.role==${role}" -f "$DIR/scan-yaml.awk" "${DIR}/../deployment/docker-swarm/docker-compose.yml"); do
                        transfer_image $image "$id" "$nodeip" "$labels"
                    done
                fi
            fi
        fi
    done
) || echo -n ""

# --- Kubernetes: iterate Ready non-master nodes.
kubectl get node >/dev/null 2>/dev/null && (
    echo "Updating Kubernetes nodes..."
    for id in $(kubectl get nodes --selector='!node-role.kubernetes.io/master' 2> /dev/null | grep ' Ready ' | cut -f1 -d' '); do
        nodeip="$(kubectl describe node $id | grep InternalIP | sed -E 's/[^0-9]+([0-9.]+)$/\1/')"
        # Extract the Labels: section of "kubectl describe" as "key:value" lines.
        labels="$(kubectl describe node $id | awk '/Annotations:/{lf=0}/Labels:/{sub("Labels:","",$0);lf=1}lf==1{sub("=",":",$1);print$1}')"

        for image in $(awk -v labels="$labels" -f "$DIR/scan-yaml.awk" "${DIR}/../deployment/kubernetes/yaml"/*.yaml); do
            transfer_image $image "$id" "$nodeip" "$labels"
        done
    done
) || echo -n ""
# Streaming-server nginx configuration: RTMP ingest plus HLS/DASH/plain-file
# HTTP delivery with a local proxy cache.

worker_processes auto;
daemon off;  # run in the foreground so docker can supervise the process

events {
    worker_connections 4096;
}

rtmp {
    server {
        listen 1935;
        chunk_size 4000;

        # On-demand pull: when a player requests rtmp://.../stream/<name>,
        # spawn ffmpeg to remux the local MP4 into this application.
        application stream {
            live on;
            exec_options on;
            exec_pull ffmpeg -re -i http://localhost/$name.mp4 -c:v copy -an -f flv rtmp://localhost/$app/$name;
        }

        # Incoming live streams are segmented to HLS under /var/www/hls.
        application hls {
            live on;
            hls on;
            hls_path /var/www/hls;
            hls_nested on;              # one subdirectory per stream name
            hls_fragment 3;             # seconds per segment
            hls_playlist_length 60;     # seconds of playlist history
        }

        # Same for MPEG-DASH under /var/www/dash.
        application dash {
            live on;
            dash on;
            dash_path /var/www/dash;
            dash_fragment 3;
            dash_playlist_length 60;
            dash_nested on;
        }
    }
}

http {
    include mime.types;
    default_type application/octet-stream;
    directio 512;
    sendfile on;
    tcp_nopush on;
    tcp_nodelay on;
    keepalive_timeout 65;
    aio on;

    ssl_ciphers HIGH:!aNULL:!MD5;
    # NOTE(review): TLSv1/TLSv1.1 are deprecated protocols — consider
    # restricting to TLSv1.2+ unless legacy clients must be supported.
    ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
    ssl_session_cache shared:SSL:10m;
    ssl_session_timeout 10m;

    proxy_cache_path /var/www/cache levels=1:2 keys_zone=one:10m use_temp_path=off;

    server {
        listen 80;

        # proxy cache settings
        proxy_cache one;
        proxy_no_cache $http_pragma $http_authorization;
        proxy_cache_bypass $cookie_nocache $arg_nocache$arg_comment;
        proxy_cache_valid 200 302 10m;
        proxy_cache_valid 303 1m;

        # Browseable file listing for the raw content directory.
        location / {
            root /data/www/file;
            autoindex on;
            autoindex_exact_size off;
            autoindex_localtime on;
        }

        # HLS segments/playlists; CORS headers allow cross-origin players.
        location /hls/ {
            root /var/www;
            add_header Cache-Control no-cache;
            add_header 'Access-Control-Allow-Origin' '*' always;
            add_header 'Access-Control-Expose-Headers' 'Content-Length';
            types {
                application/vnd.apple.mpegurl m3u8;
                video/mp2t ts;
            }
        }

        # DASH manifests; segments share the same root.
        location /dash/ {
            root /var/www;
            add_header Cache-Control no-cache;
            add_header 'Access-Control-Allow-Origin' '*' always;
            add_header 'Access-Control-Expose-Headers' 'Content-Length';
            types {
                application/dash+xml mpd;
            }
        }

    }
}
# tc_xcode_xeon
# Transcode worker image for plain Xeon (software/CPU) encoding.

FROM openvisualcloud/xeon-ubuntu1804-media-ffmpeg:21.3
# Python runtime deps for main.py: tornado, kafka client, zookeeper client,
# psutil (per-process CPU/memory stats).  apt lists removed to keep the layer small.
RUN DEBIAN_FRONTEND=noninteractive apt-get update && apt-get install -y -q --no-install-recommends python3-tornado python3-kafka python3-kazoo python3-psutil && rm -rf /var/lib/apt/lists/*

# Shared helper modules come from the tc_common build stage.
COPY --from=tc_common /home/ /home/
COPY *.py /home/
CMD ["/home/main.py"]
WORKDIR /home

####
# Run as the invoking user's UID/GID (passed at build time) instead of root.
ARG UID
ARG GID
## must use ; here to ignore user exist status code
RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} docker; \
    [ ${UID} -gt 0 ] && useradd -d /home/docker -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} docker; \
    chown -R ${UID}:${GID} /home
USER ${UID}
####
#!/usr/bin/python3
"""Transcode worker.

Consumes VOD/live transcoding jobs from the ``content_provider_sched`` Kafka
topic, runs ffmpeg (command built by :class:`FFMpegCmd`), publishes per-frame
progress and (in the "encode" scenario) VMAF quality scores to the
``transcoding`` topic, and uses ZooKeeper (:class:`ZKState`) so that each
stream is processed by exactly one worker.
"""

from os.path import isfile
from subprocess import call
import subprocess
from os import makedirs
from zkstate import ZKState
from messaging import Consumer, Producer
from ffmpegcmd import FFMpegCmd
import traceback
import time
import json
import re
from datetime import datetime, timedelta
import random
import psutil
import os

KAFKA_TOPIC = "content_provider_sched"
KAFKA_GROUP = "content_provider_dash_hls_creator"
KAFKA_WORKLOAD_TOPIC = "transcoding"

ARCHIVE_ROOT = "/var/www/archive"
VIDEO_ROOT = "/var/www/video/"
DASH_ROOT = "/var/www/video/dash"
HLS_ROOT = "/var/www/video/hls"
MP4_ROOT = "/var/www/video/mp4"
CACHE_RAW_YUV_ROOT = "/var/www/video/rawyuv"
CACHE_DECODED_YUV_ROOT = "/var/www/video/decodedyuv"

HW_ACC_TYPE = os.getenv("HW_ACC_TYPE", "sw")
HW_DEVICE = os.getenv("HW_DEVICE", None)
SCENARIO = os.getenv("SCENARIO", "transcode")

# Parses ffmpeg's stderr progress lines, e.g.
#   "frame=  123 fps= 25.0 ... time=00:00:05.12 ... speed=1.02x"
# FIX: the group names were missing ("(?P\d+)"), which is invalid regex
# syntax and broke every named-group lookup in get_fps().
fps_regex = re.compile(
    r"\s*frame=\s*(?P<frame_count>\d+)\s*fps=\s*(?P<fps>\d+\.?\d*).*"
    r"time=(?P<duration>\d+:\d+:\d+\.\d+).*speed=\s*(?P<speed>\d+\.\d+)x")

producer = Producer()


def get_fps(next_line, start_time):
    """Parse one ffmpeg stderr line into a progress-stats dict.

    Returns a dict with fps/speed/frames/start/duration/end/status keys when
    the line is a progress line, otherwise an empty dict.  When ffmpeg reports
    a negative fps, it is reconstructed from frame count, elapsed media time
    and speed.
    """
    matched = fps_regex.match(next_line)
    if (matched):
        fps = float(matched.group('fps'))
        speed = float(matched.group("speed"))
        frame_count = int(matched.group("frame_count"))
        time_value = datetime.strptime(
            matched.group("duration"), "%H:%M:%S.%f")
        duration = timedelta(
            hours=time_value.hour,
            minutes=time_value.minute,
            seconds=time_value.second,
            microseconds=time_value.microsecond)
        if fps < 0:
            fps = (frame_count / (duration.total_seconds())) * speed
        now = time.time()
        return {"fps": round(fps, 1), "speed": round(speed, 3),
                "frames": frame_count, "start": round(start_time, 3),
                "duration": round(now - start_time, 3), "end": round(now, 3),
                "status": "active"}
    return {}


def execute(idx, name, cmd, kafka=True):
    """Run ``cmd`` (an ffmpeg argv list), streaming progress stats.

    Stats are printed for every progress line and, when ``kafka`` is True,
    also published to KAFKA_WORKLOAD_TOPIC.  FIX: the ``kafka`` flag was
    previously accepted but ignored — decode_yuv()/measure_quality_vamf()
    pass kafka=False and their internal runs were published anyway.
    Returns the process return code (0 on success).
    """
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                         bufsize=1, universal_newlines=True)
    p.poll()
    start_time = time.time()
    sinfo = {"id": str(idx), "stream": name}
    p1 = psutil.Process(p.pid)
    while p.returncode is None:
        next_line = p.stderr.readline()
        r = get_fps(next_line, start_time)
        if r:
            sinfo.update({"cpu": round(p1.cpu_percent(), 2),
                          "mem": round(p1.memory_percent(), 2)})
            sinfo.update(r)
            print(sinfo, flush=True)
            if kafka:
                try:
                    producer.send(KAFKA_WORKLOAD_TOPIC, json.dumps(sinfo))
                except Exception as e:
                    print("Exception: {}".format(e))
            continue
        p.poll()
    # Final status record: non-zero return code means the transcode aborted.
    try:
        if p.returncode:
            sinfo.update({"status": "aborted"})
        else:
            sinfo.update({"status": "completed"})
        print(sinfo, flush=True)
        if kafka:
            producer.send(KAFKA_WORKLOAD_TOPIC, json.dumps(sinfo))
    except Exception as e:
        print("Exception: {}".format(e))
    return p.returncode


def decode_yuv(yuv_path, in_stream_name, nframes=None):
    """Decode an MP4 into raw YUV under ``yuv_path`` and return the yuv path.

    Skips decoding when the target .yuv already exists; ``nframes`` limits
    the number of decoded frames when given.
    """
    yuv_name = yuv_path + "/" + in_stream_name.split("/")[-1].replace(".mp4", ".yuv")
    try:
        # FIX: was "os.path.exists(path)" with "path" undefined (NameError).
        if not os.path.exists(yuv_path):
            makedirs(yuv_path)
        if os.path.exists(yuv_name):
            return yuv_name
        # FIX: "-vcodec rawvideo" was a single argv element, which ffmpeg
        # rejects as an unknown option; split into two list items.
        cmd = ["ffmpeg", "-hide_banner", "-i", in_stream_name,
               "-vcodec", "rawvideo", "-an"]
        if nframes:
            cmd += ["-frames:v", str(nframes)]
        cmd += ["-y", yuv_name]
        print(cmd, flush=True)
        execute(5001, "decoded", cmd, kafka=False)
    except Exception as e:
        print("Exception: {}".format(e))
    return yuv_name


def measure_quality_vamf(idx, name, raw_mp4_path, target_mp4_path, width, height, nframes=100):
    """Compute the VMAF score of ``target_mp4_path`` against the source.

    Uses the 4K model for >=1920x1080 targets, publishes the pooled mean
    score to KAFKA_WORKLOAD_TOPIC, and returns it (None on failure).
    """
    vmaf_score = None
    model_path = "/home/models/"
    try:
        if width >= 1920 and height >= 1080:
            model_name = model_path + "vmaf_4k_v0.6.1.json"
        else:
            model_name = model_path + "vmaf_v0.6.1.json"
        log_path = target_mp4_path + ".json"
        framerate = 24  # assumes 24fps content — TODO confirm against source streams
        cmd = ["ffmpeg", "-r", str(framerate), "-i", raw_mp4_path,
               "-r", str(framerate), "-i", target_mp4_path,
               "-lavfi",
               "[0:v]trim=end_frame={},scale={}:{}:flags=bicubic,setpts=PTS-STARTPTS[reference];[1:v]trim=end_frame={},setpts=PTS-STARTPTS[distorted];[distorted][reference]libvmaf=log_fmt=json:log_path={}:model_path={}".format(
                   nframes, width, height, nframes, log_path, model_name),
               "-f", "null", "-"]
        print(cmd, flush=True)
        execute(str(idx + 1000), name, cmd, kafka=False)
        with open(log_path) as f:
            obj = json.load(f)
        sinfo = {"id": str(idx + 1000), "stream": name}
        vmaf_score = float(obj["pooled_metrics"]["vmaf"]["mean"])
        sinfo.update({"vmaf": vmaf_score})
        producer.send(KAFKA_WORKLOAD_TOPIC, json.dumps(sinfo))
    except Exception as e:
        print("Exception: {}".format(e))
    return vmaf_score


def process_stream_vods(msg):
    """Transcode one VOD job; ZooKeeper ensures single ownership per stream."""
    stream_name = msg["name"]
    stream_type = msg["output"]["type"]
    stream_parameters = msg["parameters"]
    loop = msg["loop"]
    idx = msg["idx"] if "idx" in msg.keys() else int(random.random() * 10000)
    stream = stream_type + "/" + stream_name

    print("VOD transcode:", stream, flush=True)
    if not isfile(ARCHIVE_ROOT + "/" + stream_name):
        return

    zk = ZKState("/content_provider_transcoder/" + ARCHIVE_ROOT + "/vods/" + stream)
    if zk.processed():
        zk.close()
        return

    target_root = VIDEO_ROOT + stream_type

    try:
        makedirs(target_root + "/" + stream_name)
    except:
        pass  # best effort: directory may already exist

    if zk.process_start():
        try:
            input_stream = ARCHIVE_ROOT + "/" + stream_name
            cmd = FFMpegCmd(input_stream, target_root + "/" + stream_name,
                            stream_type, params=stream_parameters,
                            acc_type=HW_ACC_TYPE, loop=loop,
                            device=HW_DEVICE).cmd()
            if cmd:
                print(cmd, flush=True)
                r = execute(idx, stream_name, cmd)
                if r:
                    raise Exception("status code: " + str(r))
            zk.process_end()
        except:
            print(traceback.format_exc(), flush=True)
            zk.process_abort()

    zk.close()


def process_stream_lives(msg):
    """Transcode one live job, optionally scoring VMAF in the encode scenario."""
    stream_name = msg["name"]
    stream_parameters = msg["parameters"]
    codec = stream_parameters["codec_type"]
    stream_type = msg["output"]["type"]
    target = msg["output"]["target"]
    loop = msg["loop"]
    idx = int(msg["idx"]) if "idx" in msg.keys() else int(random.random() * 10000)
    stream = stream_type + "/" + stream_name

    if not isfile(ARCHIVE_ROOT + "/" + stream_name):
        return

    target_root = VIDEO_ROOT + stream_type

    try:
        makedirs(target_root + "/" + stream_name)
    except:
        pass  # best effort: directory may already exist

    # Non-file targets (e.g. an rtmp URL prefix) get a per-job media name.
    if target != "file":
        target_name = target + stream_type + "/media_" + str(idx) + "_"
    else:
        target_name = target_root + "/" + stream_name

    print("LIVE transcode:", target_name, stream_type, flush=True)
    zk = ZKState("/content_provider_transcoder/" + ARCHIVE_ROOT + "/lives/" + str(idx) + "/" + stream)
    if zk.processed():
        zk.close()
        return

    if zk.process_start():
        try:
            input_stream = ARCHIVE_ROOT + "/" + stream_name
            cmd = FFMpegCmd(input_stream, target_name, stream_type,
                            params=stream_parameters, acc_type=HW_ACC_TYPE,
                            loop=loop, device=HW_DEVICE).cmd()

            if cmd:
                print(cmd, flush=True)
                r = execute(idx, stream_name, cmd)
                if r:
                    raise Exception("status code: " + str(r))
                if SCENARIO == "encode":
                    # Score the first rendition's output against the source.
                    width = stream_parameters["renditions"][0][0]
                    height = stream_parameters["renditions"][0][1]
                    mp4_file = cmd[-1]  # assumes the output file is the last argv element
                    measure_quality_vamf(idx, stream_name,
                                         raw_mp4_path=input_stream,
                                         target_mp4_path=mp4_file,
                                         width=width, height=height)
            zk.process_end()
        except:
            print(traceback.format_exc(), flush=True)
            zk.process_abort()

    zk.close()


def process_stream(msg):
    """Dispatch a job message to the VOD or live pipeline."""
    if msg["live_vod"] == "vod":
        process_stream_vods(msg)
    else:
        process_stream_lives(msg)


if __name__ == "__main__":
    # Main loop: keep consuming; back off briefly on any consumer error.
    c = Consumer(KAFKA_GROUP)
    while True:
        try:
            for message in c.messages(KAFKA_TOPIC):
                process_stream(json.loads(message))
        except:
            print(traceback.format_exc(), flush=True)
            time.sleep(2)
    c.close()
1:0.20371751 2:0.49612229 3:0.10534315 4:-1.110223e-16 6:2.220446e-16 \n4 1:0.33913836 2:0.49612229 3:0.14024497 4:0.074708413 5:0.10231651 6:0.1259153 \n4 1:0.66426757 2:0.49612229 3:0.35268026 4:0.4805681 5:0.59603341 6:0.67408692 \n4 1:0.59561632 2:0.49612229 3:0.27561601 4:0.33977371 5:0.4325213 6:0.50244952 \n4 1:0.50821444 2:0.49612229 3:0.20276685 4:0.2004308 5:0.25758651 6:0.30054029 \n4 1:0.77877298 2:0.49612229 3:0.444392 4:0.61630491 5:0.71210086 6:0.77386496 \n4 1:0.71666017 2:0.49612229 3:0.35967401 4:0.47825205 5:0.57045236 6:0.63752441 \n4 1:0.64025669 2:0.49612229 3:0.27766156 4:0.33407105 5:0.40732401 6:0.46359154 \n4 1:0.88343983 2:0.23066177 3:0.65873851 4:0.86090402 5:0.90661213 6:0.93008753 \n4 1:0.90822691 2:0.23066177 3:0.71439481 4:0.90904598 5:0.94146542 6:0.95674338 \n-4 1:0.49037399 2:0.23066177 3:0.32329421 4:0.33686197 5:0.39456977 6:0.44944683 \n-4 1:0.69044383 2:0.23066177 3:0.43933868 4:0.56327049 5:0.65339511 6:0.71348696 \n-4 1:0.62390093 2:0.23066177 3:0.3800888 4:0.44927578 5:0.52327759 6:0.57907725 \n4 1:0.81887942 2:0.23066177 3:0.56208506 4:0.76164281 5:0.83176644 6:0.86914911 \n4 1:0.77189471 2:0.23066177 3:0.50145055 4:0.66525882 5:0.74327951 6:0.79017822 \n4 1:0.71405433 2:0.23066177 3:0.43952897 4:0.55736023 5:0.63319876 6:0.68402869 \n4 1:0.92114073 3:0.45198963 4:0.97703695 5:0.9907273 6:0.99510256 \n4 1:1 3:0.83319067 4:0.98956086 5:0.99577089 6:0.99784595 \n4 4:0.10344019 5:0.34323945 6:0.63855969 \n4 1:0.19531482 3:0.034330388 4:0.25480402 5:0.54197045 6:0.78020579 \n4 1:0.48394064 3:0.11866359 4:0.58816959 5:0.86435738 6:0.96191842 \n4 1:0.47628079 3:0.11185039 4:0.56180003 5:0.83415721 6:0.93617329 \n4 1:0.46278632 3:0.10308547 4:0.52247575 5:0.78583924 6:0.89392193 \n4 1:0.7038079 3:0.2174879 4:0.84423613 5:0.9662906 6:0.98430594 \n4 1:0.69596686 3:0.20657211 4:0.81196884 5:0.94140702 6:0.96680805 \n4 1:0.68404358 3:0.19261438 4:0.76066415 5:0.89973293 6:0.93660362 \n4 1:0.84073022 2:0.34681232 3:0.22411304 
4:0.88845644 5:0.94169671 6:0.96221395 \n-4 1:0.33900937 2:0.34681232 3:0.027607294 4:0.40659646 5:0.45456869 6:0.48256597 \n-4 1:0.44593129 2:0.34681232 3:0.041939301 4:0.45284872 5:0.5157613 6:0.55335821 \n-4 1:0.67301747 2:0.34681232 3:0.11526222 4:0.68549511 5:0.78556255 6:0.83507583 \n-4 1:0.62833533 2:0.34681232 3:0.092281981 4:0.61278125 5:0.70626575 6:0.75613977 \n-4 1:0.57196879 2:0.34681232 3:0.067548447 4:0.53383404 5:0.61287548 6:0.65468717 \n-0.3312466607741135 1:0.75125028 2:0.34681232 3:0.1457048 4:0.75791308 5:0.84155109 6:0.88132116 \n-4 1:0.71121936 2:0.34681232 3:0.12095689 4:0.68834617 5:0.77453583 6:0.81892861 \n-4 1:0.80269544 2:0.25207203 3:0.3681723 4:0.80658472 5:0.8702283 6:0.90583519 \n-4 1:0.86095387 2:0.25207203 3:0.52475418 4:0.85053413 5:0.90454501 6:0.93093678 \n-4 1:0.5008963 2:0.25207203 3:0.2005129 4:0.41516485 5:0.45282017 6:0.47396143 \n-4 1:0.56977992 2:0.25207203 3:0.21631076 4:0.45848604 5:0.51102137 6:0.53823055 \n-4 1:0.72779828 2:0.25207203 3:0.3051639 4:0.67537297 5:0.75767261 6:0.80327187 \n-4 1:0.68848569 2:0.25207203 3:0.27393051 4:0.60399854 5:0.68000038 6:0.72275152 \n-4 1:0.64121401 2:0.25207203 3:0.23994344 4:0.52538719 5:0.5891732 6:0.62164073 \n-4 1:0.76673633 2:0.25207203 3:0.33053889 4:0.73085549 5:0.80341439 6:0.84546456 \n-4 1:0.73041172 2:0.25207203 3:0.29691153 4:0.66166141 5:0.73408074 6:0.77757209 \n-4 1:0.68529047 2:0.25207203 3:0.26283557 4:0.58611788 5:0.65192525 6:0.69015011 \n4 1:0.86902267 2:0.48885268 3:0.5143645 4:0.8587242 5:0.91841685 6:0.94498293 \n4 1:0.89266106 2:0.48885268 3:0.55208861 4:0.89938377 5:0.94642982 6:0.96615102 \n-4 1:0.42554844 2:0.48885268 3:0.2554221 4:0.36916892 5:0.43100226 6:0.50888404 \n-4 1:0.52520274 2:0.48885268 3:0.27824915 4:0.42915458 5:0.50850476 6:0.58585271 \n-4 1:0.69357445 2:0.48885268 3:0.35289928 4:0.61359907 5:0.7217863 6:0.78790011 \n-4 1:0.64679648 2:0.48885268 3:0.31268451 4:0.5167094 5:0.61224976 6:0.68477529 \n4 1:0.80595874 2:0.48885268 3:0.44075432 
4:0.7803455 5:0.86328719 6:0.90222545 \n-4 1:0.7715192 2:0.48885268 3:0.4012577 4:0.70792536 5:0.80063653 6:0.85083872 \n4 1:0.82199966 2:0.20629643 3:0.30562098 4:0.80541317 5:0.89285836 6:0.92907353 \n-4 1:0.84774006 2:0.20629643 3:0.36755712 4:0.8681203 5:0.93297792 6:0.95700049 \n4 1:0.26631905 2:0.20629643 3:0.076468978 4:0.29833807 5:0.37989948 6:0.4576277 \n-4 1:0.65439648 2:0.20629643 3:0.19487894 4:0.63045155 5:0.76931142 6:0.83706632 \n4 1:0.55295603 2:0.20629643 3:0.13877412 4:0.4724047 5:0.59295828 6:0.66834832 \n4 1:0.75448924 2:0.20629643 3:0.24707248 4:0.72284103 5:0.83178838 6:0.88053503 \n4 1:0.83852041 2:0.15600331 3:0.1625414 4:0.81948421 5:0.90185357 6:0.9347395 \n4 1:0.85805266 2:0.15600331 3:0.19693206 4:0.86294641 5:0.92990351 6:0.95498998 \n-4 1:0.43384835 2:0.15600331 3:0.030541611 4:0.37279112 5:0.4588284 6:0.52004828 \n-4 1:0.72588966 2:0.48885268 3:0.35394597 4:0.61189191 5:0.70897304 6:0.77099691 \n-4 1:0.65865915 2:0.20629643 3:0.1796405 4:0.56432133 5:0.68049028 6:0.74616621 \n-4 1:0.53095193 2:0.15600331 3:0.046271684 4:0.4328793 5:0.5309142 6:0.59282089 \n-4 1:0.71891465 2:0.15600331 3:0.11085278 4:0.68794624 5:0.80350923 6:0.85660483 \n-4 1:0.68635753 2:0.15600331 3:0.091457045 4:0.60849701 5:0.72282659 6:0.78137183 \n-4 1:0.64162333 2:0.15600331 3:0.068820233 4:0.51732819 5:0.62198733 6:0.67977328 \n4 1:0.78395225 2:0.15600331 3:0.13401869 4:0.75274384 5:0.8506531 6:0.89321405 \n-4 1:0.75276337 2:0.15600331 3:0.11289462 4:0.67598462 5:0.78117168 6:0.83259364 \n-4 1:0.71345342 2:0.15600331 3:0.089218917 4:0.58797907 5:0.69284768 6:0.74971699 \n4 1:0.93500967 2:0.08765484 3:0.72226864 4:0.93291747 5:0.960644 6:0.97304054 \n4 1:0.95150668 2:0.08765484 3:0.77391346 4:0.95596295 5:0.97544784 6:0.98405871 \n-4 1:0.48148634 2:0.08765484 3:0.36628046 4:0.45852823 5:0.56005228 6:0.65708595 \n-4 1:0.59853216 2:0.08765484 3:0.42071301 4:0.56376512 5:0.66454599 6:0.741236 \n-4 1:0.79297271 2:0.08765484 3:0.5597726 4:0.80653689 5:0.88996341 
6:0.92691132 \n-4 1:0.76798941 2:0.08765484 3:0.52069978 4:0.74484555 5:0.83431246 6:0.87935204 \n-4 1:0.73225133 2:0.08765484 3:0.47011786 4:0.66069877 5:0.75226598 6:0.80539407 \n-4 1:0.87240592 2:0.08765484 3:0.62680052 4:0.88208508 5:0.93041565 6:0.9505376 \n-4 1:0.84834872 2:0.08765484 3:0.58154998 4:0.82429855 5:0.8858516 6:0.91563291 \n-4 1:0.84365382 2:0.93973481 3:0.36718425 4:0.81512123 5:0.88887359 6:0.92320992 \n-4 1:0.89242364 2:0.93973481 3:0.41336953 4:0.88038833 5:0.93688884 6:0.95992879 \n-4 1:0.31373571 2:0.93973481 3:0.18757116 4:0.34864297 5:0.3777168 6:0.38922611 \n-4 1:0.42490775 2:0.93973481 3:0.20295859 4:0.39290035 5:0.43632323 6:0.45871216 \n-4 1:0.66865444 2:0.93973481 3:0.28594627 4:0.63969879 5:0.73360583 6:0.78380069 \n-4 1:0.62642524 2:0.93973481 3:0.26141889 4:0.56602175 5:0.64775366 6:0.69263211 \n-4 1:0.57430455 2:0.93973481 3:0.23537634 4:0.48984694 5:0.55363885 6:0.5853905 \n-4 1:0.76178555 2:0.93973481 3:0.32205372 4:0.7176044 5:0.80237787 6:0.84588741 \n-4 1:0.72282163 2:0.93973481 3:0.29554025 4:0.64471949 5:0.72634443 6:0.77062686 \n-4 1:0.67693861 2:0.93973481 3:0.2669659 4:0.56720118 5:0.63868728 6:0.67673331 \n4 1:0.86023804 2:0.49739676 3:0.53966638 4:0.77392585 5:0.84784447 6:0.89031641 \n1.296591709971377 1:0.31779385 2:0.49739676 3:0.17094319 4:0.12195679 5:0.13277563 6:0.14165413 \n4 1:0.68317784 2:0.49739676 3:0.37192301 4:0.52750491 5:0.62426522 6:0.6929947 \n4 1:0.55611181 2:0.49739676 3:0.24752355 4:0.28326524 5:0.33261781 6:0.37104424 \n4 1:0.7772257 2:0.49739676 3:0.43832146 4:0.63397606 5:0.7240692 6:0.78367237 \n4 1:0.66186286 2:0.49739676 3:0.30599867 4:0.39201262 5:0.45927759 6:0.51239284 \n4 1:0.94601776 2:0.04579546 3:0.69472114 4:0.97790884 5:0.9891237 6:0.993277 \n4 1:0.98838404 2:0.04579546 3:0.90293444 4:0.99181622 5:0.99642641 6:0.9978864 \n4 1:0.30006056 2:0.04579546 3:0.31879 4:0.45852885 5:0.59717781 6:0.71487885 \n-4 1:0.44902891 2:0.04579546 3:0.35412414 4:0.55926446 5:0.70175505 6:0.79649177 
\n-4 1:0.69856222 2:0.04579546 3:0.45989947 4:0.82115248 5:0.92520734 6:0.9594384 \n-4 1:0.67730161 2:0.04579546 3:0.44400319 4:0.77920819 5:0.88713866 6:0.92903178 \n-4 1:0.64419192 2:0.04579546 3:0.42297435 4:0.72390263 5:0.83364665 6:0.88344569 \n-4 1:0.80781899 2:0.04579546 3:0.52334234 4:0.88859427 5:0.94013924 6:0.95946903 \n-4 1:0.78080761 2:0.04579546 3:0.499439 4:0.84012074 5:0.90229375 6:0.92936693 \n4 1:0.97128596 2:0.014623935 3:0.90135809 4:0.99584619 5:0.9970631 6:0.99757649 \n4 1:0.99645027 2:0.014623935 3:1 4:1 5:1 6:1 \n-4 1:0.5326065 2:0.014623935 3:0.75468972 4:0.76017077 5:0.83753774 6:0.92265059 \n-4 1:0.62757004 2:0.014623935 3:0.77708563 4:0.84258654 5:0.91016348 6:0.95440359 \n-4 1:0.79306842 2:0.014623935 3:0.78900741 4:0.90386551 5:0.96905764 6:0.98466408 \n-4 1:0.77722867 2:0.014623935 3:0.78701408 4:0.89679281 5:0.96056131 6:0.977629 \n-4 1:0.75934622 2:0.014623935 3:0.78422805 4:0.88268036 5:0.94383829 6:0.96596858 \n-4 1:0.8878718 2:0.014623935 3:0.81445984 4:0.96615706 5:0.98858241 6:0.99176534 \n-4 1:0.88211614 2:0.014623935 3:0.81253935 4:0.95982371 5:0.98309178 6:0.9870796 \n4 1:0.83805466 2:0.22767235 3:0.31750162 4:0.85145925 5:0.9121085 6:0.93772147 \n4 1:0.86620985 2:0.22767235 3:0.35742938 4:0.89821492 5:0.94339974 6:0.96076173 \n4 1:0.39289606 2:0.22767235 3:0.12019254 4:0.3951559 5:0.44657802 6:0.46771549 \n4 1:0.48692411 2:0.22767235 3:0.13362033 4:0.43434224 5:0.49900609 6:0.53177669 \n4 1:0.69743918 2:0.22767235 3:0.2263303 4:0.68859985 5:0.78706365 6:0.83662428 \n4 1:0.65237548 2:0.22767235 3:0.19328493 4:0.60107975 5:0.69684945 6:0.74949279 \n4 1:0.59461718 2:0.22767235 3:0.15963705 4:0.51010642 5:0.59283393 6:0.63883591 \n4 1:0.77302727 2:0.22767235 3:0.26078021 4:0.76359704 5:0.8470807 6:0.8858359 \n4 1:0.72953038 2:0.22767235 3:0.22331233 4:0.67735915 5:0.77029889 6:0.81802539 \n4 1:0.87210923 2:0.16787772 3:0.69408521 4:0.91495146 5:0.94890261 6:0.96269344 \n-4 1:0.81595959 2:0.08765484 3:0.52947327 4:0.7501341 
5:0.82294191 6:0.86264385 \n4 1:0.72562415 2:0.49739676 3:0.37130724 4:0.51472366 5:0.59961357 6:0.66258291 \n-4 1:0.87135693 2:0.014623935 3:0.80905852 4:0.94637428 5:0.97242826 6:0.97946694 \n-4 1:0.48910215 2:0.16787772 3:0.49792761 4:0.59161372 5:0.62979552 6:0.64254584 \n-4 1:0.5685964 2:0.16787772 3:0.5149767 4:0.63026581 5:0.67890679 6:0.69964851 \n-4 1:0.75935478 2:0.16787772 3:0.60695536 4:0.80906778 5:0.87125816 6:0.89810007 \n-4 1:0.71788601 2:0.16787772 3:0.57600091 4:0.75310216 5:0.81471966 6:0.84249923 \n-4 1:0.66516668 2:0.16787772 3:0.54473368 4:0.69254626 5:0.74796983 6:0.77177867 \n4 1:0.81880869 2:0.16787772 3:0.64309172 4:0.86078024 5:0.90892223 6:0.92908907 \n-4 1:0.78054558 2:0.16787772 3:0.60849279 4:0.80724494 5:0.86183239 6:0.88618408 \n4 1:0.95353512 2:0.055921852 3:0.61526026 4:0.94655706 5:0.97211195 6:0.98210701 \n4 1:0.98368527 2:0.055921852 3:0.7405327 4:0.96928567 5:0.9853799 6:0.99080378 \n4 1:0.11318821 2:0.055921852 3:0.1590151 4:0.30536689 5:0.48614515 6:0.64344462 \n4 1:0.30298819 2:0.055921852 3:0.19401703 4:0.41679982 5:0.61495039 6:0.74140301 \n4 1:0.60614412 2:0.055921852 3:0.31791569 4:0.72365433 5:0.88324129 6:0.93484545 \n4 1:0.58738733 2:0.055921852 3:0.29301498 4:0.67070014 5:0.83429953 6:0.89348041 \n4 1:0.79496816 2:0.055921852 3:0.42192974 4:0.86711004 5:0.94030868 6:0.96084539 \n4 1:0.77749763 2:0.055921852 3:0.38714172 4:0.81340799 5:0.90059649 6:0.93006702 \n4 1:0.75215882 2:0.055921852 3:0.34721658 4:0.73960747 5:0.84370247 6:0.88485372 \n4 1:0.89732805 2:0.58937038 3:0.58823535 4:0.80035053 5:0.86988422 6:0.90533033 \n-4 1:0.9228759 2:0.58937038 3:0.65797705 4:0.87169952 5:0.92200942 6:0.94454256 \n4 1:0.19504362 2:0.58937038 3:0.21585801 4:0.1754362 5:0.20844015 6:0.23846443 \n4 1:0.34425894 2:0.58937038 3:0.24672569 4:0.24188506 5:0.29544562 6:0.33843061 \n4 1:0.66407117 2:0.58937038 3:0.40045124 4:0.55415203 5:0.66628031 6:0.73418465 \n4 1:0.60780044 2:0.58937038 3:0.34931828 4:0.4519606 5:0.54893247 
6:0.61355219 \n4 1:0.53476258 2:0.58937038 3:0.29851601 4:0.34826788 5:0.42168642 6:0.47203603 \n4 1:0.79195776 2:0.58937038 3:0.47493233 4:0.66775916 5:0.76196439 6:0.81489875 \n4 1:0.7415564 2:0.58937038 3:0.41507439 4:0.56413083 5:0.65815516 6:0.7166999 \n4 1:0.82021207 2:1 3:0.37381485 4:0.7891612 5:0.87031145 6:0.90944281 \n-3.795805084530972 1:0.85903236 2:1 3:0.43235998 4:0.86707094 5:0.92632217 6:0.95151451 \n-4 1:0.25243046 2:1 3:0.084027451 4:0.15537936 5:0.17410072 6:0.17212333 \n-4 1:0.35643487 2:1 3:0.10644455 4:0.21484368 5:0.25587544 6:0.27527817 \n-4 1:0.57605414 2:1 3:0.19031962 4:0.43030863 5:0.5277316 6:0.59069772 \n-4 1:0.49071444 2:1 3:0.14452095 4:0.31406915 5:0.38353445 6:0.42653517 \n4 1:0.73255545 2:1 3:0.28883701 4:0.65284485 5:0.75623242 6:0.81297442 \n0.4082706381617505 1:0.67015395 2:1 3:0.2367756 4:0.5367057 5:0.64063877 6:0.70451767 \n-4 1:0.84450653 2:0.083369236 3:0.57279245 4:0.85249389 5:0.91751611 6:0.94621989 \n-4 1:0.39559773 2:0.083369236 3:0.28184137 4:0.37025203 5:0.46733936 6:0.53517338 \n-4 1:0.70621493 2:0.083369236 3:0.42718441 4:0.69347659 5:0.81124449 6:0.87136343 \n-4 1:0.65615861 2:0.083369236 3:0.37833052 4:0.59301482 5:0.71772587 6:0.7905538 \n-4 1:0.58837863 2:0.083369236 3:0.33229353 4:0.48675881 5:0.60141743 6:0.67458413 \n-4 1:0.77687144 2:0.083369236 3:0.48094343 4:0.76665994 5:0.86191893 6:0.90760934 \n-1.966116876631112 1:0.72849768 2:0.083369236 3:0.42082971 4:0.66591147 5:0.77995959 6:0.84260661 \n-3.906831378063804 1:0.66320082 2:0.083369236 3:0.36350305 4:0.54888271 5:0.66506794 6:0.73685112 \n4 1:0.84500499 2:0.42532178 3:0.43562507 4:0.80721931 5:0.87934044 6:0.91434143 \n4 1:0.8874543 2:0.42532178 3:0.50912639 4:0.87959883 5:0.93223488 6:0.95450335 \n4 1:0.31032192 2:0.42532178 3:0.18976794 4:0.30662908 5:0.34637104 6:0.3661022 \n4 1:0.41026349 2:0.42532178 3:0.20589097 4:0.35241209 5:0.40358156 6:0.42577381 \n4 1:0.67552108 2:0.42532178 3:0.30879992 4:0.60375124 5:0.70097073 6:0.75507206 \n4 
1:0.62772585 2:0.42532178 3:0.27349745 4:0.5196735 5:0.60339149 6:0.65103342 \n4 1:0.5741386 2:0.42532178 3:0.24033766 4:0.43855753 5:0.50243186 6:0.53322825 \n4 1:0.7629976 2:0.42532178 3:0.35347476 4:0.69239941 5:0.78245146 6:0.83117443 \n4 1:0.71746409 2:0.42532178 3:0.31296983 4:0.60525302 5:0.69243388 6:0.7432587 \n-4 1:0.73137955 2:0.16787772 3:0.57222383 4:0.74405775 5:0.79993424 6:0.82484891 \n4 1:0.67383121 2:0.58937038 3:0.35481019 4:0.45269287 5:0.53578336 6:0.59116487 \n4 1:0.5905971 2:1 3:0.18559792 4:0.41535212 5:0.50422336 6:0.56173557 \n4 1:0.66157018 2:0.42532178 3:0.27479904 4:0.51802649 5:0.59270541 6:0.63560969 \n-4 1:0.66827754 2:0.54342577 3:0.18169339 4:0.50290989 5:0.59875259 6:0.65332628 \n4 1:0.85027066 2:0.20820673 3:0.40997978 4:0.82462749 5:0.89794736 6:0.93142825 \n4 1:0.87892054 2:0.20820673 3:0.45891267 4:0.87823329 5:0.93535353 6:0.95883927 \n4 1:0.3986268 2:0.20820673 3:0.17753958 4:0.33495583 5:0.39777832 6:0.44399359 \n-4 1:0.48997993 2:0.20820673 3:0.20172681 4:0.39715881 5:0.47368229 6:0.52781628 \n4 1:0.7022939 2:0.20820673 3:0.31094767 4:0.6676259 5:0.77726116 6:0.83518027 \n-4 1:0.65773092 2:0.20820673 3:0.27420721 4:0.57889989 5:0.68485118 6:0.74837036 \n0.2951376518668717 1:0.60031736 2:0.20820673 3:0.23419121 4:0.48018865 5:0.57200972 6:0.63197473 \n4 1:0.77623676 2:0.20820673 3:0.3510016 4:0.74206651 5:0.83508543 6:0.88101902 \n4 1:0.73562396 2:0.20820673 3:0.31004997 4:0.6557112 5:0.75585014 6:0.81164989 \n-4 1:0.67923081 2:0.20820673 3:0.26679137 4:0.55816547 5:0.65579282 6:0.71593631 \n4 1:0.83968539 2:0.54342577 3:0.32439292 4:0.78747769 5:0.87303614 6:0.91271252 \n4 1:0.86656342 2:0.54342577 3:0.37898741 4:0.85252726 5:0.92049615 6:0.94848246 \n-4 1:0.42728303 2:0.54342577 3:0.10123262 4:0.31581962 5:0.38571265 6:0.42827036 \n-4 1:0.63194526 2:0.54342577 3:0.18169045 4:0.51611903 5:0.62179755 6:0.68216176 \n4 1:0.56954706 2:0.54342577 3:0.14271477 4:0.41491191 5:0.50173488 6:0.55220392 \n4 1:0.76753176 2:0.54342577 
3:0.26295318 4:0.6905031 5:0.79291823 6:0.84469464 \n-4 1:0.72348649 2:0.54342577 3:0.22334634 4:0.60145902 5:0.70573225 6:0.76318544 \n4 1:0.83584492 2:0.047285912 3:0.53826775 4:0.933335 5:0.95948954 6:0.96870909 \n4 1:0.85530855 2:0.047285912 3:0.55323777 4:0.95113339 5:0.97249918 6:0.9795177 \n-4 1:0.53835734 2:0.047285912 3:0.41965074 4:0.71632669 5:0.73953043 6:0.73487553 \n-4 1:0.59175144 2:0.047285912 3:0.43113594 4:0.74141738 5:0.76929188 6:0.77018949 \n-4 1:0.75962366 2:0.047285912 3:0.49613729 4:0.87838146 5:0.91688438 6:0.93150362 \n-4 1:0.72043129 2:0.047285912 3:0.47217411 4:0.83138845 5:0.8704229 6:0.88419439 \n-4 1:0.67287449 2:0.047285912 3:0.44652268 4:0.77691812 5:0.81043483 6:0.8177009 \n-4 1:0.8023177 2:0.047285912 3:0.51559706 4:0.90512389 5:0.93743101 6:0.9492968 \n-4 1:0.76751376 2:0.047285912 3:0.49225957 4:0.86357299 5:0.89948127 6:0.91221155 \n-4 1:0.72124785 2:0.047285912 3:0.46606653 4:0.81323145 5:0.84847474 6:0.85892657 \n", 20 | "score_transform": { 21 | "p2": -0.00705305, 22 | "out_gte_in": "true", 23 | "p0": 1.70674692, 24 | "p1": 1.72643844 25 | }, 26 | "norm_type": "linear_rescale", 27 | "score_clip": [ 28 | 0.0, 29 | 100.0 30 | ], 31 | "feature_names": [ 32 | "VMAF_integer_feature_adm2_score", 33 | "VMAF_integer_feature_motion2_score", 34 | "VMAF_integer_feature_vif_scale0_score", 35 | "VMAF_integer_feature_vif_scale1_score", 36 | "VMAF_integer_feature_vif_scale2_score", 37 | "VMAF_integer_feature_vif_scale3_score" 38 | ], 39 | "intercepts": [ 40 | -0.3092981927591963, 41 | -1.7993968597186747, 42 | -0.003017198086831897, 43 | -0.1728125095425364, 44 | -0.5294309090081222, 45 | -0.7577185792093722, 46 | -1.083428597549764 47 | ], 48 | "model_type": "LIBSVMNUSVR", 49 | "slopes": [ 50 | 0.012020766332648465, 51 | 2.8098077502505414, 52 | 0.06264407466686016, 53 | 1.222763456258933, 54 | 1.5360318811084146, 55 | 1.7620864995501058, 56 | 2.08656468286432 57 | ], 58 | "feature_dict": { 59 | "VMAF_integer_feature": [ 60 | 
"vif_scale0", 61 | "vif_scale1", 62 | "vif_scale2", 63 | "vif_scale3", 64 | "adm2", 65 | "motion2" 66 | ] 67 | } 68 | } 69 | } -------------------------------------------------------------------------------- /xcode-server/shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | DIR=$(dirname $(readlink -f "$0")) 4 | 5 | . "${DIR}/../script/shell.sh" 6 | --------------------------------------------------------------------------------