├── .dockerignore
├── .gitignore
├── Dockerfile
├── Dockerfile.worker
├── LICENSE
├── README.md
├── awcy_server.ts
├── bd_rate_jm.m
├── bd_rate_jm.py
├── bd_rate_report.py
├── bd_rate_report_as.py
├── build_av1_analyzer.sh
├── build_codec.sh
├── convexhull_framework
│   ├── README.TXT
│   ├── bin
│   │   ├── HDRConvScalerY4MFile.cfg
│   │   ├── HDRConvert
│   │   ├── Research-v1.0.4-anchor.xlsm
│   │   ├── aomdec
│   │   ├── aomenc
│   │   ├── vbaProject-AV2.bin
│   │   ├── vmaf
│   │   ├── vmaf_v0.6.1.pkl
│   │   └── vmaf_v0.6.1.pkl.model
│   └── src
│       ├── AV2CTCTest.py
│       ├── AV2CTCVideo.py
│       ├── AWCYConvexHullTest.py
│       ├── CalcBDRate.py
│       ├── CalcQtyWithVmafTool.py
│       ├── CalculateQualityMetrics.py
│       ├── Config.py
│       ├── ConvexHullBDRate.py
│       ├── ConvexHullTest.py
│       ├── EncDecUpscale.py
│       ├── PostAnalysis_Summary.py
│       ├── ScalingTest.py
│       ├── Utils.py
│       ├── VideoDecoder.py
│       ├── VideoEncoder.py
│       ├── VideoScaler.py
│       └── log.txt
├── create_test_branch.sh
├── csv_export.py
├── distortion.m
├── dump_convex_hull.py
├── etc
│   ├── awcy.sql
│   ├── entrypoint
│   ├── entrypoint.worker
│   └── service
│       ├── awcy
│       │   └── run
│       └── job-scheduler
│           └── run
├── generate_list.ts
├── graph_over_time.py
├── nuke_branch.sh
├── package-lock.json
├── package.json
├── pull_running_jobs.py
├── push_jobs.py
├── quantizer_log.m
├── rate_delta_point.py
├── run_awcy.bat
├── run_video_test.sh
├── setup.sh
├── stats.py
├── subjective_metrics.py
├── submit_awcy.py
├── tsconfig.json
├── update.sh
├── update_analyzer.sh
└── www
    ├── README.md
    ├── config
    │   └── webpack.config.js
    ├── img
    │   ├── beer.png
    │   ├── bottle.png
    │   └── mug.png
    ├── index.html
    ├── lib
    │   ├── bootstrap.min.css
    │   ├── d3.js
    │   ├── d3.v4.js
    │   ├── d3.v4.min.js
    │   ├── dragscroll.js
    │   ├── fonts
    │   │   ├── glyphicons-halflings-regular.eot
    │   │   ├── glyphicons-halflings-regular.ttf
    │   │   ├── glyphicons-halflings-regular.woff
    │   │   ├── glyphicons-halflings-regular.woff2
    │   │   ├── icomoon.eot
    │   │   ├── icomoon.svg
    │   │   ├── icomoon.ttf
    │   │   └── icomoon.woff
    │   ├── mousetrap.min.js
    │   ├── react-dom.js
    │   ├── react-dom.min.js
    │   ├── react.js
    │   ├── react.min.js
    │   ├── tinycolor.js
    │   └── tinycolor.min.js
    ├── package-lock.json
    ├── package.json
    ├── react-select.css
    ├── src
    │   ├── App.tsx
    │   ├── components
    │   │   ├── AppLogs.tsx
    │   │   ├── AppStatus.tsx
    │   │   ├── Debug.tsx
    │   │   ├── FullReport.tsx
    │   │   ├── Job.tsx
    │   │   ├── JobLog.tsx
    │   │   ├── JobSelector.tsx
    │   │   ├── Jobs.tsx
    │   │   ├── Plot.tsx
    │   │   ├── Report.tsx
    │   │   ├── SubmitJobForm.tsx
    │   │   └── Widgets.tsx
    │   ├── index.tsx
    │   └── stores
    │       └── Stores.ts
    ├── tsconfig.json
    ├── xiph-community.svg
    └── xiphbar.css
/.dockerignore: -------------------------------------------------------------------------------- 1 | # ignore vim swap files 2 | **/*.swp 3 | **/*.swn 4 | **/*.swo 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | awcy_server.js 2 | generate_list.js 3 | 4 | ab_paths.json 5 | config.json 6 | list.json 7 | 8 | av1/ 9 | av1-rt 10 | vp10/ 11 | vp9/ 12 | daala/ 13 | daalatool/ 14 | rd_tool/ 15 | thor/ 16 | 17 | # Logs 18 | logs 19 | *.log 20 | 21 | # Runtime data 22 | pids 23 | *.pid 24 | *.seed 25 | 26 | # Directory for instrumented libs generated by jscoverage/JSCover 27 | lib-cov 28 | 29 | # Coverage directory used by tools like istanbul 30 | coverage 31 | 32 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 33 | .grunt 34 | 35 | # Compiled binary addons (http://nodejs.org/api/addons.html) 36 | build/Release 37 | 38 | # Dependency directory 39 | # Deployed apps should consider commenting this line out: 40 | # see 
https://npmjs.org/doc/faq.html#Should-I-check-my-node_modules-folder-into-git 41 | node_modules 42 | secret_key 43 | runs 44 | 45 | # Source maps 46 | *.js.map 47 | 48 | # Files generated by Webpack 49 | www/dist 50 | 51 | # vim swap files 52 | *.swp 53 | *.swn 54 | *.swo 55 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:focal 2 | 3 | # environment variables 4 | ENV \ 5 | APP_USER=xiph \ 6 | APP_DIR=/opt/app \ 7 | LC_ALL=C.UTF-8 \ 8 | LANG=C.UTF-8 \ 9 | LANGUAGE=C.UTF-8 \ 10 | DEBIAN_FRONTEND=noninteractive \ 11 | GPG_SERVERS="hkp://keys.openpgp.org:80 hkp://p80.pool.sks-keyservers.net:80 hkp://keyserver.ubuntu.com:80" 12 | 13 | # add runtime user 14 | RUN \ 15 | groupadd --gid 1000 ${APP_USER} && \ 16 | useradd --uid 1000 --gid ${APP_USER} --shell /bin/bash --create-home ${APP_USER} 17 | 18 | 19 | # install common/useful packages 20 | RUN \ 21 | ARCH=`uname -m` && \ 22 | if [ "$ARCH" = "x86_64" ]; then \ 23 | echo "deb http://archive.ubuntu.com/ubuntu/ focal main restricted universe multiverse" >/etc/apt/sources.list && \ 24 | echo "deb http://security.ubuntu.com/ubuntu focal-security main restricted universe multiverse" >>/etc/apt/sources.list && \ 25 | echo "deb http://archive.ubuntu.com/ubuntu/ focal-updates main restricted universe multiverse" >>/etc/apt/sources.list; \ 26 | else \ 27 | echo "deb http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe multiverse" >/etc/apt/sources.list && \ 28 | echo "deb http://ports.ubuntu.com/ubuntu-ports focal-security main restricted universe multiverse" >>/etc/apt/sources.list && \ 29 | echo "deb http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe multiverse" >>/etc/apt/sources.list; \ 30 | fi 31 | 32 | RUN \ 33 | apt-get update && \ 34 | apt-get install -y --no-install-recommends \ 35 | autoconf \ 36 | automake \ 37 | build-essential \ 38 | bzip2 \ 39 | ca-certificates \ 40 | check \ 41 | cmake \ 42 | cmake-extras \ 43 | ctags \ 44 | curl \ 45 | dirmngr \ 46 | file \ 47 | gettext-base \ 48 | git-core \ 49 | gpg \ 50 | gpg-agent \ 51 | g++-aarch64-linux-gnu \ 52 | iproute2 \ 53 | iputils-ping \ 54 | jq \ 55 | less \ 56 | libicu-dev \ 57 | libjpeg-dev \ 58 | libogg-dev \ 59 | libpng-dev \ 60 | libtool \ 61 | locales \ 62 | netcat-openbsd \ 63 | net-tools \ 64 | openjdk-8-jdk-headless \ 65 | openssl \ 66 | pkg-config \ 67 | procps \ 68 | psmisc \ 69 | python2.7 \ 70 | rsync \ 71 | runit \ 72 | sqlite3 \ 73 | strace \ 74 | tcpdump \ 75 | tzdata \ 76 | unzip \ 77 | uuid \ 78 | vim \ 79 | wget \ 80 | xz-utils \ 81 | yasm \ 82 | && \ 83 | apt-get clean && \ 84 | rm -rf /var/lib/apt/lists 85 | 86 | # install nasm 87 | RUN \ 88 | ARCH=`uname -m` && \ 89 | if [ "$ARCH" = "x86_64" ]; then \ 90 | DIR=/tmp/nasm && \ 91 | NASM_URL=http://debian-archive.trafficmanager.net/debian/pool/main/n/nasm && \ 92 | NASM_VERSION=2.15.05-1 && \ 93 | NASM_DEB=nasm_${NASM_VERSION}_amd64.deb && \ 94 | NASM_SUM=c860caec653b865d5b83359452d97b11f1b3ba5b18b07cac554cf72550b3bfc9 && \ 95 | mkdir -p ${DIR} && \ 96 | cd ${DIR} && \ 97 | curl -O ${NASM_URL}/${NASM_DEB} && \ 98 | echo ${NASM_SUM} ${NASM_DEB} | sha256sum --check && \ 99 | dpkg -i ${NASM_DEB} && \ 100 | rm -rf ${DIR}; \ 101 | fi 102 | 103 | # set working directory 104 | WORKDIR ${APP_DIR} 105 | 106 | # prepare rust installation 107 | ENV \ 108 | RUSTUP_HOME=/usr/local/rustup \ 109 | CARGO_HOME=/usr/local/cargo \ 110 | 
PATH=/usr/local/cargo/bin:${PATH} 111 | 112 | # install rust 113 | RUN \ 114 | ARCH=`uname -m` && \ 115 | RUST_VERSION=1.64.0 && \ 116 | curl -sSf --output /tmp/rustup-init https://static.rust-lang.org/rustup/archive/1.25.0/${ARCH}-unknown-linux-gnu/rustup-init && \ 117 | chmod +x /tmp/rustup-init && \ 118 | /tmp/rustup-init -y --no-modify-path --default-toolchain ${RUST_VERSION} && \ 119 | rm -vf /tmp/rustup-init 120 | 121 | # install node 18.x 122 | RUN \ 123 | NODE_VERSION=18.6.0 && \ 124 | ARCH=`uname -m` && \ 125 | if [ "$ARCH" = "aarch64" ]; then \ 126 | ARCH='arm64'; \ 127 | else \ 128 | ARCH='x64'; \ 129 | fi && \ 130 | for key in \ 131 | 4ED778F539E3634C779C87C6D7062848A1AB005C \ 132 | B9E2F5981AA6E0CD28160D9FF13993A75599653C \ 133 | 94AE36675C464D64BAFA68DD7434390BDBE9B9C5 \ 134 | B9AE9905FFD7803F25714661B63B535A4C206CA9 \ 135 | 77984A986EBC2AA786BC0F66B01FBB92821C587A \ 136 | 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1 \ 137 | 8FCCA13FEF1D0C2E91008E09770F7A9A5AE15600 \ 138 | C4F0DFFF4E8C1A8236409D08E73BC641CC11F4C8 \ 139 | DD8F2338BAE7501E3DD5AC78C273792F7D83545D \ 140 | A48C2BEE680E841632CD4E44F07496B3EB3C1762 \ 141 | ; do \ 142 | for server in $(shuf -e ${GPG_SERVERS}) ; do \ 143 | http_proxy= gpg --keyserver "$server" --recv-keys "${key}" && break || : ; \ 144 | done ; \ 145 | done && \ 146 | echo "https://nodejs.org/dist/v${NODE_VERSION}/node-v${NODE_VERSION}-linux-${ARCH}.tar.xz" && \ 147 | curl -fSLO "https://nodejs.org/dist/v${NODE_VERSION}/node-v${NODE_VERSION}-linux-${ARCH}.tar.xz" && \ 148 | curl -fSLO "https://nodejs.org/dist/v${NODE_VERSION}/SHASUMS256.txt.asc" && \ 149 | gpg --batch --decrypt --output SHASUMS256.txt SHASUMS256.txt.asc && \ 150 | grep " node-v${NODE_VERSION}-linux-${ARCH}.tar.xz\$" SHASUMS256.txt | sha256sum -c - && \ 151 | tar xJf "node-v${NODE_VERSION}-linux-${ARCH}.tar.xz" -C /usr --strip-components=1 --no-same-owner && \ 152 | rm -vf "node-v${NODE_VERSION}-linux-${ARCH}.tar.xz" SHASUMS256.txt.asc SHASUMS256.txt && \ 153 | ln -s /usr/bin/node /usr/bin/nodejs 154 | 155 | # install emscripten 156 | RUN \ 157 | ARCH=`uname -m` && \ 158 | if [ "$ARCH" = "x86_64" ]; then \ 159 | EMSDK_VERSION=1.40.1; \ 160 | EMSDK_HASH=e88a3c5bbfef172a5b947768204ef734e2fb6e04; \ 161 | else \ 162 | EMSDK_VERSION=3.1.9; \ 163 | EMSDK_HASH=e34773a0d1a2f32dd3ba90d408a30fae89aa3c5a; \ 164 | fi && \ 165 | EMSDK_VERSION=3.1.9 && \ 166 | EMSDK_HASH=e34773a0d1a2f32dd3ba90d408a30fae89aa3c5a && \ 167 | mkdir -p /opt/emsdk && \ 168 | curl -sSL https://github.com/emscripten-core/emsdk/archive/${EMSDK_HASH}.tar.gz | tar zxf - -C /opt/emsdk --strip-components=1 && \ 169 | cd /opt/emsdk && \ 170 | ./emsdk update && \ 171 | ./emsdk install ${EMSDK_VERSION} && \ 172 | ./emsdk activate ${EMSDK_VERSION} && \ 173 | echo "hack emscript config getter (em-config)" && \ 174 | cp .emscripten /home/${APP_USER}/.emscripten && \ 175 | printf '#!/usr/bin/env python\nimport os, sys\nexecfile(os.getenv("HOME")+"/.emscripten")\nprint eval(sys.argv[1])\n' >/usr/local/bin/em-config && \ 176 | chmod a+x /usr/local/bin/em-config 177 | 178 | # install tini 179 | RUN \ 180 | TINI_VERSION=v0.18.0 && \ 181 | ARCH=`uname -m` && \ 182 | if [ "$ARCH" = "x86_64" ]; then \ 183 | TINI_FILE='tini'; \ 184 | else \ 185 | TINI_FILE='tini-arm64'; \ 186 | fi && \ 187 | for server in $(shuf -e ${GPG_SERVERS}) ; do \ 188 | http_proxy= gpg --keyserver "$server" --recv-keys 0527A9B7 && break || : ; \ 189 | done && \ 190 | wget -O/usr/bin/tini "https://github.com/krallin/tini/releases/download/${TINI_VERSION}/${TINI_FILE}" && \ 
191 | wget -O/usr/bin/tini.asc "https://github.com/krallin/tini/releases/download/${TINI_VERSION}/${TINI_FILE}.asc" && \ 192 | gpg --verify /usr/bin/tini.asc && \ 193 | rm -f /usr/bin/tini.asc && \ 194 | chmod a+x /usr/bin/tini 195 | 196 | # install gosu 197 | RUN \ 198 | GOSU_VERSION=1.11 && \ 199 | for server in $(shuf -e ${GPG_SERVERS}); do \ 200 | http_proxy= gpg --keyserver "$server" --recv-keys B42F6819007F00F88E364FD4036A9C25BF357DD4 && break || : ; \ 201 | done && \ 202 | wget -O/usr/bin/gosu "https://github.com/tianon/gosu/releases/download/${GOSU_VERSION}/gosu-amd64" && \ 203 | wget -O/usr/bin/gosu.asc "https://github.com/tianon/gosu/releases/download/${GOSU_VERSION}/gosu-amd64.asc" && \ 204 | gpg --verify /usr/bin/gosu.asc && \ 205 | rm -f /usr/bin/gosu.asc && \ 206 | chmod a+x /usr/bin/gosu 207 | 208 | # install daalatool 209 | ENV \ 210 | DAALATOOL_DIR=/opt/daalatool 211 | 212 | RUN \ 213 | mkdir -p $(dirname ${DAALATOOL_DIR}) && \ 214 | git clone https://gitlab.xiph.org/xiph/daala.git ${DAALATOOL_DIR} && \ 215 | cd ${DAALATOOL_DIR} && \ 216 | ./autogen.sh && \ 217 | ./configure --disable-player && \ 218 | make tools -j4 219 | 220 | # install ciede2000 221 | ENV \ 222 | CIEDE2000_DIR=/opt/dump_ciede2000 223 | 224 | RUN \ 225 | mkdir -p $(dirname ${CIEDE2000_DIR}) && \ 226 | git clone https://github.com/KyleSiefring/dump_ciede2000.git ${CIEDE2000_DIR} && \ 227 | cd ${CIEDE2000_DIR} && \ 228 | cargo build --release 229 | 230 | # install hdrtools 231 | ENV \ 232 | HDRTOOLS_DIR=/opt/hdrtools \ 233 | HDRTOOLS_VERSION=0.22 234 | 235 | RUN \ 236 | ARCH=`uname -m` && \ 237 | mkdir -p ${HDRTOOLS_DIR} && \ 238 | curl -sSfL --output HDRTools.tar.bz2 https://gitlab.com/standards/HDRTools/-/archive/v${HDRTOOLS_VERSION}/HDRTools-v${HDRTOOLS_VERSION}.tar.bz2 && \ 239 | tar -xvf HDRTools.tar.bz2 --strip-components=1 -C ${HDRTOOLS_DIR} && \ 240 | cd ${HDRTOOLS_DIR} && \ 241 | sed -i 's/std::modff/modff/g' common/src/OutputY4M.cpp && \ 242 | sed -i 's/using ::hdrtoolslib::Y_COMP;//g' projects/HDRConvScaler/src/HDRConvScalerYUV.cpp && \ 243 | sed -i 's/\[Y_COMP\]/\[hdrtoolslib::Y_COMP\]/g' projects/HDRConvScaler/src/HDRConvScalerYUV.cpp && \ 244 | if [ "$ARCH" = "aarch64" ]; then \ 245 | # temporary patches until ARM support is upstream 246 | sed -i 's/-msse2//g' common/Makefile projects/*/Makefile; \ 247 | sed -i 's/-mfpmath=sse//g' common/Makefile projects/*/Makefile; \ 248 | sed -i 's/#include //g' common/src/ResizeBiCubic.cpp common/src/DistortionMetricVQM.cpp; \ 249 | sed -i 's/#include //g' common/src/DistortionMetricVQM.cpp; \ 250 | sed -i 's/#if defined(ENABLE_SSE_OPT)/#if ENABLE_SSE_OPT/g' common/src/ResizeBiCubic.cpp; \ 251 | fi && \ 252 | make # -j is broken 253 | 254 | # install rd_tool and dependencies 255 | ENV \ 256 | RD_TOOL_DIR=/opt/rd_tool 257 | 258 | RUN \ 259 | apt-get update && \ 260 | apt-get install -y --no-install-recommends \ 261 | bc \ 262 | python3-boto3 \ 263 | python3-numpy \ 264 | python3-scipy \ 265 | python3-tornado \ 266 | ssh \ 267 | time \ 268 | && \ 269 | mkdir -p ${RD_TOOL_DIR} && \ 270 | rm -vf /etc/ssh/ssh_host_* && \ 271 | curl -sSL https://github.com/xiph/rd_tool/tarball/master | tar zxf - -C ${RD_TOOL_DIR} --strip-components=1 272 | 273 | # install meson 274 | RUN \ 275 | apt-get install -y python3 python3-pip python3-setuptools python3-wheel ninja-build && \ 276 | pip3 install meson 277 | 278 | # install dav1d and dependencies 279 | ENV \ 280 | DAV1D_DIR=/opt/dav1d 281 | 282 | RUN \ 283 | git clone https://code.videolan.org/videolan/dav1d.git 
${DAV1D_DIR} && \ 284 | cd ${DAV1D_DIR} && \ 285 | mkdir build && cd build && \ 286 | meson .. --default-library static --buildtype release && \ 287 | ninja install 288 | 289 | # install VMAF 290 | ENV \ 291 | VMAF_DIR=/opt/vmaf \ 292 | VMAF_VERSION=v3.0.0 293 | 294 | RUN \ 295 | mkdir -p ${VMAF_DIR} && \ 296 | curl -sSL https://github.com/Netflix/vmaf/archive/refs/tags/${VMAF_VERSION}.tar.gz | tar zxf - -C ${VMAF_DIR} --strip-components=1 && \ 297 | cd ${VMAF_DIR}/libvmaf && \ 298 | meson build --buildtype release && \ 299 | ninja -C build && \ 300 | ninja -C build install 301 | 302 | # clear package manager cache 303 | RUN \ 304 | apt-get clean && \ 305 | rm -rf /var/lib/apt/lists 306 | 307 | # add code 308 | ADD package.json package-lock.json *.ts tsconfig.json ${APP_DIR}/ 309 | ADD www ${APP_DIR}/www 310 | 311 | # compile typescript/nodejs code 312 | RUN \ 313 | cd ${APP_DIR} && \ 314 | export PYTHON=python2.7 && \ 315 | alias python=python2.7 && \ 316 | rm -rf node_modules && \ 317 | npm install && \ 318 | npm run tsc 319 | 320 | RUN \ 321 | cd ${APP_DIR}/www && \ 322 | npm install && \ 323 | npm run build 324 | 325 | # add scripts 326 | ADD *.m *.sh *.py ${APP_DIR}/ 327 | 328 | # AOM_CTC: Install Openpyxl 329 | RUN \ 330 | pip3 install openpyxl xlrd==1.2.0 xlsxwriter matplotlib 331 | 332 | 333 | # environment variables 334 | ENV \ 335 | CONFIG_DIR=/data/conf \ 336 | CODECS_SRC_DIR=/data/src \ 337 | RUNS_DST_DIR=/data/runs \ 338 | WORK_DIR=/data/work \ 339 | MEDIAS_SRC_DIR=/data/media \ 340 | LOCAL_WORKER_ENABLED=false \ 341 | IRC_CHANNEL=none \ 342 | AWCY_API_KEY=awcy_api_key \ 343 | AWCY_SERVER_PORT=3000 \ 344 | RD_SERVER_PORT=4000 345 | 346 | # add configuration scripts 347 | ADD etc/* /etc/ 348 | ADD etc/service/awcy /etc/service/awcy 349 | ADD etc/service/job-scheduler /etc/service/job-scheduler 350 | 351 | # set entrypoint 352 | ENTRYPOINT [ "/etc/entrypoint" ] 353 | 354 | -------------------------------------------------------------------------------- /Dockerfile.worker: -------------------------------------------------------------------------------- 1 | FROM ubuntu:focal 2 | 3 | # environment variables 4 | ENV \ 5 | APP_USER=xiph \ 6 | APP_DIR=/opt/app \ 7 | LC_ALL=C.UTF-8 \ 8 | LANG=C.UTF-8 \ 9 | LANGUAGE=C.UTF-8 \ 10 | DEBIAN_FRONTEND=noninteractive 11 | 12 | # add runtime user 13 | RUN \ 14 | groupadd --gid 1000 ${APP_USER} && \ 15 | useradd --uid 1000 --gid ${APP_USER} --shell /bin/bash --create-home ${APP_USER} 16 | 17 | # install common/useful packages 18 | RUN \ 19 | ARCH=`uname -m` && \ 20 | if [ "$ARCH" = "x86_64" ]; then \ 21 | echo "deb http://archive.ubuntu.com/ubuntu/ focal main restricted universe multiverse" >/etc/apt/sources.list && \ 22 | echo "deb http://security.ubuntu.com/ubuntu focal-security main restricted universe multiverse" >>/etc/apt/sources.list && \ 23 | echo "deb http://archive.ubuntu.com/ubuntu/ focal-updates main restricted universe multiverse" >>/etc/apt/sources.list; \ 24 | echo "deb http://security.ubuntu.com/ubuntu bionic-security main" >>/etc/apt/sources.list; \ 25 | else \ 26 | echo "deb http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe multiverse" >/etc/apt/sources.list && \ 27 | echo "deb http://ports.ubuntu.com/ubuntu-ports focal-security main restricted universe multiverse" >>/etc/apt/sources.list && \ 28 | echo "deb http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe multiverse" >>/etc/apt/sources.list; \ 29 | fi 30 | 31 | RUN \ 32 | apt-get update && \ 33 | apt-get install -y 
--no-install-recommends \ 34 | autoconf \ 35 | automake \ 36 | build-essential \ 37 | bzip2 \ 38 | ca-certificates \ 39 | check \ 40 | ctags \ 41 | curl \ 42 | file \ 43 | gettext-base \ 44 | git-core \ 45 | iproute2 \ 46 | iputils-ping \ 47 | jq \ 48 | less \ 49 | libjpeg-dev \ 50 | libogg-dev \ 51 | libpng-dev \ 52 | libtool \ 53 | locales \ 54 | netcat-openbsd \ 55 | net-tools \ 56 | openssl \ 57 | pkg-config \ 58 | procps \ 59 | psmisc \ 60 | rsync \ 61 | strace \ 62 | tcpdump \ 63 | tzdata \ 64 | unzip \ 65 | uuid \ 66 | vim \ 67 | wget \ 68 | linux-tools-$(uname -r) \ 69 | linux-tools-generic \ 70 | xz-utils && \ 71 | apt-get clean && \ 72 | rm -rf /var/lib/apt/lists 73 | 74 | # install nasm 75 | RUN \ 76 | ARCH=`uname -m` && \ 77 | if [ "$ARCH" = "x86_64" ]; then \ 78 | DIR=/tmp/nasm && \ 79 | NASM_URL=http://debian-archive.trafficmanager.net/debian/pool/main/n/nasm && \ 80 | NASM_VERSION=2.15.05-1 && \ 81 | NASM_DEB=nasm_${NASM_VERSION}_amd64.deb && \ 82 | NASM_SUM=c860caec653b865d5b83359452d97b11f1b3ba5b18b07cac554cf72550b3bfc9 && \ 83 | mkdir -p ${DIR} && \ 84 | cd ${DIR} && \ 85 | curl -O ${NASM_URL}/${NASM_DEB} && \ 86 | echo ${NASM_SUM} ${NASM_DEB} | sha256sum --check && \ 87 | dpkg -i ${NASM_DEB} && \ 88 | rm -rf ${DIR}; \ 89 | fi 90 | 91 | # prepare rust installation 92 | ENV \ 93 | RUSTUP_HOME=/usr/local/rustup \ 94 | CARGO_HOME=/usr/local/cargo \ 95 | PATH=/usr/local/cargo/bin:${PATH} 96 | 97 | # install rust 98 | RUN \ 99 | ARCH=`uname -m` && \ 100 | RUST_VERSION=1.64.0 && \ 101 | curl -sSf --output /tmp/rustup-init https://static.rust-lang.org/rustup/archive/1.25.0/${ARCH}-unknown-linux-gnu/rustup-init && \ 102 | chmod +x /tmp/rustup-init && \ 103 | /tmp/rustup-init -y --no-modify-path --default-toolchain ${RUST_VERSION} && \ 104 | rm -vf /tmp/rustup-init 105 | 106 | # install daalatool 107 | ENV \ 108 | DAALATOOL_DIR=/opt/daalatool 109 | 110 | RUN \ 111 | mkdir -p $(dirname ${DAALATOOL_DIR}) && \ 112 | git clone https://gitlab.xiph.org/xiph/daala.git ${DAALATOOL_DIR} && \ 113 | cd ${DAALATOOL_DIR} && \ 114 | ./autogen.sh && \ 115 | ./configure --disable-player && \ 116 | make tools -j4 117 | 118 | # install ciede2000 119 | ENV \ 120 | CIEDE2000_DIR=/opt/dump_ciede2000 121 | 122 | RUN \ 123 | mkdir -p $(dirname ${CIEDE2000_DIR}) && \ 124 | git clone https://github.com/KyleSiefring/dump_ciede2000.git ${CIEDE2000_DIR} && \ 125 | cd ${CIEDE2000_DIR} && \ 126 | cargo build --release 127 | 128 | # install hdrtools 129 | ENV \ 130 | HDRTOOLS_DIR=/opt/hdrtools \ 131 | HDRTOOLS_VERSION=0.22 132 | 133 | RUN \ 134 | ARCH=`uname -m` && \ 135 | mkdir -p ${HDRTOOLS_DIR} && \ 136 | curl -sSfL --output HDRTools.tar.bz2 https://gitlab.com/standards/HDRTools/-/archive/v${HDRTOOLS_VERSION}/HDRTools-v${HDRTOOLS_VERSION}.tar.bz2 && \ 137 | tar -xvf HDRTools.tar.bz2 --strip-components=1 -C ${HDRTOOLS_DIR} && \ 138 | cd ${HDRTOOLS_DIR} && \ 139 | sed -i 's/std::modff/modff/g' common/src/OutputY4M.cpp && \ 140 | sed -i 's/using ::hdrtoolslib::Y_COMP;//g' projects/HDRConvScaler/src/HDRConvScalerYUV.cpp && \ 141 | sed -i 's/\[Y_COMP\]/\[hdrtoolslib::Y_COMP\]/g' projects/HDRConvScaler/src/HDRConvScalerYUV.cpp && \ 142 | if [ "$ARCH" = "aarch64" ]; then \ 143 | # temporary patches until ARM support is upstream 144 | sed -i 's/-msse2//g' common/Makefile projects/*/Makefile; \ 145 | sed -i 's/-mfpmath=sse//g' common/Makefile projects/*/Makefile; \ 146 | sed -i 's/#include //g' common/src/ResizeBiCubic.cpp common/src/DistortionMetricVQM.cpp; \ 147 | sed -i 's/#include //g' 
common/src/DistortionMetricVQM.cpp; \ 148 | sed -i 's/#if defined(ENABLE_SSE_OPT)/#if ENABLE_SSE_OPT/g' common/src/ResizeBiCubic.cpp; \ 149 | fi && \ 150 | make # -j is broken 151 | 152 | # install rd_tool dependencies 153 | RUN \ 154 | apt-get update && \ 155 | apt-get install -y --no-install-recommends \ 156 | bc \ 157 | python3-numpy \ 158 | python3-scipy \ 159 | python3-pip \ 160 | python3-setuptools \ 161 | python3-wheel \ 162 | ninja-build \ 163 | ssh \ 164 | time \ 165 | && \ 166 | rm -vf /etc/ssh/ssh_host_* 167 | 168 | # install dav1d and dependencies 169 | ENV \ 170 | DAV1D_DIR=/opt/dav1d 171 | 172 | RUN \ 173 | pip3 install meson && \ 174 | git clone https://code.videolan.org/videolan/dav1d.git ${DAV1D_DIR} && \ 175 | cd ${DAV1D_DIR} && \ 176 | mkdir build && cd build && \ 177 | meson .. --default-library static --buildtype release && \ 178 | ninja install 179 | 180 | # install VMAF 181 | ENV \ 182 | VMAF_DIR=/opt/vmaf \ 183 | VMAF_VERSION=v3.0.0 184 | 185 | RUN \ 186 | mkdir -p ${VMAF_DIR} && \ 187 | curl -sSL https://github.com/Netflix/vmaf/archive/refs/tags/${VMAF_VERSION}.tar.gz | tar zxf - -C ${VMAF_DIR} --strip-components=1 && \ 188 | cd ${VMAF_DIR}/libvmaf && \ 189 | meson build --buildtype release && \ 190 | ninja -C build && \ 191 | ninja -C build install 192 | 193 | # Install FFmpeg 6.0 with reduced VMAF 194 | # Custom FFmpeg 6.0 Static autobuild with GPL license 195 | # Reduced VMAF = LIBVMAF - {CIEDE2000, MS-SSIM, CAMBI, PSNR_HVS} 196 | RUN \ 197 | wget https://github.com/vibhoothi/FFmpeg-Builds/releases/download/latest/ffmpeg-n6.0-latest-linux64-gpl-6.0.tar.xz && \ 198 | tar -xf ffmpeg-n6.0-latest-linux64-gpl-6.0.tar.xz && \ 199 | cp ffmpeg-n6.0-latest-linux64-gpl-6.0/bin/ffmpeg /usr/local/bin/ffmpeg 200 | 201 | # clear package manager cache 202 | RUN \ 203 | apt-get clean && \ 204 | rm -rf /var/lib/apt/lists 205 | 206 | # set working directory 207 | WORKDIR /home/${APP_USER} 208 | 209 | # environment variables 210 | ENV \ 211 | WORK_DIR=/data/work 212 | 213 | # create symbolic links 214 | RUN \ 215 | mkdir /home/${APP_USER}/awcy_temp && \ 216 | chown -R ${APP_USER}:${APP_USER} /home/${APP_USER}/awcy_temp && \ 217 | ln -s /opt/daalatool /home/${APP_USER}/awcy_temp/daalatool && \ 218 | ln -s /opt/vmaf /home/${APP_USER}/awcy_temp/vmaf && \ 219 | ln -s /opt/dump_ciede2000 /home/${APP_USER}/awcy_temp/dump_ciede2000 && \ 220 | ln -s /opt/dav1d /home/${APP_USER}/awcy_temp/dav1d 221 | 222 | # set entrypoint 223 | ADD etc/entrypoint.worker /etc/entrypoint.worker 224 | ENTRYPOINT [ "/etc/entrypoint.worker" ] 225 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2014 Thomas Daede 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /bd_rate_jm.m: -------------------------------------------------------------------------------- 1 | #!/usr/bin/octave --quiet 2 | a = load('-ascii', argv(){1}); 3 | b = load('-ascii', argv(){2}); 4 | rates = [0.005 0.02 0.06 0.2]; 5 | ra = a(:,3)*8./a(:,2); 6 | rb = b(:,3)*8./b(:,2); 7 | interp_type = 'spline'; 8 | met_name = {' PSNR', ' PSNRHVS', ' SSIM', 'FASTSSIM'}; 9 | printf(" LOW (%%) MEDIUM (%%) HIGH (%%)\n"); 10 | for m=1:4 11 | ya = a(:,3+m); 12 | yb = b(:,3+m); 13 | p = interp1(ra, ya, rates, interp_type); 14 | for k=1:length(rates)-1 15 | a_rate = interp1(ya, log(ra), p(k):.01:p(k+1), interp_type); 16 | b_rate = interp1(yb, log(rb), p(k):.01:p(k+1), interp_type); 17 | if !length(a_rate) || !length(b_rate) 18 | bdr(m,k) = NaN(); 19 | else 20 | bdr(m,k)=100 * (exp(mean(b_rate-a_rate))-1); 21 | end 22 | end 23 | printf("%s %f %f %f\n", met_name{m}, bdr(m, 1), bdr(m, 2), bdr(m, 3)); 24 | end 25 | -------------------------------------------------------------------------------- /bd_rate_jm.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import sys 4 | 5 | from numpy import * 6 | from scipy import * 7 | from scipy.interpolate import interp1d 8 | 9 | a = genfromtxt(sys.argv[1]) 10 | b = genfromtxt(sys.argv[2]) 11 | rates = [0.005, 0.02, 0.06, 0.2] 12 | ra = a[:, 2] * 8.0 / a[:, 1] 13 | rb = b[:, 2] * 8.0 / b[:, 1] 14 | interp_type = "cubic" 15 | met_name = [" PSNR", " PSNRHVS", " SSIM", "FASTSSIM"] 16 | print(" LOW (%)\tMEDIUM (%)\tHIGH (%)") 17 | bdr = zeros((4, 4)) 18 | for m in range(0, 4): 19 | ya = a[:, 3 + m] 20 | yb = b[:, 3 + m] 21 | for k in range(0, len(rates) - 1): 22 | try: 23 | p0 = interp1d(ra, ya, interp_type)(rates[k]) 24 | p1 = interp1d(ra, ya, interp_type)(rates[k + 1]) 25 | except ValueError: 26 | bdr[m, k] = NaN 27 | continue 28 | a_rate = interp1d(ya, log(ra), interp_type)(arange(p0, p1, 0.01)) 29 | b_rate = interp1d(yb, log(rb), interp_type)(arange(p0, p1, 0.01)) 30 | if not len(a_rate) or not len(b_rate): 31 | bdr[m, k] = NaN 32 | else: 33 | bdr[m, k] = 100 * (exp(mean(b_rate - a_rate)) - 1) 34 | print("%s\t%4f%%\t%4f%%\t%4f%%" % (met_name[m], bdr[m, 0], bdr[m, 1], bdr[m, 2])) 35 | -------------------------------------------------------------------------------- /build_av1_analyzer.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # exit on failure 4 | set -e 5 | 6 | # exit on unassigned variable 7 | set -u 8 | 9 | echo "Building Analyzer" 10 | 11 | # add emscripten tools to PATH 12 | export PATH=${PATH}:$(em-config EMSCRIPTEN_ROOT):$(em-config LLVM_ROOT) 13 | 14 | cd ${CODECS_SRC_DIR}/${CODEC} 15 | 16 | rm -rf asm/ 17 | mkdir -p asm 18 | pushd asm 19 | 20 | if [[ "${BUILD_OPTIONS}" == *"--enable"* ]]; then 21 | #legacy configure 22 | popd 23 | make distclean || true 24 | pushd asm 25 | emconfigure ../configure --disable-multithread 
--disable-runtime-cpu-detect --target=generic-gnu --enable-accounting --enable-inspection --disable-docs --disable-webm-io --extra-cflags="-D_POSIX_SOURCE" ${BUILD_OPTIONS} 26 | emmake make -j$(nproc) 27 | cp examples/inspect inspect.bc 28 | emcc -O3 inspect.bc -o inspect.js -s TOTAL_MEMORY=402653184 -s MODULARIZE=1 -s EXPORT_NAME="'DecoderModule'" --post-js "../tools/inspect-post.js" --memory-init-file 0 29 | popd 30 | cp asm/inspect.js ./aomanalyzer.js 31 | else 32 | cmake ../ -DAOM_TARGET_CPU=generic -DCONFIG_MULTITHREAD=0 -DCONFIG_RUNTIME_CPU_DETECT=0 -DCONFIG_ACCOUNTING=1 -DCONFIG_INSPECTION=1 -DENABLE_DOCS=0 -DCONFIG_WEBM_IO=0 -DENABLE_TESTS=0 -DCMAKE_TOOLCHAIN_FILE=$(em-config EMSCRIPTEN_ROOT)/cmake/Modules/Platform/Emscripten.cmake ${BUILD_OPTIONS} 33 | emmake make -j$(nproc) 34 | popd 35 | cp asm/examples/inspect.js ./aomanalyzer.js 36 | fi 37 | -------------------------------------------------------------------------------- /build_codec.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # exit on failure 4 | set -e 5 | 6 | # exit on unassigned variable 7 | set -u 8 | 9 | echo "Building codec '${CODEC}' (using BUILD_OPTIONS '${BUILD_OPTIONS}')" 10 | 11 | case "${CODEC}" in 12 | daala) 13 | cd ${CODECS_SRC_DIR}/${CODEC} 14 | gcc -print-prog-name=cc1 15 | gcc -print-search-dirs 16 | ./autogen.sh 17 | ./configure --enable-static --disable-shared --disable-player --disable-dump-images --enable-logging --enable-dump-recons ${BUILD_OPTIONS} 18 | make -j$(nproc) 19 | ;; 20 | 21 | thor | thor-rt) 22 | cd ${CODECS_SRC_DIR}/${CODEC} 23 | make 24 | ;; 25 | 26 | x264) 27 | cd ${CODECS_SRC_DIR}/x264 28 | ./configure ${BUILD_OPTIONS} --enable-pic 29 | make 30 | ;; 31 | 32 | x265 | x265-rt) 33 | cd ${CODECS_SRC_DIR}/x265/build/linux 34 | cmake -D ENABLE_SHARED=no ${BUILD_OPTIONS} ../../source/ 35 | make 36 | ;; 37 | 38 | xvc) 39 | mkdir -p xvc/build 40 | pushd xvc/build 41 | cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTS=OFF -DENABLE_ASSERTIONS=OFF $BUILD_OPTIONS .. 42 | make -j4 43 | popd 44 | ;; 45 | 54 | vp10 | vp10-rt) 55 | cd ${CODECS_SRC_DIR}/${CODEC} 56 | ./configure --enable-vp10 ${BUILD_OPTIONS} 57 | make 58 | ;; 59 | 60 | av1 | av1-rt) 61 | cd ${CODECS_SRC_DIR}/${CODEC} 62 | echo "-- Starting x86_64 Build --" 63 | if [[ "${BUILD_OPTIONS}" == *"--enable"* ]]; then 64 | # legacy configure build 65 | ./configure --enable-av1 --enable-debug --disable-unit-tests --disable-docs ${BUILD_OPTIONS} 66 | make -j$(nproc) 67 | mkdir -p x86_64 68 | mv aomenc aomdec x86_64/ 69 | else 70 | rm -rf cmake-build || true 71 | mkdir cmake-build 72 | pushd cmake-build 73 | cmake ../ -DENABLE_TESTS=0 -DENABLE_DOCS=0 -DCMAKE_BUILD_TYPE=Release -DAOM_EXTRA_C_FLAGS=-UNDEBUG -DAOM_EXTRA_CXX_FLAGS=-UNDEBUG ${BUILD_OPTIONS} 74 | make -j$(nproc) 75 | popd 76 | mkdir -p x86_64 77 | mv cmake-build/aomenc cmake-build/aomdec x86_64/ 78 | fi 79 | 80 | echo "-- Finished x86_64 Build --" 81 | echo "-- Starting Analyzer Build --" 82 | ${APP_DIR}/build_av1_analyzer.sh || true 83 | echo "-- Finished Analyzer Build --" 84 | echo "Note: Analyzer errors will not prevent the run from completing." 
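# (note: the analyzer build above runs in asm/ and leaves the x86_64 binaries untouched; the mv below promotes them to the codec root, where downstream run scripts are assumed to pick up aomenc/aomdec)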
85 | mv x86_64/* ./ 86 | ;; 87 | 88 | av2* | avm*) 89 | cd ${CODECS_SRC_DIR}/av2 90 | echo "-- Starting x86_64 Build --" 91 | rm -rf cmake-build || true 92 | mkdir cmake-build 93 | pushd cmake-build 94 | if [[ "${ARCH}" == "aarch64" ]]; then 95 | ARCH_OPTIONS="-DCMAKE_TOOLCHAIN_FILE=../build/cmake/toolchains/arm64-linux-gcc.cmake" 96 | else 97 | ARCH_OPTIONS="" 98 | fi 99 | cmake ../ -DENABLE_TESTS=0 -DENABLE_DOCS=0 -DCMAKE_BUILD_TYPE=Release ${ARCH_OPTIONS} ${BUILD_OPTIONS} 100 | make -j$(nproc) 101 | popd 102 | mkdir -p "$ARCH" 103 | mv cmake-build/aomenc cmake-build/aomdec ./ 104 | echo "-- Finished $ARCH Build --" 105 | ;; 106 | 107 | 108 | vp8 | vp8-rt) 109 | cd ${CODECS_SRC_DIR}/vp8 110 | CONFIG_DIR=${CODECS_SRC_DIR}/vp8/ ./configure --enable-vp8 --disable-vp9 ${BUILD_OPTIONS} 111 | CONFIG_DIR=${CODECS_SRC_DIR}/vp8/ make 112 | ;; 113 | 114 | vp9 | vp9-rt) 115 | cd ${CODECS_SRC_DIR}/vp9 116 | CONFIG_DIR=${CODECS_SRC_DIR}/vp9/ ./configure --enable-vp9 --disable-vp8 ${BUILD_OPTIONS} 117 | CONFIG_DIR=${CODECS_SRC_DIR}/vp9/ make 118 | ;; 119 | 120 | rav1e) 121 | cd ${CODECS_SRC_DIR}/rav1e 122 | cargo build --release ${BUILD_OPTIONS} 123 | ;; 124 | 125 | svt-av1*) 126 | cd ${CODECS_SRC_DIR}/svt-av1 127 | cd Build/linux 128 | ./build.sh --cc=gcc --cxx=g++ --release --static 129 | ;; 130 | 131 | vvc-vtm*) 132 | cd ${CODECS_SRC_DIR}/vvc-vtm 133 | mkdir build 134 | pushd build 135 | cmake .. -DCMAKE_BUILD_TYPE=Release 136 | make -j 8 137 | ;; 138 | *) 139 | echo "Unknown codec '${CODEC}'" >&2 140 | exit 1 141 | esac 142 | -------------------------------------------------------------------------------- /convexhull_framework/bin/HDRConvScalerY4MFile.cfg: -------------------------------------------------------------------------------- 1 | # HDRConvert default configuration file 2 | # format: parameter=value or parameter="stringvalue", no SPACES! 
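# NOTE: src/Config.py points HDRToolsConfigFileTemplate at this file; the source/output file names and resolutions below are example values that a test run is expected to override per clip.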
3 | 4 | ############################################################################### 5 | # 6 | # Input/output file parameters 7 | # 8 | ############################################################################### 9 | ScaleOnly=1 10 | ScalingMode=12 11 | 12 | NumberOfFrames=1 # number of frames to process 13 | SourceFile="D:\YUVs\AV2-CTC\a1_4k\Crosswalk_3840x2160_5994fps_10bit_420.y4m" 14 | OutputFile="Crosswalk_1920x1080_5994fps_10bit_420.y4m" # Scaled YUV file 15 | 16 | SourceWidth=3840 # input frame width 17 | SourceHeight=2160 # input frame height 18 | OutputWidth=1920 # output frame width 19 | OutputHeight=1080 # output frame height 20 | SourceRate=0 # input frame rate 21 | OutputRate=0 # output frame rate 22 | 23 | SourceInterleaved=0 # Interleaved or Planar data 24 | SourceChromaFormat=1 # Input chroma format 25 | # 0 : 400 26 | # 1 : 420 27 | # 2 : 422 28 | # 3 : 444 29 | SourceBitDepthCmp0=10 # Input bit depth of luma component 30 | SourceBitDepthCmp1=10 # Input bit depth of u/cb component 31 | SourceBitDepthCmp2=10 # Input bit depth of v/cr component 32 | SourceFourCCCode=6 # FourCC code for input source 33 | # 0: UYVY 34 | # 1: YUY2 35 | # 2: YUYV 36 | # 3: YVYU 37 | # 4: BGR 38 | # 5: V210 39 | # 6: Y444I 40 | SourceColorSpace=0 # 0: CM_YUV 41 | # 1: CM_RGB 42 | # 2: CM_XYZ 43 | 44 | SourceColorPrimaries=0 # 0: BT.709 45 | # 1: BT.2020 46 | # 2: P3D60 47 | # 3: P3D65 48 | # 4: None 49 | SourceTransferFunction=12 50 | OutputChromaFormat=1 # Output Chroma format 51 | # 0 : 400 52 | # 1 : 420 53 | # 2 : 422 54 | # 3 : 444 55 | 56 | OutputBitDepthCmp0=10 # bit depth of luma component for distortion computation 57 | OutputBitDepthCmp1=10 # bit depth of u/cb component for distortion computation 58 | OutputBitDepthCmp2=10 # bit depth of v/cr component for distortion computation 59 | OutputColorSpace=0 # 0: CM_YUV 60 | # 1: CM_RGB 61 | # 2: CM_XYZ 62 | 63 | OutputColorPrimaries=0 # 0: BT.709 64 | # 1: BT.2020 65 | # 2: P3D60 66 | # 3: P3D65 67 | # 4: None 68 | 69 | InputFileHeader=0 # Input File header to be skipped (in bytes). 
70 | StartFrame=0 # Number of frames to skip before start 71 | 72 | SilentMode=1 # Enable Silent mode 73 | 74 | OutputTransferFunction=12 # Transfer Function 75 | # 0: NULL (no new TF applied) 76 | # 1: PQ 77 | # 2: PH 78 | -------------------------------------------------------------------------------- /convexhull_framework/bin/HDRConvert: -------------------------------------------------------------------------------- 1 | /home/thomas/sandbox/HDRTools/bin/HDRConvert -------------------------------------------------------------------------------- /convexhull_framework/bin/Research-v1.0.4-anchor.xlsm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xiph/awcy/89efab681545dcd8f47665a283bea10bb3ca4af3/convexhull_framework/bin/Research-v1.0.4-anchor.xlsm -------------------------------------------------------------------------------- /convexhull_framework/bin/aomdec: -------------------------------------------------------------------------------- 1 | /home/thomas/sandbox/aom/aom_build/aomdec -------------------------------------------------------------------------------- /convexhull_framework/bin/aomenc: -------------------------------------------------------------------------------- 1 | /home/thomas/sandbox/aom/aom_build/aomenc -------------------------------------------------------------------------------- /convexhull_framework/bin/vbaProject-AV2.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xiph/awcy/89efab681545dcd8f47665a283bea10bb3ca4af3/convexhull_framework/bin/vbaProject-AV2.bin -------------------------------------------------------------------------------- /convexhull_framework/bin/vmaf: -------------------------------------------------------------------------------- 1 | /home/thomas/sandbox/vmaf/libvmaf/build/tools/vmaf -------------------------------------------------------------------------------- /convexhull_framework/bin/vmaf_v0.6.1.pkl: -------------------------------------------------------------------------------- 1 | (dp0 2 | S'param_dict' 3 | p1 4 | (dp2 5 | S'C' 6 | p3 7 | F4.0 8 | sS'score_transform' 9 | p4 10 | (dp5 11 | S'p2' 12 | p6 13 | F-0.00705305 14 | sS'out_gte_in' 15 | p7 16 | S'true' 17 | p8 18 | sS'p0' 19 | p9 20 | F1.70674692 21 | sS'p1' 22 | p10 23 | F1.72643844 24 | ssS'norm_type' 25 | p11 26 | S'clip_0to1' 27 | p12 28 | sS'score_clip' 29 | p13 30 | (lp14 31 | F0.0 32 | aF100.0 33 | asS'nu' 34 | p15 35 | F0.9 36 | sS'gamma' 37 | p16 38 | F0.04 39 | ssS'model_dict' 40 | p17 41 | (dp18 42 | S'model' 43 | p19 44 | Nsg4 45 | g5 46 | sg11 47 | S'linear_rescale' 48 | p20 49 | sg13 50 | g14 51 | sS'feature_names' 52 | p21 53 | (lp22 54 | S'VMAF_feature_adm2_score' 55 | p23 56 | aS'VMAF_feature_motion2_score' 57 | p24 58 | aS'VMAF_feature_vif_scale0_score' 59 | p25 60 | aS'VMAF_feature_vif_scale1_score' 61 | p26 62 | aS'VMAF_feature_vif_scale2_score' 63 | p27 64 | aS'VMAF_feature_vif_scale3_score' 65 | p28 66 | asS'intercepts' 67 | p29 68 | (lp30 69 | F-0.3092981927591963 70 | aF-1.7993968597186747 71 | aF-0.003017198086831897 72 | aF-0.1728125095425364 73 | aF-0.5294309090081222 74 | aF-0.7577185792093722 75 | aF-1.083428597549764 76 | asS'model_type' 77 | p31 78 | S'LIBSVMNUSVR' 79 | p32 80 | sS'slopes' 81 | p33 82 | (lp34 83 | F0.012020766332648465 84 | aF2.8098077502505414 85 | aF0.06264407466686016 86 | aF1.222763456258933 87 | aF1.5360318811084146 88 | aF1.7620864995501058 89 | aF2.08656468286432 90 | asS'feature_dict' 91 
| p35 92 | (dp36 93 | S'VMAF_feature' 94 | p37 95 | (lp38 96 | S'vif_scale0' 97 | p39 98 | aS'vif_scale1' 99 | p40 100 | aS'vif_scale2' 101 | p41 102 | aS'vif_scale3' 103 | p42 104 | aS'adm2' 105 | p43 106 | aS'motion2' 107 | p44 108 | asss. -------------------------------------------------------------------------------- /convexhull_framework/src/AWCYConvexHullTest.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ## Copyright (c) 2019, Alliance for Open Media. All rights reserved 3 | ## 4 | ## This source code is subject to the terms of the BSD 2 Clause License and 5 | ## the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License 6 | ## was not distributed with this source code in the LICENSE file, you can 7 | ## obtain it at www.aomedia.org/license/software. If the Alliance for Open 8 | ## Media Patent License 1.0 was not distributed with this source code in the 9 | ## PATENTS file, you can obtain it at www.aomedia.org/license/patent. 10 | ## 11 | __author__ = "maggie.sun@intel.com, ryan.lei@intel.com, td@videolan.org" 12 | 13 | import os 14 | import sys 15 | import xlsxwriter 16 | import argparse 17 | import numpy as np 18 | import scipy.interpolate 19 | 20 | from EncDecUpscale import Run_EncDec_Upscale, GetBsReconFileName 21 | from VideoScaler import GetDownScaledOutFile, DownScaling 22 | from CalculateQualityMetrics import CalculateQualityMetric, GatherQualityMetrics 23 | from Utils import GetShortContentName, CreateChart_Scatter,\ 24 | AddSeriesToChart_Scatter, InsertChartsToSheet, CreateNewSubfolder,\ 25 | SetupLogging, UpdateChart, AddSeriesToChart_Scatter_Rows,\ 26 | Cleanfolder, CreateClipList, Clip, GatherPerfInfo, GetEncLogFile, \ 27 | GetRDResultCsvFile, GatherPerframeStat 28 | from PostAnalysis_Summary import GenerateSumRDExcelFile,\ 29 | GenerateSumCvxHullExcelFile 30 | from ScalingTest import Run_Scaling_Test, SaveScalingResultsToExcel 31 | import Utils 32 | from operator import itemgetter 33 | from Config import LogLevels, FrameNum, QPs, CvxH_WtCols,\ 34 | CvxH_WtRows, QualityList, LineColors, SummaryOutPath, WorkPath, \ 35 | Path_RDResults, DnScalingAlgos, UpScalingAlgos, ConvexHullColor, \ 36 | EncodeMethods, CodecNames, LoggerName, DnScaleRatio, TargetQtyMetrics, \ 37 | CvxHDataRows, CvxHDataStartRow, CvxHDataStartCol, CvxHDataNum, \ 38 | Int_ConvexHullColor, EnablePreInterpolation 39 | import ConvexHullTest 40 | from ConvexHullTest import SaveConvexHullResultsToExcel 41 | 42 | if __name__ == "__main__": 43 | global Function, KeepUpscaledOutput, SaveMemory, LogLevel, CodecName,\ 44 | EncodeMethod, EncodePreset, LogCmdOnly 45 | parser = argparse.ArgumentParser(description='Produce convex hull bd-rate report') 46 | parser.add_argument('run',nargs=1,help='Run folders to compare') 47 | parser.add_argument('-l', "--LoggingLevel", dest='LogLevel', type=int, 48 | default=3, choices=range(len(LogLevels)), metavar='', 49 | help="logging level: 0:No Logging, 1: Critical, 2: Error," 50 | " 3: Warning, 4: Info, 5: Debug") 51 | args = parser.parse_args() 52 | LogCmdOnly = False 53 | Path_TestLog = args.run[0] 54 | 55 | SetupLogging(args.LogLevel, LogCmdOnly, LoggerName, Path_TestLog) 56 | clip_list = CreateClipList('AS') 57 | EncodeMethod = 'aom' 58 | ConvexHullTest.EncodeMethod = EncodeMethod 59 | CodecName = 'av1' 60 | ConvexHullTest.CodecName = CodecName 61 | test_cfg = 'AS' 62 | EncodePreset = 0 63 | ConvexHullTest.EncodePreset = EncodePreset 64 | ConvexHullTest.Path_Bitstreams = 
os.path.join(args.run[0], 'aomctc-a1-4k-as') 65 | ConvexHullTest.Path_QualityLog = ConvexHullTest.Path_Bitstreams 66 | ConvexHullTest.Path_PerfLog = ConvexHullTest.Path_Bitstreams 67 | ConvexHullTest.Path_RDResults = ConvexHullTest.Path_Bitstreams 68 | 69 | filename = "RDResults_%s_%s_%s_Preset_%s.csv" % \ 70 | (EncodeMethod, CodecName, test_cfg, EncodePreset) 71 | csv_file = os.path.join(args.run[0], filename) 72 | filename = "Perframe_RDResults_%s_%s_%s_Preset_%s.csv" % \ 73 | (EncodeMethod, CodecName, test_cfg, EncodePreset) 74 | perframe_csvfile = os.path.join(args.run[0], filename) 75 | 76 | csv = open(csv_file, "wt") 77 | csv.write("TestCfg,EncodeMethod,CodecName,EncodePreset,Class,Res,Name,FPS," \ 78 | "Bit Depth,QP,Bitrate(kbps)") 79 | for qty in QualityList: 80 | csv.write(',' + qty) 81 | csv.write(",EncT[s],DecT[s],EncT[h]\n") 82 | 83 | perframe_csv = open(perframe_csvfile, 'wt') 84 | perframe_csv.write("TestCfg,EncodeMethod,CodecName,EncodePreset,Class,Res,Name,FPS," \ 85 | "Bit Depth,QP,POC,FrameType,qindex,FrameSize") 86 | for qty in QualityList: 87 | if (qty != "Overall_PSNR" and qty != "Overall_APSNR" and not qty.startswith("APSNR")): 88 | perframe_csv.write(',' + qty) 89 | perframe_csv.write('\n') 90 | 91 | for clip in clip_list: 92 | SaveConvexHullResultsToExcel(clip, DnScalingAlgos, UpScalingAlgos, csv, perframe_csv, 93 | EnablePreInterpolation) 94 | csv.close() 95 | perframe_csv.close() 96 | -------------------------------------------------------------------------------- /convexhull_framework/src/CalcBDRate.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ## Copyright (c) 2019, Alliance for Open Media. All rights reserved 3 | ## 4 | ## This source code is subject to the terms of the BSD 2 Clause License and 5 | ## the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License 6 | ## was not distributed with this source code in the LICENSE file, you can 7 | ## obtain it at www.aomedia.org/license/software. If the Alliance for Open 8 | ## Media Patent License 1.0 was not distributed with this source code in the 9 | ## PATENTS file, you can obtain it at www.aomedia.org/license/patent. 10 | ## 11 | __author__ = "maggie.sun@intel.com, ryan.lei@intel.com" 12 | 13 | import numpy as np 14 | import math 15 | import scipy.interpolate 16 | import logging 17 | from Config import LoggerName 18 | from operator import itemgetter 19 | from Utils import plot_rd_curve 20 | 21 | subloggername = "CalcBDRate" 22 | loggername = LoggerName + '.' + '%s' % subloggername 23 | logger = logging.getLogger(loggername) 24 | 25 | def non_decreasing(L): 26 | return all(x<=y for x, y in zip(L, L[1:])) 27 | 28 | def check_monotonicity(RDPoints): 29 | ''' 30 | check if the input list of RD points are monotonic, assuming the input 31 | has been sorted in the quality value non-decreasing order. expect the bit 32 | rate should also be in the non-decreasing order 33 | ''' 34 | br = [RDPoints[i][0] for i in range(len(RDPoints))] 35 | qty = [RDPoints[i][1] for i in range(len(RDPoints))] 36 | return non_decreasing(br) and non_decreasing(qty) 37 | 38 | def filter_vmaf_non_monotonic(br_qty_pairs): 39 | ''' 40 | To solve the problem with VMAF non-monotonicity in a flat (saturated) 41 | region of the curve, if VMAF non-monotonicity happens at VMAF value 42 | 99.5 or above, the non-monotonic value and the values corresponding 43 | to bitrates higher than the non-monotonic value are excluded from the 44 | BD-rate calculation. 
The VMAF BD-rate number is still reported and 45 | used in the VMAF metric average. 46 | ''' 47 | #first sort input RD pairs by bit rate 48 | out_br_qty_pairs = [] 49 | br_qty_pairs.sort(key = itemgetter(0, 1)) 50 | for i in range(len(br_qty_pairs)): 51 | if (i != 0 and 52 | br_qty_pairs[i][0] >= out_br_qty_pairs[-1][0] and 53 | br_qty_pairs[i][1] < out_br_qty_pairs[-1][1] and 54 | out_br_qty_pairs[-1][1] >= 99.5): 55 | break 56 | else: 57 | out_br_qty_pairs.append(br_qty_pairs[i]) 58 | return out_br_qty_pairs 59 | 60 | # BJONTEGAARD Bjontegaard metric 61 | # Calculation is adapted from Google implementation 62 | # PCHIP method - Piecewise Cubic Hermite Interpolating Polynomial interpolation 63 | def BD_RATE(qty_type, br1, qtyMtrc1, br2, qtyMtrc2): 64 | brqtypairs1 = []; brqtypairs2 = [] 65 | for i in range(min(len(qtyMtrc1), len(br1))): 66 | if (br1[i] != '' and qtyMtrc1[i] != ''): 67 | brqtypairs1.append((br1[i], qtyMtrc1[i])) 68 | for i in range(min(len(qtyMtrc2), len(br2))): 69 | if (br2[i] != '' and qtyMtrc2[i] != ''): 70 | brqtypairs2.append((br2[i], qtyMtrc2[i])) 71 | 72 | if (qty_type == 'VMAF_Y' or qty_type == 'VMAF_Y-NEG'): 73 | brqtypairs1 = filter_vmaf_non_monotonic(brqtypairs1) 74 | brqtypairs2 = filter_vmaf_non_monotonic(brqtypairs2) 75 | 76 | # sort the pair based on quality metric values in increasing order 77 | # if quality metric values are the same, then sort the bit rate in increasing order 78 | brqtypairs1.sort(key = itemgetter(1, 0)) 79 | brqtypairs2.sort(key = itemgetter(1, 0)) 80 | 81 | rd1_monotonic = check_monotonicity(brqtypairs1) 82 | rd2_monotonic = check_monotonicity(brqtypairs2) 83 | if (rd1_monotonic == False or rd2_monotonic == False): 84 | return "Non-monotonic Error" 85 | 86 | logbr1 = [math.log(x[0]) for x in brqtypairs1] 87 | qmetrics1 = [100.0 if x[1] == float('inf') else x[1] for x in brqtypairs1] 88 | logbr2 = [math.log(x[0]) for x in brqtypairs2] 89 | qmetrics2 = [100.0 if x[1] == float('inf') else x[1] for x in brqtypairs2] 90 | 91 | if not brqtypairs1 or not brqtypairs2: 92 | logger.info("one of input lists is empty!") 93 | return 0.0 94 | 95 | # remove duplicated quality metric value, the RD point with higher bit rate is removed 96 | dup_idx = [i for i in range(1, len(qmetrics1)) if qmetrics1[i - 1] == qmetrics1[i]] 97 | for idx in sorted(dup_idx, reverse=True): 98 | del qmetrics1[idx] 99 | del logbr1[idx] 100 | dup_idx = [i for i in range(1, len(qmetrics2)) if qmetrics2[i - 1] == qmetrics2[i]] 101 | for idx in sorted(dup_idx, reverse=True): 102 | del qmetrics2[idx] 103 | del logbr2[idx] 104 | 105 | # find max and min of quality metrics 106 | min_int = max(min(qmetrics1), min(qmetrics2)) 107 | max_int = min(max(qmetrics1), max(qmetrics2)) 108 | if min_int >= max_int: 109 | logger.info("no overlap from input 2 lists of quality metrics!") 110 | return 0.0 111 | 112 | # generate samples between max and min of quality metrics 113 | lin = np.linspace(min_int, max_int, num=100, retstep=True) 114 | interval = lin[1] 115 | samples = lin[0] 116 | 117 | # interpolation 118 | v1 = scipy.interpolate.pchip_interpolate(qmetrics1, logbr1, samples) 119 | v2 = scipy.interpolate.pchip_interpolate(qmetrics2, logbr2, samples) 120 | 121 | # Calculate the integral using the trapezoid method on the samples. 
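    # The two integrals below accumulate log(bitrate) over the overlapping
    # quality interval [min_int, max_int]; the Bjontegaard delta rate is then
    #   BD-rate = (exp((int2 - int1) / (max_int - min_int)) - 1) * 100
    # i.e. the average percent bitrate difference of curve 2 vs. curve 1
    # at equal quality, which is exactly what avg_exp_diff/avg_diff compute.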
122 | int1 = np.trapz(v1, dx=interval) 123 | int2 = np.trapz(v2, dx=interval) 124 | 125 | # find avg diff 126 | avg_exp_diff = (int2 - int1) / (max_int - min_int) 127 | avg_diff = (math.exp(avg_exp_diff) - 1) * 100 128 | 129 | return avg_diff 130 | 131 | ''' 132 | if __name__ == "__main__": 133 | br1 = [9563.04, 6923.28, 4894.8, 3304.32, 2108.4, 1299.84] 134 | #qty1 = [50.0198, 46.9709, 43.4791, 39.6659, 35.8063, 32.3055] 135 | #qty1 = [50.0198, 46.9709, 43.4791, 48.0000, 35.8063, 32.3055] 136 | qty1 = [99.8198, 99.7709, 98.4791, 99.5000, 98.8063, 98.3055] 137 | br2 = [9758.88, 7111.68, 5073.36, 3446.4, 2178, 1306.56] 138 | #qty2 = [49.6767, 46.7027, 43.2038, 39.297, 35.2944, 31.5938] 139 | qty2 = [99.8767, 99.7027, 99.2038, 99.200, 98.2944, 97.5938] 140 | qty_type = 'VMAF-Y' 141 | 142 | plot_rd_curve(br1, qty1, qty_type, 'r', '-', 'o') 143 | plot_rd_curve(br2, qty2, qty_type, 'b', '-', '*') 144 | plt.show() 145 | 146 | bdrate = BD_RATE('VMAF_Y', br1, qty1, br2, qty2) 147 | if bdrate != 'Non-monotonic Error': 148 | print("bdrate calculated is %3.3f%%" % bdrate) 149 | else: 150 | print("there is Non-monotonic Error in bdrate calculation") 151 | ''' 152 | -------------------------------------------------------------------------------- /convexhull_framework/src/CalcQtyWithVmafTool.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ## Copyright (c) 2019, Alliance for Open Media. All rights reserved 3 | ## 4 | ## This source code is subject to the terms of the BSD 2 Clause License and 5 | ## the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License 6 | ## was not distributed with this source code in the LICENSE file, you can 7 | ## obtain it at www.aomedia.org/license/software. If the Alliance for Open 8 | ## Media Patent License 1.0 was not distributed with this source code in the 9 | ## PATENTS file, you can obtain it at www.aomedia.org/license/patent. 10 | ## 11 | __author__ = "maggie.sun@intel.com, ryan.lei@intel.com" 12 | 13 | import os 14 | import re 15 | import logging 16 | import math 17 | from Config import BinPath, LoggerName, VMAF 18 | from Utils import GetShortContentName, ExecuteCmd 19 | 20 | subloggername = "CalcQtyMetrics_VMAFTool" 21 | loggername = LoggerName + '.' 
+ '%s' % subloggername 22 | logger = logging.getLogger(loggername) 23 | 24 | Model_Pkg_File = os.path.join(BinPath, 'vmaf_v0.6.1.pkl') 25 | VMAFMetricsFullList = ['VMAF_Y','VMAF_Y-NEG','PSNR_Y','PSNR_U','PSNR_V','SSIM_Y(dB)', 26 | 'MS-SSIM_Y(dB)','PSNR-HVS','CIEDE2000','APSNR_Y','APSNR_U','APSNR_V'] 27 | 28 | def ParseVMAFLogFile(vmaf_log): 29 | floats = len(VMAFMetricsFullList) * [0.0] 30 | per_frame_log = [] 31 | flog = open(vmaf_log, 'r') 32 | for line in flog: 33 | m = re.search(r" 94 | m = re.search(r"aggregate_metrics\s+apsnr_y=\"(\d+\.?\d*)\"\s+apsnr_cb=\"(\d+\.?\d*)\"\s+apsnr_cr=\"(\d+\.?\d*)\"", line) 95 | if m: 96 | floats[9] = m.group(1) 97 | floats[10] = m.group(2) 98 | floats[11] = m.group(3) 99 | flog.close() 100 | floats = [float(i) for i in floats] 101 | print_str = "VMAF quality metrics: " 102 | for metrics, idx in zip(VMAFMetricsFullList, range(len(VMAFMetricsFullList))): 103 | print_str += "%s = %2.5f, " % (metrics, floats[idx]) 104 | logger.info(print_str) 105 | 106 | return floats[0:len(VMAFMetricsFullList)], per_frame_log 107 | 108 | 109 | def GetVMAFLogFile(recfile, path): 110 | filename = recfile + '-libvmaf.xml' 111 | file = os.path.join(path, filename) 112 | return file 113 | 114 | ################################################################################ 115 | ##################### Exposed Functions ######################################## 116 | def VMAF_CalQualityMetrics(origfile, recfile, fmt, num, w, h, bit_depth, 117 | logfilePath, LogCmdOnly=False): 118 | vmaf_log = GetVMAFLogFile(recfile, logfilePath) 119 | args = " -r %s -d %s --aom_ctc v1.0 -q --threads 4 -o %s" \ 120 | % (origfile, recfile, vmaf_log) 121 | cmd = VMAF + args 122 | ExecuteCmd(cmd, LogCmdOnly) 123 | 124 | def VMAF_GatherQualityMetrics(recfile, logfilePath): 125 | vmaf_log = GetVMAFLogFile(recfile, logfilePath) 126 | results, per_frame_log = ParseVMAFLogFile(vmaf_log) 127 | return results, per_frame_log 128 | -------------------------------------------------------------------------------- /convexhull_framework/src/CalculateQualityMetrics.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ## Copyright (c) 2019, Alliance for Open Media. All rights reserved 3 | ## 4 | ## This source code is subject to the terms of the BSD 2 Clause License and 5 | ## the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License 6 | ## was not distributed with this source code in the LICENSE file, you can 7 | ## obtain it at www.aomedia.org/license/software. If the Alliance for Open 8 | ## Media Patent License 1.0 was not distributed with this source code in the 9 | ## PATENTS file, you can obtain it at www.aomedia.org/license/patent. 10 | ## 11 | __author__ = "maggie.sun@intel.com, ryan.lei@intel.com" 12 | 13 | import logging 14 | from Config import QualityList, LoggerName 15 | import Utils 16 | from CalcQtyWithVmafTool import VMAF_CalQualityMetrics, VMAF_GatherQualityMetrics,\ 17 | VMAFMetricsFullList 18 | 19 | subloggername = "CalcQtyMetrics" 20 | loggername = LoggerName + '.' 
+ '%s' % subloggername 21 | logger = logging.getLogger(loggername) 22 | 23 | def CalculateQualityMetric(src_file, framenum, reconYUV, fmt, width, height, 24 | bit_depth, logfilePath, LogCmdOnly=False): 25 | Utils.CmdLogger.write("::Quality Metrics\n") 26 | VMAF_CalQualityMetrics(src_file, reconYUV, fmt, framenum, width, height, 27 | bit_depth, logfilePath, LogCmdOnly) 28 | 29 | def GatherQualityMetrics(reconYUV, logfilePath): 30 | qresult, per_frame_log = VMAF_GatherQualityMetrics(reconYUV, logfilePath) 31 | results = [] 32 | for metric in QualityList: 33 | if metric in VMAFMetricsFullList: 34 | indx = VMAFMetricsFullList.index(metric) 35 | results.append(qresult[indx]) 36 | else: 37 | logger.error("invalid quality metrics in QualityList") 38 | results.append(0.0) 39 | 40 | return results, per_frame_log 41 | -------------------------------------------------------------------------------- /convexhull_framework/src/Config.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ## Copyright (c) 2019, Alliance for Open Media. All rights reserved 3 | ## 4 | ## This source code is subject to the terms of the BSD 2 Clause License and 5 | ## the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License 6 | ## was not distributed with this source code in the LICENSE file, you can 7 | ## obtain it at www.aomedia.org/license/software. If the Alliance for Open 8 | ## Media Patent License 1.0 was not distributed with this source code in the 9 | ## PATENTS file, you can obtain it at www.aomedia.org/license/patent. 10 | ## 11 | __author__ = "maggie.sun@intel.com, ryan.lei@intel.com" 12 | 13 | import os 14 | import platform 15 | import AV2CTCVideo 16 | 17 | #TEST_CONFIGURATIONS = ["RA","LD", "AS"] 18 | TEST_CONFIGURATIONS = ["LD", "RA", "AI", "STILL"] 19 | 20 | ###################################### 21 | # configuration settings 22 | ###################################### 23 | RootPath = ".." 
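# NOTE: RootPath is relative, so BinPath/WorkPath and the analysis paths below resolve against the current working directory; the scripts are assumed to be launched from convexhull_framework/src, making ".." the framework root.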
24 | BinPath = os.path.join(RootPath, 'bin') 25 | WorkPath = os.path.join(RootPath, 'test') 26 | SMOKE_TEST = False # override some parameters to do a quick smoke test 27 | FrameNum = { 28 | "LD" : 130, 29 | "RA" : 130, 30 | "AI" : 30, 31 | "AS" : 130, 32 | "STILL" : 1, 33 | } 34 | EnableTimingInfo = True 35 | UsePerfUtil = False 36 | Platform = platform.system() 37 | 38 | ############ test contents ####################################### 39 | ContentPath = "/mnt/runs/sets/" 40 | ############## Scaling settings ############################################ 41 | # down scaling ratio 42 | DnScaleRatio = [1.0, 1.5, 2.0, 3.0, 4.0, 6.0] # downscale ratio 43 | # down- and up-scaling algorithms; the two lists must be the same size 44 | DnScalingAlgos = ['lanczos'] #['bicubic', 'bilinear', 'gauss', 'lanczos', 'sinc'] 45 | UpScalingAlgos = ['lanczos'] #['bicubic', 'bilinear', 'gauss', 'lanczos', 'sinc'] 46 | 47 | if SMOKE_TEST: 48 | DnScalingAlgos = ['bicubic', 'lanczos', 'sinc'] 49 | UpScalingAlgos = ['bicubic', 'lanczos', 'sinc'] 50 | HDRToolsConfigFileTemplate = os.path.join(BinPath, 'HDRConvScalerY4MFile.cfg') 51 | HDRConvert = os.path.join(BinPath, 'HDRConvert') 52 | 53 | ##################### Encode Config ######################################## 54 | EncodeMethods = ["aom", "svt"] 55 | CodecNames = ["av1"] 56 | SUFFIX = {"av1": ".obu"} 57 | FFMPEG = os.path.join(BinPath, 'ffmpeg.exe') 58 | AOMENC = os.path.join(BinPath, 'aomenc') 59 | SVTAV1 = os.path.join(BinPath, 'SvtAv1EncApp.exe') 60 | AV1DEC = os.path.join(BinPath, 'av1dec') 61 | AOMDEC = os.path.join(BinPath, 'aomdec') 62 | QPs = { 63 | "LD" : [23, 31, 39, 47, 55, 63], 64 | "RA" : [23, 31, 39, 47, 55, 63], 65 | "AI" : [15, 23, 31, 39, 47, 55], 66 | "AS" : [110, 135, 160, 185, 210, 235], 67 | "STILL" : [15, 23, 31, 39, 47, 55], 68 | } 69 | MIN_GOP_LENGTH = 16 70 | AS_DOWNSCALE_ON_THE_FLY = False 71 | 72 | ######################## quality evaluation config ############################# 73 | QualityList = ['PSNR_Y','PSNR_U','PSNR_V','SSIM_Y(dB)','MS-SSIM_Y(dB)','VMAF_Y', 74 | 'VMAF_Y-NEG','PSNR-HVS','CIEDE2000','APSNR_Y','APSNR_U','APSNR_V'] 75 | VMAF = os.path.join(BinPath, 'vmaf.exe') 76 | CalcBDRateInExcel = True 77 | EnablePreInterpolation = True 78 | 79 | ######################## config for exporting data to excel ################# 80 | #https://xlsxwriter.readthedocs.io/working_with_colors.html#colors 81 | # line color used, number of colors >= len(DnScaledRes) 82 | LineColors = ['blue', 'red', 'green', 'orange', 'pink', 'yellow'] 83 | ConvexHullColor = 'white' 84 | Int_ConvexHullColor = 'cyan' 85 | 86 | # find out QP/Resolution with specified qty metrics 87 | TargetQtyMetrics = {'VMAF_Y': [60, 70, 80, 90], 88 | 'PSNR_Y': [30, 35, 38, 40, 41]} 89 | 90 | # format for exported excel of convexhull test 91 | # if the 3 variables below are modified, the function 92 | # SaveConvexHullResultsToExcel needs to be updated accordingly 93 | CvxH_startCol = 1; CvxH_startRow = 2; CvxH_colInterval = 2 94 | CvxH_WtCols = [(CvxH_colInterval + 1 + len(QualityList)) * i + CvxH_startCol 95 | for i in range(len(DnScaleRatio))] 96 | CvxH_WtRows = [CvxH_startRow + i for i in range(len(QPs['AS']))] 97 | CvxH_WtLastCol = CvxH_WtCols[-1] + len(QualityList) 98 | CvxH_WtLastRow = CvxH_WtRows[-1] 99 | 100 | # format for writing convexhull curve data 101 | CvxHDataStartRow = CvxH_WtRows[-1] + 2; CvxHDataStartCol = 0 102 | CvxHDataNum = 7 # qty, bitrate, qp, resolution, int_qty, int_bitrate, plus 1 empty row as internal separator 103 | CvxHDataRows = [CvxHDataStartRow + 1 + CvxHDataNum * i for i in 
range(len(QualityList))] 104 | 105 | ######################## post analysis ######################################### 106 | PostAnalysis_Path = os.path.join(RootPath, 'analysis') 107 | Path_RDResults = os.path.join(PostAnalysis_Path, 'rdresult') 108 | SummaryOutPath = os.path.join(PostAnalysis_Path, 'summary') 109 | Path_ScalingResults = os.path.join(PostAnalysis_Path, 'scalingresult') 110 | # vba file needed when to calculate bdrate 111 | #VbaBinFile = os.path.join(BinPath, 'vbaProject_JVET-L0242.bin') 112 | VbaBinFile = os.path.join(BinPath, 'vbaProject-AV2.bin') 113 | 114 | # format for exported excel of scaling quality test 115 | # if to modify below 3 variables, need to modify function SaveScalingResultsToExcel 116 | # accordingly 117 | ScalQty_startCol = 6; ScalQty_startRow = 2; ScalQty_colInterval = 1 118 | ScalSumQty_startCol = 7 119 | ScalQty_WtCols = [(ScalQty_colInterval + 120 | len(QualityList)) * i + ScalQty_startCol 121 | for i in range(len(DnScalingAlgos))] 122 | ScalSumQty_WtCols = [(ScalQty_colInterval + 123 | len(QualityList)) * i + ScalQty_startCol + 1 124 | for i in range(len(DnScalingAlgos))] 125 | ######################## logging ######################################### 126 | LoggerName = "AV2CTC" 127 | LogLevels = ['NONE', 'CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'] 128 | -------------------------------------------------------------------------------- /convexhull_framework/src/EncDecUpscale.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ## Copyright (c) 2019, Alliance for Open Media. All rights reserved 3 | ## 4 | ## This source code is subject to the terms of the BSD 2 Clause License and 5 | ## the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License 6 | ## was not distributed with this source code in the LICENSE file, you can 7 | ## obtain it at www.aomedia.org/license/software. If the Alliance for Open 8 | ## Media Patent License 1.0 was not distributed with this source code in the 9 | ## PATENTS file, you can obtain it at www.aomedia.org/license/patent. 10 | ## 11 | __author__ = "maggie.sun@intel.com, ryan.lei@intel.com" 12 | 13 | import os 14 | from VideoEncoder import VideoEncode 15 | from VideoDecoder import VideoDecode 16 | from VideoScaler import UpScaling, GetDownScaledOutFile, GetUpScaledOutFile 17 | from Config import SUFFIX, LoggerName 18 | from Utils import GetShortContentName, Clip, GetEncLogFile, GetDecPerfFile, \ 19 | GetEncPerfFile 20 | import logging 21 | 22 | subloggername = "EncDecUpscale" 23 | loggername = LoggerName + '.' 
+ '%s' % subloggername 24 | logger = logging.getLogger(loggername) 25 | 26 | ################################################################################ 27 | ##################### Internal Helper Functions ################################ 28 | def GetBitstreamFile(method, codec, test_cfg, preset, yuvfile, qp, outpath): 29 | bs_suffix = SUFFIX[codec] 30 | Prefix_EncodeCfg = '_%s_%s_%s_Preset_%s' % (method, codec, test_cfg, preset) 31 | filename = GetShortContentName(yuvfile, False) + Prefix_EncodeCfg + "_QP_"\ 32 | + str(qp) + bs_suffix 33 | filename = os.path.join(outpath, filename) 34 | return filename 35 | 36 | def GetDecodedFile(bsfile, outpath, decode_to_yuv): 37 | suffix = ".yuv" if decode_to_yuv else ".y4m" 38 | filename = GetShortContentName(bsfile, False) + '_Decoded' + suffix 39 | decodedfile = os.path.join(outpath, filename) 40 | return decodedfile 41 | 42 | ################################################################################ 43 | ##################### Major Functions ########################################## 44 | def Encode(method, codec, preset, clip, test_cfg, qp, num, bs_path, perf_path, 45 | log_path, LogCmdOnly=False): 46 | bsfile = GetBitstreamFile(method, codec, test_cfg, preset, clip.file_path, 47 | qp, bs_path) 48 | enc_perf = GetEncPerfFile(bsfile, perf_path) 49 | enc_log = GetEncLogFile(bsfile, log_path) 50 | # call VideoEncoder to do the encoding 51 | VideoEncode(method, codec, clip, test_cfg, qp, num, bsfile, preset, enc_perf, 52 | enc_log, LogCmdOnly) 53 | return bsfile 54 | 55 | def Decode(method, test_cfg, codec, bsfile, path, perf_path, decode_to_yuv, LogCmdOnly=False): 56 | decodedfile = GetDecodedFile(bsfile, path, decode_to_yuv) 57 | dec_perf = GetDecPerfFile(bsfile, perf_path) 58 | #call VideoDecoder to do the decoding 59 | VideoDecode(method, test_cfg, codec, bsfile, decodedfile, dec_perf, decode_to_yuv, LogCmdOnly) 60 | return decodedfile 61 | 62 | def Run_EncDec_Upscale(method, codec, preset, clip, test_cfg, QP, num, outw, 63 | outh, path_bs, path_decoded, path_upscaled, path_cfg, 64 | path_perf, path_enc_log, upscale_algo, LogCmdOnly = False): 65 | logger.info("%s %s start encode file %s with QP = %d" % 66 | (method, codec, clip.file_name, QP)) 67 | bsFile = Encode(method, codec, preset, clip, test_cfg, QP, num, path_bs, 68 | path_perf, path_enc_log, LogCmdOnly) 69 | logger.info("start decode file %s" % os.path.basename(bsFile)) 70 | decodedYUV = Decode(method, test_cfg, codec, bsFile, path_decoded, path_perf, False, 71 | LogCmdOnly) 72 | logger.info("start upscale file %s" % os.path.basename(decodedYUV)) 73 | #hard code frame rate to 0 before upscaling. 
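# (with fps_num/fps_denom both 0, VideoScaler.GenerateCfgFile writes
# SourceRate=0.000 into the HDRConvert cfg, so the upscale step does not
# depend on the frame rate metadata currently produced by the decoder)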
74 | #TODO: change to real frame rate after decoder fix the issue 75 | dec_clip = Clip(GetShortContentName(decodedYUV, False) + ".y4m", 76 | decodedYUV, clip.file_class, clip.width, clip.height, 77 | clip.fmt, 0, 0, clip.bit_depth) 78 | upscaledYUV = UpScaling(dec_clip, num, outw, outh, path_upscaled, path_cfg, 79 | upscale_algo, LogCmdOnly) 80 | logger.info("finish Run Encode, Decode and Upscale") 81 | return upscaledYUV 82 | 83 | 84 | def GetBsReconFileName(encmethod, codecname, test_cfg, preset, clip, dw, dh, 85 | dnScAlgo, upScAlgo, qp, path_bs, ds_on_the_fly=True, ratio_idx=0): 86 | dsyuv_name = GetDownScaledOutFile(clip, dw, dh, path_bs, dnScAlgo, ds_on_the_fly, ratio_idx) 87 | # return bitstream file with absolute path 88 | bs = GetBitstreamFile(encmethod, codecname, test_cfg, preset, dsyuv_name, 89 | qp, path_bs) 90 | decoded = GetDecodedFile(bs, path_bs, False) 91 | ds_clip = Clip(GetShortContentName(decoded, False) + ".y4m", 92 | decoded, clip.file_class, dw, dh, clip.fmt, clip.fps_num, 93 | clip.fps_denom, clip.bit_depth) 94 | reconfilename = GetUpScaledOutFile(ds_clip, clip.width, clip.height, 95 | upScAlgo, path_bs) 96 | # return only Recon yuv file name w/o path 97 | reconfilename = GetShortContentName(reconfilename, False) + ".y4m" 98 | return bs, reconfilename 99 | -------------------------------------------------------------------------------- /convexhull_framework/src/VideoDecoder.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ## Copyright (c) 2019, Alliance for Open Media. All rights reserved 3 | ## 4 | ## This source code is subject to the terms of the BSD 2 Clause License and 5 | ## the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License 6 | ## was not distributed with this source code in the LICENSE file, you can 7 | ## obtain it at www.aomedia.org/license/software. If the Alliance for Open 8 | ## Media Patent License 1.0 was not distributed with this source code in the 9 | ## PATENTS file, you can obtain it at www.aomedia.org/license/patent. 
10 | ## 11 | __author__ = "maggie.sun@intel.com, ryan.lei@intel.com" 12 | 13 | import Utils 14 | from Config import AOMDEC, AV1DEC, EnableTimingInfo, Platform, UsePerfUtil 15 | from Utils import ExecuteCmd 16 | 17 | def DecodeWithAOM(test_cfg, infile, outfile, dec_perf, decode_to_yuv, LogCmdOnly=False): 18 | if decode_to_yuv: 19 | args = " --codec=av1 --summary --rawvideo -o %s %s" % (outfile, infile) 20 | else: 21 | args = " --codec=av1 --summary -o %s %s" % (outfile, infile) 22 | cmd = AOMDEC + args 23 | if EnableTimingInfo: 24 | if Platform == "Windows": 25 | cmd = "ptime " + cmd + " >%s"%dec_perf 26 | elif Platform == "Darwin": 27 | cmd = "gtime --verbose --output=%s "%dec_perf + cmd 28 | else: 29 | if UsePerfUtil: 30 | cmd = "3>%s perf stat --log-fd 3 "%dec_perf +cmd 31 | else: 32 | cmd = "/usr/bin/time --verbose --output=%s "%dec_perf + cmd 33 | 34 | ExecuteCmd(cmd, LogCmdOnly) 35 | 36 | def DecodeWithAV1(test_cfg, infile, outfile, dec_perf, decode_to_yuv, LogCmdOnly=False): 37 | if decode_to_yuv: 38 | args = " --codec=av1 --summary --rawvideo -o %s %s" % (outfile, infile) 39 | else: 40 | args = " --codec=av1 --summary -o %s %s" % (outfile, infile) 41 | cmd = AV1DEC + args 42 | if EnableTimingInfo: 43 | if Platform == "Windows": 44 | cmd = "ptime " + cmd + " >%s" % dec_perf 45 | elif Platform == "Darwin": 46 | cmd = "gtime --verbose --output=%s " % dec_perf + cmd 47 | else: 48 | if UsePerfUtil: 49 | cmd = "3>%s perf stat --log-fd 3 " % dec_perf + cmd 50 | else: 51 | cmd = "/usr/bin/time --verbose --output=%s " % dec_perf + cmd 52 | 53 | ExecuteCmd(cmd, LogCmdOnly) 54 | 55 | def VideoDecode(method, test_cfg, codec, infile, outfile, dec_perf, decode_to_yuv, LogCmdOnly=False): 56 | Utils.CmdLogger.write("::Decode\n") 57 | if codec == 'av1' and method == 'aom': 58 | DecodeWithAOM(test_cfg, infile, outfile, dec_perf, decode_to_yuv, LogCmdOnly) 59 | elif codec == 'av1' and method == 'svt': 60 | DecodeWithAV1(test_cfg, infile, outfile, dec_perf, decode_to_yuv, LogCmdOnly) 61 | else: 62 | raise ValueError("invalid parameter for decode.") 63 | -------------------------------------------------------------------------------- /convexhull_framework/src/VideoEncoder.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ## Copyright (c) 2019, Alliance for Open Media. All rights reserved 3 | ## 4 | ## This source code is subject to the terms of the BSD 2 Clause License and 5 | ## the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License 6 | ## was not distributed with this source code in the LICENSE file, you can 7 | ## obtain it at www.aomedia.org/license/software. If the Alliance for Open 8 | ## Media Patent License 1.0 was not distributed with this source code in the 9 | ## PATENTS file, you can obtain it at www.aomedia.org/license/patent. 
10 | ## 11 | __author__ = "maggie.sun@intel.com, ryan.lei@intel.com" 12 | 13 | import Utils 14 | from Config import AOMENC, SVTAV1, EnableTimingInfo, Platform, UsePerfUtil 15 | from Utils import ExecuteCmd 16 | 17 | def get_qindex_from_QP(QP): # map a 0-63 quantizer onto the 0-255 AV1 qindex scale 18 | quantizer_to_qindex = [ 19 | 0, 4, 8, 12, 16, 20, 24, 28, 32, 36, 40, 44, 48, 20 | 52, 56, 60, 64, 68, 72, 76, 80, 84, 88, 92, 96, 100, 21 | 104, 108, 112, 116, 120, 124, 128, 132, 136, 140, 144, 148, 152, 22 | 156, 160, 164, 168, 172, 176, 180, 184, 188, 192, 196, 200, 204, 23 | 208, 212, 216, 220, 224, 228, 232, 236, 240, 244, 249, 255] 24 | if (QP > 63): 25 | print(" QP %d is out of range (0 to 63), clamp to 63" % QP) 26 | return quantizer_to_qindex[63] 27 | return quantizer_to_qindex[QP] 28 | 29 | def EncodeWithAOM_AV1(clip, test_cfg, QP, framenum, outfile, preset, enc_perf, 30 | enc_log, LogCmdOnly=False): 31 | args = " --verbose --codec=av1 -v --psnr --obu --frame-parallel=0" \ 32 | " --cpu-used=%s --limit=%d --passes=1 --end-usage=q --i%s " \ 33 | " --use-fixed-qp-offsets=1 --deltaq-mode=0 " \ 34 | " --enable-tpl-model=0 --enable-keyframe-filtering=0 --fps=%d/%d " \ 35 | " --input-bit-depth=%d --bit-depth=%d --cq-level=%d -w %d -h %d" \ 36 | % (preset, framenum, clip.fmt, clip.fps_num, clip.fps_denom, 37 | clip.bit_depth, clip.bit_depth, QP, clip.width, clip.height) 38 | 39 | # For 4K clips, encode with 2 tile columns using two threads. 40 | # --tile-columns value is in log2. 41 | if (clip.width >= 3840 and clip.height >= 2160): 42 | args += " --tile-columns=1 --threads=2 --row-mt=0 " 43 | else: 44 | args += " --tile-columns=0 --threads=1 " 45 | 46 | if test_cfg == "AI" or test_cfg == "STILL": 47 | args += " --kf-min-dist=0 --kf-max-dist=0 " 48 | elif test_cfg == "RA" or test_cfg == "AS": 49 | args += " --min-gf-interval=16 --max-gf-interval=16 --gf-min-pyr-height=4" \ 50 | " --gf-max-pyr-height=4 --kf-min-dist=65 --kf-max-dist=65" \ 51 | " --lag-in-frames=19 --auto-alt-ref=1 " 52 | elif test_cfg == "LD": 53 | args += " --kf-min-dist=9999 --kf-max-dist=9999 --lag-in-frames=0" \ 54 | " --min-gf-interval=16 --max-gf-interval=16 --gf-min-pyr-height=4 " \ 55 | " --gf-max-pyr-height=4 --subgop-config-str=ld " 56 | else: 57 | print("Unsupported Test Configuration %s" % test_cfg) 58 | 59 | if (clip.file_class == 'G1' or clip.file_class == 'G2'): 60 | args += "--color-primaries=bt2020 --transfer-characteristics=smpte2084 "\ 61 | "--matrix-coefficients=bt2020ncl --chroma-sample-position=colocated " 62 | 63 | args += " -o %s %s" % (outfile, clip.file_path) 64 | cmd = AOMENC + args + "> %s 2>&1"%enc_log 65 | if (EnableTimingInfo): 66 | if Platform == "Windows": 67 | cmd = "ptime " + cmd + " >%s"%enc_perf 68 | elif Platform == "Darwin": 69 | cmd = "gtime --verbose --output=%s "%enc_perf + cmd 70 | else: 71 | if UsePerfUtil: 72 | cmd = "3>%s perf stat --log-fd 3 " % enc_perf + cmd 73 | else: 74 | cmd = "/usr/bin/time --verbose --output=%s "%enc_perf + cmd 75 | ExecuteCmd(cmd, LogCmdOnly) 76 | 77 | def EncodeWithSVT_AV1(clip, test_cfg, QP, framenum, outfile, preset, enc_perf, 78 | enc_log, LogCmdOnly=False): 79 | #TODO: update svt parameters 80 | args = " --preset %s --scm 2 --lookahead 0 -n %d " \ 81 | " --rc 0 -q %d -w %d -h %d --irefresh-type 2 "\ 82 | " --fps-num %d --fps-denom %d --input-depth %d " \ 83 | " --aq-mode 0 " \ 84 | % (str(preset), framenum, QP, clip.width, clip.height, 85 | clip.fps_num, clip.fps_denom, clip.bit_depth) 86 | 87 | # For 4K clips, encode with 2 tile columns. 88 | # --tile-columns value is in log2. 
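# (e.g. "--tile-columns 1" requests 2^1 = 2 tile columns; "--tile-columns 0" keeps a single column)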
89 | if (clip.width >= 3840 and clip.height >= 2160): 90 | args += " --tile-columns 1 " 91 | else: 92 | args += " --tile-columns 0 " 93 | 94 | if test_cfg == "AI" or test_cfg == "STILL": 95 | args += " --keyint 255 " 96 | elif test_cfg == "RA" or test_cfg == "AS": 97 | args += " --keyint 64 --hierarchical-levels 4 --pred-struct 2 " 98 | elif test_cfg == "LD": 99 | args += " --keyint 9999 --hierarchical-levels 4 --pred-struct 1 " 100 | else: 101 | print("Unsupported Test Configuration %s" % test_cfg) 102 | 103 | if (clip.file_class == 'G1' or clip.file_class == 'G2'): 104 | args += "--enable-hdr 1 " 105 | 106 | args += "-i %s -b %s"%(clip.file_path, outfile) 107 | cmd = SVTAV1 + args + "> %s 2>&1"%enc_log 108 | if EnableTimingInfo: 109 | if Platform == "Windows": 110 | cmd = "ptime " + cmd + " >%s"%enc_perf 111 | elif Platform == "Darwin": 112 | cmd = "gtime --verbose --output=%s "%enc_perf + cmd 113 | else: 114 | if UsePerfUtil: 115 | cmd = "3>%s perf stat --log-fd 3 " % enc_perf + cmd 116 | else: 117 | cmd = "/usr/bin/time --verbose --output=%s "%enc_perf + cmd 118 | ExecuteCmd(cmd, LogCmdOnly) 119 | 120 | def VideoEncode(EncodeMethod, CodecName, clip, test_cfg, QP, framenum, outfile, 121 | preset, enc_perf, enc_log, LogCmdOnly=False): 122 | Utils.CmdLogger.write("::Encode\n") 123 | if CodecName == 'av1': 124 | if EncodeMethod == "aom": 125 | EncodeWithAOM_AV1(clip, test_cfg, QP, framenum, outfile, preset, 126 | enc_perf, enc_log, LogCmdOnly) 127 | elif EncodeMethod == "svt": 128 | EncodeWithSVT_AV1(clip, test_cfg, QP, framenum, outfile, preset, 129 | enc_perf, enc_log, LogCmdOnly) 130 | else: 131 | raise ValueError("invalid parameter for encode.") 132 | else: 133 | raise ValueError("invalid parameter for encode.") 134 | -------------------------------------------------------------------------------- /convexhull_framework/src/VideoScaler.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ## Copyright (c) 2019, Alliance for Open Media. All rights reserved 3 | ## 4 | ## This source code is subject to the terms of the BSD 2 Clause License and 5 | ## the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License 6 | ## was not distributed with this source code in the LICENSE file, you can 7 | ## obtain it at www.aomedia.org/license/software. If the Alliance for Open 8 | ## Media Patent License 1.0 was not distributed with this source code in the 9 | ## PATENTS file, you can obtain it at www.aomedia.org/license/patent. 10 | ## 11 | __author__ = "maggie.sun@intel.com, ryan.lei@intel.com" 12 | 13 | import os 14 | import Utils 15 | import logging 16 | import fileinput 17 | from shutil import copyfile 18 | from Config import LoggerName, FFMPEG, HDRToolsConfigFileTemplate, HDRConvert, Platform, \ 19 | ContentPath 20 | from Utils import GetShortContentName, ExecuteCmd, md5 21 | from AV2CTCVideo import AS_Downscaled_Clips 22 | 23 | subloggername = "VideoScaler" 24 | loggername = LoggerName + '.' 
+ '%s' % subloggername 25 | logger = logging.getLogger(loggername) 26 | 27 | def GenerateCfgFile(clip, outw, outh, algo, outfile, num, configpath): 28 | contentBaseName = GetShortContentName(clip.file_name, False) 29 | cfg_filename = contentBaseName + ('_Scaled_%s_%dx%d.cfg'% (algo, outw, outh)) 30 | fmt = 1 31 | if (clip.fmt == '400'): 32 | fmt = 0 33 | elif (clip.fmt == '420'): 34 | fmt = 1 35 | elif (clip.fmt == '422'): 36 | fmt = 2 37 | elif (clip.fmt == '444'): 38 | fmt = 3 39 | 40 | fps = 0 41 | if (clip.fps_num == 0): 42 | fps = 0 43 | else: 44 | fps = (float)(clip.fps_num / clip.fps_denom) 45 | 46 | cfgfile = os.path.join(configpath, cfg_filename) 47 | copyfile(HDRToolsConfigFileTemplate, cfgfile) 48 | fp = fileinput.input(cfgfile, inplace=1) 49 | for line in fp: 50 | if 'SourceFile=' in line: 51 | line = 'SourceFile="%s"\n' % clip.file_path 52 | if 'OutputFile=' in line: 53 | line = 'OutputFile="%s"\n' % outfile 54 | if 'SourceWidth=' in line: 55 | line = 'SourceWidth=%d\n' % clip.width 56 | if 'SourceHeight=' in line: 57 | line = 'SourceHeight=%d\n' % clip.height 58 | if 'OutputWidth=' in line: 59 | line = 'OutputWidth=%d\n' % outw 60 | if 'OutputHeight=' in line: 61 | line = 'OutputHeight=%d\n' % outh 62 | if 'SourceRate=' in line: 63 | line = 'SourceRate=%4.3f\n' % fps 64 | if 'SourceChromaFormat=' in line: 65 | line = 'SourceChromaFormat=%d\n' % fmt 66 | if 'SourceBitDepthCmp0=' in line: 67 | line = 'SourceBitDepthCmp0=%d\n' % clip.bit_depth 68 | if 'SourceBitDepthCmp1=' in line: 69 | line = 'SourceBitDepthCmp1=%d\n' % clip.bit_depth 70 | if 'SourceBitDepthCmp2=' in line: 71 | line = 'SourceBitDepthCmp2=%d\n' % clip.bit_depth 72 | if 'OutputRate=' in line: 73 | line = 'OutputRate=%4.3f\n' % fps 74 | if 'OutputChromaFormat=' in line: 75 | line = 'OutputChromaFormat=%d\n' % fmt 76 | if 'OutputBitDepthCmp0=' in line: 77 | line = 'OutputBitDepthCmp0=%d\n' % clip.bit_depth 78 | if 'OutputBitDepthCmp1=' in line: 79 | line = 'OutputBitDepthCmp1=%d\n' % clip.bit_depth 80 | if 'OutputBitDepthCmp2=' in line: 81 | line = 'OutputBitDepthCmp2=%d\n' % clip.bit_depth 82 | if 'NumberOfFrames=' in line: 83 | line = 'NumberOfFrames=%d\n' % num 84 | print(line, end='') 85 | fp.close() 86 | return cfgfile 87 | 88 | def RescaleWithHDRTool(clip, outw, outh, algo, outfile, num, cfg_path, 89 | LogCmdOnly = False): 90 | cfg_file = GenerateCfgFile(clip, outw, outh, algo, outfile, num, cfg_path) 91 | args = " -f %s" % cfg_file 92 | cmd = HDRConvert + args 93 | ExecuteCmd(cmd, LogCmdOnly) 94 | 95 | def VideoRescaling(clip, num, outw, outh, outfile, algo, cfg_path, 96 | LogCmdOnly = False): 97 | RescaleWithHDRTool(clip, outw, outh, algo, outfile, num, cfg_path, LogCmdOnly) 98 | # add other tools for scaling here later 99 | 100 | #################################################################################### 101 | ##################### Major Functions ################################################ 102 | def GetDownScaledOutFile(clip, dnw, dnh, path, algo, ds_on_the_fly=True, ratio_idx=0): 103 | contentBaseName = GetShortContentName(clip.file_name, False) 104 | dnscaledout = clip.file_path 105 | if clip.width != dnw or clip.height != dnh: 106 | if ds_on_the_fly: 107 | filename = contentBaseName + ('_Scaled_%s_%dx%d.y4m' % (algo, dnw, dnh)) 108 | dnscaledout = os.path.join(path, filename) 109 | else: 110 | dnscaledout = ContentPath + "/A1_downscaled/" + \ 111 | AS_Downscaled_Clips[contentBaseName][ratio_idx-1] 112 | 113 | return dnscaledout 114 | 115 | def GetUpScaledOutFile(clip, outw, outh, algo, 
path): 116 | contentBaseName = GetShortContentName(clip.file_name, False) 117 | upscaledout = clip.file_path 118 | if clip.width != outw or clip.height != outh: 119 | filename = contentBaseName + ('_Scaled_%s_%dx%d.y4m' % (algo, outw, outh)) 120 | upscaledout = os.path.join(path, filename) 121 | return upscaledout 122 | 123 | def GetDownScaledMD5File(clip, dnw, dnh, path, algo): 124 | contentBaseName = GetShortContentName(clip.file_name, False) 125 | filename = contentBaseName + ".md5" 126 | if clip.width != dnw or clip.height != dnh: 127 | filename = contentBaseName + ('_Scaled_%s_%dx%d.md5' % (algo, dnw, dnh)) 128 | dnscaledmd5 = os.path.join(path, filename) 129 | return dnscaledmd5 130 | 131 | def CalculateDownScaledMD5(clip, dnw, dnh, path, algo, LogCmdOnly): 132 | dnScaleMD5 = GetDownScaledMD5File(clip, dnw, dnh, path, algo) 133 | if LogCmdOnly == 1: 134 | if Platform == "Linux": 135 | cmd = "md5sum %s &> %s" % (clip.file_path, dnScaleMD5) 136 | ExecuteCmd(cmd, 1) 137 | else: 138 | f = open(dnScaleMD5, 'wt') 139 | dnScaledOut = GetDownScaledOutFile(clip, dnw, dnh, path, algo) 140 | MD5 = md5(dnScaledOut) 141 | f.write(MD5) 142 | f.close() 143 | 144 | def DownScaling(clip, num, outw, outh, path, cfg_path, algo, LogCmdOnly = False): 145 | dnScaledOut = GetDownScaledOutFile(clip, outw, outh, path, algo) 146 | 147 | Utils.CmdLogger.write("::Downscaling\n") 148 | if (clip.width != outw or clip.height != outh): 149 | # call separate process to do the downscaling 150 | VideoRescaling(clip, num, outw, outh, dnScaledOut, algo, cfg_path, 151 | LogCmdOnly) 152 | 153 | CalculateDownScaledMD5(clip, outw, outh, path, algo, LogCmdOnly) 154 | 155 | return dnScaledOut 156 | 157 | def UpScaling(clip, num, outw, outh, path, cfg_path, algo, LogCmdOnly = False): 158 | upScaleOut = GetUpScaledOutFile(clip, outw, outh, algo, path) 159 | Utils.CmdLogger.write("::Upscaling\n") 160 | if (clip.width != outw or clip.height != outh): 161 | # call separate process to do the upscaling 162 | VideoRescaling(clip, num, outw, outh, upScaleOut, algo, cfg_path, 163 | LogCmdOnly) 164 | return upScaleOut 165 | -------------------------------------------------------------------------------- /create_test_branch.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # exit on failure 4 | set -e 5 | 6 | # exit on unassigned variable 7 | set -u 8 | 9 | COMMIT="$1" 10 | TESTNAME="$2" 11 | CODEC="$3" 12 | BRANCH=t-$(echo "${TESTNAME}" | sed "s/:/_/g") 13 | 14 | cd ${CODECS_SRC_DIR}/${CODEC} 15 | git reset --hard 16 | 17 | if [[ ${COMMIT} == "HEAD" ]];then 18 | echo "Fetching latest changes from origin, and checking out to current HEAD" 19 | git fetch origin 20 | git checkout origin/HEAD 21 | fi 22 | 23 | if git checkout ${COMMIT}; then 24 | echo "Commit found, skipping fetch." 25 | else 26 | echo "Checking for commit in remotes..." 
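# the commit may live on a remote that has not been fetched yet,
# so fetch everything before retrying the checkout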
27 | git fetch --all 28 | git checkout ${COMMIT} 29 | fi 30 | 31 | git checkout -b ${BRANCH} 32 | git clean -d -x -f 33 | 34 | cd ${APP_DIR} 35 | mkdir -p ${RUNS_DST_DIR}/${TESTNAME} 36 | 37 | ./build_codec.sh 38 | -------------------------------------------------------------------------------- /distortion.m: -------------------------------------------------------------------------------- 1 | #!/usr/bin/octave -qf 2 | 3 | warning("off","Octave:nested-functions-coerced"); 4 | 5 | args=argv(); 6 | 7 | if size(args,1)!=2 8 | printf("usage: ./distortion.m <rd_file> <bpp>\n"); 9 | return 10 | end 11 | 12 | rd1=load("-ascii",args{1}); 13 | chosen_rate_log = log(str2double(args{2})); 14 | 15 | rd1=flipud(sortrows(rd1,1)); 16 | 17 | rate1_log=log(rd1(:,3)*8./rd1(:,2)); 18 | psnr1=rd1(:,4); 19 | psnrhvs1=rd1(:,5); 20 | ssim1=rd1(:,6); 21 | fastssim1=rd1(:,7); 22 | 23 | pin = program_invocation_name; 24 | 25 | chdir(pin(1:(length(pin)-length(program_name)))); 26 | 27 | printf("%0.5f\n",interp1(rate1_log,psnr1,chosen_rate_log)); 28 | printf("%0.5f\n",interp1(rate1_log,psnrhvs1,chosen_rate_log)); 29 | printf("%0.5f\n",interp1(rate1_log,ssim1,chosen_rate_log)); 30 | printf("%0.5f\n",interp1(rate1_log,fastssim1,chosen_rate_log)); 31 | 32 | 33 | -------------------------------------------------------------------------------- /dump_convex_hull.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from __future__ import print_function 4 | 5 | import argparse 6 | import json 7 | 8 | import numpy as np 9 | import xlrd 10 | from numpy import * 11 | from scipy import * 12 | from scipy._lib._util import _asarray_validated 13 | from scipy.interpolate import BPoly, interp1d, pchip 14 | 15 | parser = argparse.ArgumentParser(description="Dump convex hull") 16 | parser.add_argument("xls", nargs=1, help="xls file to dump") 17 | args = parser.parse_args() 18 | 19 | met_index_as = { 20 | "PSNR Y (libvmaf)": 11, 21 | "PSNR Cb (libvmaf)": 18, 22 | "PSNR Cr (libvmaf)": 25, 23 | "CIEDE2000 (libvmaf)": 74, 24 | "SSIM (libvmaf)": 39, 25 | "MS-SSIM (libvmaf)": 46, 26 | "PSNR-HVS (libvmaf)": 67, 27 | "VMAF": 53, 28 | "VMAF-NEG": 60, 29 | } 30 | 31 | resolutions = ["3840x2160", "2560x1440", "1920x1080", "1280x720", "960x540", "640x360"] 32 | 33 | error_strings = [] 34 | 35 | 36 | def dump_as(file1): 37 | ret = {} 38 | a_xls = xlrd.open_workbook(file1) 39 | a_sh = a_xls.sheet_by_index(0) 40 | for metric in met_index_as: 41 | if metric not in met_index_as: 42 | return 43 | ra = [] 44 | ya = [] 45 | for c in range(1, a_sh.ncols): 46 | y = a_sh.cell_value(colx=c, rowx=met_index_as[metric] - 1 + 4) 47 | if y == "": 48 | continue 49 | ya.append(y) 50 | ra.append(a_sh.cell_value(colx=c, rowx=met_index_as[metric] - 1 + 5)) 51 | ra = np.flipud(ra) 52 | ya = np.flipud(ya) 53 | ret[metric] = {"Bitrate": ra.tolist(), "Metric": ya.tolist()} 54 | return ret 55 | 56 | 57 | # generate xls for each of the two runs: 58 | # for run in args.run: 59 | # subprocess.run(['python3', 'convexhull_framework/src/AWCYConvexHullTest.py', run], check=True) 60 | 61 | ret = dump_as(args.xls[0]) 62 | print(json.dumps(ret)) 63 | -------------------------------------------------------------------------------- /etc/awcy.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE "votes" ( `decoders` TEXT, `videos` TEXT, `selected` INTEGER, `id` TEXT, `metrics` TEXT, `voter` TEXT, `ip` TEXT ); 2 | 
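-- Column semantics as read back by subjective_metrics.py (not enforced by the
-- schema): `decoders` holds the JSON array of the two decoder.js URLs under
-- comparison, `selected` is 0 (first decoder), 1 (second decoder) or -1 (tie),
-- and `voter` identifies the participant. A minimal sketch of the lookup that
-- script performs:
--   SELECT * FROM votes WHERE decoders = ? AND videos LIKE ? AND voter = ?;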
-------------------------------------------------------------------------------- /etc/entrypoint: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # exit on failure 4 | set -e 5 | 6 | # exit on unassigned variable 7 | set -u 8 | 9 | # run command if specified (and skip the rest of this script) 10 | if [ -n "${1:-}" ]; then 11 | exec "$@" 12 | fi 13 | 14 | APP_DIR="${APP_DIR:-/opt/app}" 15 | 16 | # create data directories 17 | for dir in \ 18 | "${CONFIG_DIR}" \ 19 | "${CODECS_SRC_DIR}" \ 20 | "${RUNS_DST_DIR}" \ 21 | "${WORK_DIR}" \ 22 | "${MEDIAS_SRC_DIR}" \ 23 | ; do 24 | mkdir -p ${dir} 25 | chown ${APP_USER}:${APP_USER} ${dir} 26 | done 27 | 28 | # CTC result storage folder 29 | mkdir -p "${RUNS_DST_DIR}/ctc_results" 30 | 31 | # detect main IP external address if not set/forced 32 | net_iface=$(awk '{ if ($2 == "00000000") { print $1; exit; } }' /proc/net/route) 33 | net_ip_addr=$(ip addr show dev ${net_iface} | awk -F'[ \t/]+' '/inet / { print $3; exit; }') 34 | export EXTERNAL_ADDR=${EXTERNAL_ADDR:-${net_ip_addr}} 35 | 36 | # prepare awcy server configuration if needed 37 | if [ ! -f "${CONFIG_DIR}/config.json" ]; then 38 | cat >${CONFIG_DIR}/config.json <<-EOF 39 | { 40 | "channel": "${IRC_CHANNEL}", 41 | "have_aws": false, 42 | "port": ${AWCY_SERVER_PORT}, 43 | "rd_server_url": "http://${EXTERNAL_ADDR}:${RD_SERVER_PORT}" 44 | } 45 | EOF 46 | chown ${APP_USER}:${APP_USER} ${CONFIG_DIR}/config.json 47 | fi 48 | 49 | # prepare awcy key file if needed 50 | if [ ! -f "${CONFIG_DIR}/secret_key" ]; then 51 | echo "${AWCY_API_KEY}" >${CONFIG_DIR}/secret_key 52 | chown ${APP_USER}:${APP_USER} ${CONFIG_DIR}/secret_key 53 | fi 54 | 55 | # prepare awcy sqlite database if needed 56 | if [ ! -f "${CONFIG_DIR}/subjective.sqlite3" ]; then 57 | cat /etc/awcy.sql | sqlite3 ${CONFIG_DIR}/subjective.sqlite3 58 | chown ${APP_USER}:${APP_USER} ${CONFIG_DIR}/subjective.sqlite3 59 | fi 60 | 61 | # prepare awcy sets file if needed (and inject a quicktest set for scheduling testing) 62 | if [ ! -f "${CONFIG_DIR}/sets.json" ]; then 63 | mv ${RD_TOOL_DIR}/sets.json ${RD_TOOL_DIR}/sets.dist.json 64 | 65 | # create fast set for testing 66 | set_name=awcy-builder-quicktest 67 | rm -rf ${MEDIAS_SRC_DIR}/${set_name} 68 | mkdir -p ${MEDIAS_SRC_DIR}/${set_name} 69 | wget --output-document ${MEDIAS_SRC_DIR}/${set_name}/test_frame.y4m https://media.xiph.org/video/av2ctc/test_set/f2_still_MidRes/part1/Claudette.y4m 70 | jq '. + {"'${set_name}'": {"type": "image", "sources": ["test_frame.y4m"]}}' ${RD_TOOL_DIR}/sets.dist.json >${CONFIG_DIR}/sets.json 71 | chown -R ${APP_USER}:${APP_USER} ${MEDIAS_SRC_DIR}/${set_name} 72 | fi 73 | 74 | if [ ! -f "${APP_DIR}/www/sets.json" ]; then 75 | cp ${CONFIG_DIR}/sets.json ${APP_DIR}/www/sets.json 76 | fi 77 | 78 | # prepare awcy list file if needed 79 | if [ ! -f "${CONFIG_DIR}/list.json" ]; then 80 | echo "[]" >${CONFIG_DIR}/list.json 81 | chown ${APP_USER}:${APP_USER} ${CONFIG_DIR}/list.json 82 | fi 83 | 84 | # download sources if needed 85 | if [ ! -d "${CODECS_SRC_DIR}/av1" ]; then 86 | gosu ${APP_USER}:${APP_USER} git clone https://aomedia.googlesource.com/aom ${CODECS_SRC_DIR}/av1 87 | ln -s ${CODECS_SRC_DIR}/av1 ${CODECS_SRC_DIR}/av1-rt 88 | fi 89 | 90 | if [ ! 
-d "${CODECS_SRC_DIR}/av2" ]; then 91 | gosu ${APP_USER}:${APP_USER} git clone https://gitlab.com/AOMediaCodec/avm.git ${CODECS_SRC_DIR}/av2 92 | ln -s ${CODECS_SRC_DIR}/av2 ${CODECS_SRC_DIR}/av2-ra 93 | ln -s ${CODECS_SRC_DIR}/av2 ${CODECS_SRC_DIR}/av2-ra-st 94 | ln -s ${CODECS_SRC_DIR}/av2 ${CODECS_SRC_DIR}/av2-ld 95 | ln -s ${CODECS_SRC_DIR}/av2 ${CODECS_SRC_DIR}/av2-ai 96 | fi 97 | 98 | if [ ! -d "${CODECS_SRC_DIR}/daala" ]; then 99 | gosu ${APP_USER}:${APP_USER} git clone https://github.com/xiph/daala.git ${CODECS_SRC_DIR}/daala 100 | fi 101 | 102 | if [ ! -d "${CODECS_SRC_DIR}/rav1e" ]; then 103 | gosu ${APP_USER}:${APP_USER} git clone https://github.com/xiph/rav1e.git ${CODECS_SRC_DIR}/rav1e 104 | fi 105 | 106 | if [ ! -d "${CODECS_SRC_DIR}/svt-av1" ]; then 107 | gosu ${APP_USER}:${APP_USER} git clone https://gitlab.com/AOMediaCodec/SVT-AV1.git ${CODECS_SRC_DIR}/svt-av1 108 | ln -s ${CODECS_SRC_DIR}/svt-av1 ${CODECS_SRC_DIR}/svt-av1-ra 109 | ln -s ${CODECS_SRC_DIR}/svt-av1 ${CODECS_SRC_DIR}/svt-av1-ra-crf 110 | ln -s ${CODECS_SRC_DIR}/svt-av1 ${CODECS_SRC_DIR}/svt-av1-ra-vbr 111 | ln -s ${CODECS_SRC_DIR}/svt-av1 ${CODECS_SRC_DIR}/svt-av1-ra-vbr-2p 112 | ln -s ${CODECS_SRC_DIR}/svt-av1 ${CODECS_SRC_DIR}/svt-av1-ld-cbr 113 | ln -s ${CODECS_SRC_DIR}/svt-av1 ${CODECS_SRC_DIR}/svt-av1-ra-cq 114 | ln -s ${CODECS_SRC_DIR}/svt-av1 ${CODECS_SRC_DIR}/svt-av1-as 115 | ln -s ${CODECS_SRC_DIR}/svt-av1 ${CODECS_SRC_DIR}/svt-av1-as-ctc 116 | fi 117 | 118 | if [ ! -d "${CODECS_SRC_DIR}/vvc-vtm" ]; then 119 | gosu ${APP_USER}:${APP_USER} git clone https://vcgit.hhi.fraunhofer.de/jvet/VVCSoftware_VTM.git ${CODECS_SRC_DIR}/vvc-vtm 120 | ln -s ${CODECS_SRC_DIR}/vvc-vtm ${CODECS_SRC_DIR}/vvc-vtm-ra 121 | ln -s ${CODECS_SRC_DIR}/vvc-vtm ${CODECS_SRC_DIR}/vvc-vtm-ra-ctc 122 | ln -s ${CODECS_SRC_DIR}/vvc-vtm ${CODECS_SRC_DIR}/vvc-vtm-as-ctc 123 | ln -s ${CODECS_SRC_DIR}/vvc-vtm ${CODECS_SRC_DIR}/vvc-vtm-ra-st 124 | ln -s ${CODECS_SRC_DIR}/vvc-vtm ${CODECS_SRC_DIR}/vvc-vtm-ld 125 | ln -s ${CODECS_SRC_DIR}/vvc-vtm ${CODECS_SRC_DIR}/vvc-vtm-ai 126 | fi 127 | 128 | if [ ! -d "${CODECS_SRC_DIR}/thor" ]; then 129 | gosu ${APP_USER}:${APP_USER} git clone https://github.com/cisco/thor.git ${CODECS_SRC_DIR}/thor 130 | ln -s ${CODECS_SRC_DIR}/thor ${CODECS_SRC_DIR}/thor-rt 131 | fi 132 | 133 | if [ ! -d "${CODECS_SRC_DIR}/x264" ]; then 134 | gosu ${APP_USER}:${APP_USER} git clone https://code.videolan.org/videolan/x264.git ${CODECS_SRC_DIR}/x264 135 | fi 136 | 137 | if [ ! -d "${CODECS_SRC_DIR}/x265" ]; then 138 | gosu ${APP_USER}:${APP_USER} git clone https://github.com/videolan/x265.git ${CODECS_SRC_DIR}/x265 139 | ln -s ${CODECS_SRC_DIR}/x265 ${CODECS_SRC_DIR}/x265-rt 140 | fi 141 | 142 | if [ ! -d "${CODECS_SRC_DIR}/libvpx" ]; then 143 | gosu ${APP_USER}:${APP_USER} git clone https://github.com/webmproject/libvpx.git ${CODECS_SRC_DIR}/libvpx 144 | ln -s ${CODECS_SRC_DIR}/libvpx ${CODECS_SRC_DIR}/vp8 145 | ln -s ${CODECS_SRC_DIR}/libvpx ${CODECS_SRC_DIR}/vp8-rt 146 | ln -s ${CODECS_SRC_DIR}/libvpx ${CODECS_SRC_DIR}/libvp8 147 | ln -s ${CODECS_SRC_DIR}/libvpx ${CODECS_SRC_DIR}/libvp8-rt 148 | ln -s ${CODECS_SRC_DIR}/libvpx ${CODECS_SRC_DIR}/vp9 149 | ln -s ${CODECS_SRC_DIR}/libvpx ${CODECS_SRC_DIR}/vp9-rt 150 | ln -s ${CODECS_SRC_DIR}/libvpx ${CODECS_SRC_DIR}/libvp9 151 | ln -s ${CODECS_SRC_DIR}/libvpx ${CODECS_SRC_DIR}/libvp9-rt 152 | ln -s ${CODECS_SRC_DIR}/libvpx ${CODECS_SRC_DIR}/libvpx-rt 153 | fi 154 | 155 | # configure rd_tools 156 | if [ ! 
-f "${CONFIG_DIR}/machines.json" ]; then 157 | if [ "${LOCAL_WORKER_ENABLED:-false}" != "true" ]; then 158 | echo "[]" >${CONFIG_DIR}/machines.json 159 | 160 | else 161 | cat >${CONFIG_DIR}/machines.json <<-EOF 162 | [ 163 | { 164 | "host": "127.0.0.1", 165 | "user": "${APP_USER}", 166 | "cores": ${LOCAL_WORKER_SLOTS:-$(nproc)}, 167 | "port": 22, 168 | "work_root": "${WORK_DIR}", 169 | "media_path": "${MEDIAS_SRC_DIR}" 170 | } 171 | ] 172 | EOF 173 | fi 174 | 175 | chown ${APP_USER}:${APP_USER} ${CONFIG_DIR}/machines.json 176 | fi 177 | 178 | # configure ssh client 179 | if [ ! -f "/home/${APP_USER}/.ssh/config" ]; then 180 | mkdir -p /home/${APP_USER}/.ssh 181 | cat >/home/${APP_USER}/.ssh/config <<-EOF 182 | Host * 183 | UserKnownHostsFile /dev/null 184 | StrictHostKeyChecking no 185 | LogLevel quiet 186 | Port 22 187 | EOF 188 | chown -R ${APP_USER}:${APP_USER} /home/${APP_USER}/.ssh 189 | fi 190 | 191 | # create SSH host keys if none exist 192 | if [ ! -f /etc/ssh/ssh_host_rsa_key ]; then 193 | echo "Generating SSH host keys" 194 | dpkg-reconfigure openssh-server >/dev/null 2>&1 195 | fi 196 | 197 | # configure ssh authentication 198 | if [ ! -f "${CONFIG_DIR}/awcy.pem" ]; then 199 | gosu ${APP_USER}:${APP_USER} ssh-keygen -t rsa -f ${CONFIG_DIR}/awcy.pem -P '' 200 | chmod 0600 ${CONFIG_DIR}/awcy.pem 201 | mv ${CONFIG_DIR}/awcy.pem.pub ${CONFIG_DIR}/awcy.pub 202 | fi 203 | if [ ! -f "/home/${APP_USER}/.ssh/authorized_keys" ]; then 204 | gosu ${APP_USER}:${APP_USER} cp ${CONFIG_DIR}/awcy.pub /home/${APP_USER}/.ssh/authorized_keys 205 | fi 206 | 207 | # configure local worker services if needed 208 | if [ "${LOCAL_WORKER_ENABLED:-false}" = "true" ]; then 209 | mkdir -p /var/run/sshd 210 | mkdir -p /etc/service/sshd 211 | 212 | cat >/etc/service/sshd/run <<-EOF 213 | #!/bin/sh 214 | echo "STARTING SSHD SERVICE" 215 | exec /usr/sbin/sshd -D 216 | EOF 217 | 218 | chmod a+x /etc/service/sshd/run 219 | fi 220 | 221 | # specify generated SSH private key if none set 222 | if [ -z "${SSH_PRIVKEY_FILE:-}" ]; then 223 | export SSH_PRIVKEY_FILE=${CONFIG_DIR}/awcy.pem 224 | fi 225 | 226 | # Update the git to have all folders as safe.dir for git safety mechansim. 
227 | git config --global --add safe.directory '*' 228 | touch /home/${APP_USER}/.gitconfig 229 | cat >/home/${APP_USER}/.gitconfig <<-EOF 230 | [safe] 231 | directory = * 232 | EOF 233 | chown ${APP_USER} /home/${APP_USER}/.gitconfig 234 | 235 | # Explicitly set permissions to be sure we got them right 236 | export SSH_PRIVKEY_FILE=${CONFIG_DIR}/awcy.pem 237 | echo $SSH_PRIVKEY_FILE 238 | 239 | ## Add key to the agent 240 | eval `ssh-agent -s` 241 | ssh-add ${SSH_PRIVKEY_FILE} 242 | ssh-add -l 243 | 244 | ## Update permissions 245 | chmod 700 /home/${APP_USER}/.ssh 246 | chmod 600 /home/${APP_USER}/.ssh/authorized_keys 247 | chown -R ${APP_USER}:${APP_USER} /home/${APP_USER}/.ssh 248 | chown -R ${APP_USER}:${APP_USER} /home/${APP_USER}/.ssh/authorized_keys 249 | chown -R ${APP_USER}:${APP_USER} $SSH_PRIVKEY_FILE 250 | 251 | # Link Videos [For Convex-Hull] 252 | ln -fs ${MEDIAS_SRC_DIR}/aomctc-a1-4k ${MEDIAS_SRC_DIR}/A1 253 | 254 | # run runit services 255 | exec tini -g -- /usr/bin/runsvdir -P /etc/service 256 | -------------------------------------------------------------------------------- /etc/entrypoint.worker: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # exit on failure 4 | set -e 5 | 6 | # exit on unassigned variable 7 | set -u 8 | 9 | # run command if specified (and skip the rest of this script) 10 | if [ -n "${1:-}" ]; then 11 | exec "$@" 12 | fi 13 | 14 | # create data directories 15 | for dir in \ 16 | "${WORK_DIR}" \ 17 | "/home/${APP_USER}/.ssh" \ 18 | ; do 19 | mkdir -p ${dir} 20 | chown ${APP_USER}:${APP_USER} ${dir} 21 | done 22 | 23 | # configure sshd 24 | mkdir -p /var/run/sshd 25 | if [ -n "${SSH_PUBKEY:-}" -a ! -f /home/${APP_USER}/.ssh/authorized_keys ]; then 26 | echo "${SSH_PUBKEY}" >/home/${APP_USER}/.ssh/authorized_keys 27 | chown ${APP_USER}:${APP_USER} /home/${APP_USER}/.ssh/authorized_keys 28 | fi 29 | 30 | # create SSH host keys if none exist 31 | if [ ! 
-f /etc/ssh/ssh_host_rsa_key ]; then 32 | echo "Generating SSH host keys" 33 | dpkg-reconfigure openssh-server >/dev/null 2>&1 34 | fi 35 | 36 | # start sshd 37 | /usr/sbin/sshd -De 38 | -------------------------------------------------------------------------------- /etc/service/awcy/run: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # exit on failure 4 | set -e 5 | 6 | # exit on unassigned variable 7 | set -u 8 | 9 | # show log message 10 | echo "STARTING AWCY SERVICE" 11 | 12 | # enter working directory 13 | cd ${APP_DIR} 14 | 15 | # set env variables 16 | export HOME=/home/${APP_USER} 17 | export CARGO_HOME=/home/${APP_USER}/.cargo/ 18 | 19 | # start awcy server 20 | exec gosu ${APP_USER}:${APP_USER} node awcy_server.js 21 | -------------------------------------------------------------------------------- /etc/service/job-scheduler/run: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # exit on failure 4 | set -e 5 | 6 | # exit on unassigned variable 7 | set -u 8 | 9 | # show log message 10 | echo "STARTING RDTOOL SERVICE" 11 | 12 | # enter working directory 13 | cd ${RD_TOOL_DIR} 14 | 15 | # start the rd_tool server 16 | exec gosu ${APP_USER}:${APP_USER} python3 ./rd_server.py \ 17 | -machineconf ${CONFIG_DIR}/machines.json \ 18 | -port ${RD_SERVER_PORT} \ 19 | -------------------------------------------------------------------------------- /generate_list.ts: -------------------------------------------------------------------------------- 1 | // fs-extra provides deleting directories recursively so I don't 2 | // have to manually implement it. 3 | import fs = require('fs-extra'); 4 | import path = require('path'); 5 | import glob = require('glob'); 6 | 7 | const run_to_update = process.argv[2]; 8 | const config_dir = process.env['CONFIG_DIR'] || './'; 9 | const runs_dst_dir = process.env['RUNS_DST_DIR'] || './runs'; 10 | const listfile = config_dir + '/list.json'; 11 | 12 | function create_list_entry(run_id) { 13 | // TODO: define a typescript interface for info.json 14 | let info: any = {}; 15 | 16 | const infoFile = fs.readFileSync(runs_dst_dir+'/'+run_id+'/info.json').toString(); 17 | info = JSON.parse(infoFile); 18 | 19 | const stat = fs.statSync(runs_dst_dir+'/'+run_id); 20 | let failed = false; 21 | let status = 'completed'; 22 | try { 23 | const statusFile = fs.readFileSync(runs_dst_dir+'/'+run_id+'/status.txt','utf8'); 24 | status = statusFile.trim(); 25 | } catch (e) { 26 | try { 27 | const total_stat = fs.statSync(runs_dst_dir+'/'+run_id+'/'+info['task']); 28 | } catch(e) { 29 | failed = true; 30 | status = 'failed'; 31 | } 32 | } 33 | 34 | const job = { 35 | 'run_id': run_id, 36 | 'tasks': fs.readdirSync(runs_dst_dir+'/'+run_id), 37 | 'date': stat.mtime, 38 | 'info': info, 39 | 'status': status, 40 | 'failed': failed 41 | } 42 | 43 | return job; 44 | } 45 | 46 | if (run_to_update) { 47 | // incremental update 48 | console.log('Performing incremental update on',run_to_update); 49 | const list = JSON.parse(fs.readFileSync(listfile).toString()); 50 | const new_job = create_list_entry(run_to_update); 51 | var found_job = false; 52 | for (var job_num in list) { 53 | if (list[job_num].run_id == new_job.run_id) { 54 | list[job_num] = new_job; 55 | found_job = true; 56 | } 57 | } 58 | if(!found_job) { 59 | list.push(new_job); 60 | } 61 | const tmpname = listfile + '.' 
+ Math.random().toString(36).slice(2); 62 | fs.writeFileSync(tmpname,JSON.stringify(list)); 63 | fs.renameSync(tmpname, listfile); 64 | } else { 65 | const jobs = []; 66 | // full update 67 | fs.readdirSync(runs_dst_dir).forEach(function(run_id) { 68 | try { 69 | // if (erases_old_images(run_id)) return; 70 | const job = create_list_entry(run_id); 71 | erase_old_ivf_files(job); 72 | jobs.push(job); 73 | } catch (e) {}; 74 | }); 75 | 76 | const tmpname = listfile + '.' + Math.random().toString(36).slice(2); 77 | fs.writeFileSync(tmpname, JSON.stringify(jobs)); 78 | fs.renameSync(tmpname, listfile); 79 | 80 | const file_structure = read_ab_image_paths(runs_dst_dir); 81 | fs.writeFile('ab_paths.json', JSON.stringify(file_structure, null, 4)); 82 | 83 | } 84 | 85 | function erase_old_ivf_files(job) { 86 | const stat = fs.statSync(runs_dst_dir+'/' + job.run_id); 87 | const age_ms = Date.now().valueOf() - stat.mtime.valueOf(); 88 | const age_days = age_ms / (1000 * 3600 * 24); 89 | // Not old enough to erase files 90 | if (age_days < 60) { return false; } 91 | const files = glob.sync(runs_dst_dir+'/' + job.run_id + '/' + job.info.task + '/*.ivf'); 92 | if (files.length) { 93 | console.log('Erasing old IVF files for ',job.run_id); 94 | for (let file of files) { 95 | fs.unlinkSync(file); 96 | } 97 | } 98 | } 99 | 100 | // The structure is that each folder contains an array of files. 101 | // Any folder found will be a new object with a new array inside it. 102 | // This only includes folders and `.png` files. Folders without `png` 103 | // files at the bottom layer are considered empty and not included. 104 | function read_ab_image_paths(outer_path) { 105 | // An array of files and an object containing keys to all the folders. 106 | const entries = []; 107 | // An object with keys to all the folders. 108 | const folders = {}; 109 | 110 | fs.readdirSync(outer_path).forEach(function(inner) { 111 | const inner_path = outer_path + '/' + inner; 112 | 113 | if (fs.statSync(inner_path).isDirectory()) { 114 | // A folder is an array of files and objects (folders). 115 | const folder = read_ab_image_paths(inner_path); 116 | 117 | // Don't add an empty folder. 118 | if (folder.length != 0) { folders[inner] = folder; } 119 | } else if (path.extname(inner) == '.png') { entries.push(inner); } 120 | }); 121 | 122 | // Don't append an empty object if there were no folders. 123 | if (Object.keys(folders).length != 0) { 124 | entries.push(folders); 125 | } 126 | 127 | return entries; 128 | } 129 | 130 | function erases_old_images(run_id) { 131 | const stat = fs.statSync(runs_dst_dir+'/' + run_id); 132 | 133 | // TODO: define a typescript interface for info.json 134 | let info: any = {}; 135 | try { 136 | const infoFile = fs.readFileSync(runs_dst_dir+'/'+run_id+'/info.json').toString(); 137 | info = JSON.parse(infoFile); 138 | } catch (e) {}; 139 | 140 | // Never automatically erase special (reference) runs. 141 | if (info.special) { return false; } 142 | 143 | const age_ms = Date.now().valueOf() - stat.mtime.valueOf(); 144 | const age_days = age_ms / (1000 * 3600 * 24); 145 | 146 | // Not old enough to erase files. 147 | if (age_days < 30) { return false; } 148 | 149 | const run_path = runs_dst_dir+'/' + run_id; 150 | let removed_images = false; 151 | 152 | // Remove images in the sets folder if it finds some. 
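// Layout assumed here: only directories nested two levels below the run
// (runs/<run_id>/<task>/<image_dir>/) are erased; regular files beside them,
// such as metric outputs, are left in place.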
153 | fs.readdirSync(run_path).forEach(function(inside_run) { 154 | const inside_run_path = run_path + '/' + inside_run; 155 | 156 | if (fs.statSync(inside_run_path).isDirectory()) { 157 | const set_path = inside_run_path; 158 | 159 | fs.readdirSync(set_path).forEach(function(inside_set) { 160 | const inside_set_path = set_path + '/' + inside_set; 161 | 162 | if (fs.statSync(inside_set_path).isDirectory()) { 163 | removed_images = true; 164 | fs.removeSync(inside_set_path); 165 | } 166 | }); 167 | } 168 | }); 169 | 170 | if (removed_images) { 171 | console.log('ab images for', run_id, 'were removed since they were', age_days.toFixed(1), 'days old.'); 172 | 173 | return true; 174 | } 175 | 176 | return false; 177 | } 178 | -------------------------------------------------------------------------------- /graph_over_time.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python2 2 | 3 | import calendar 4 | import datetime 5 | import json 6 | import subprocess 7 | import time 8 | 9 | import dateutil.parser 10 | 11 | runs = json.load(open("list.json", "r")) 12 | 13 | watermark_runs = ["x265_1.6_ntt-short-1", "x264_ntt"] 14 | watermark_data = {} 15 | 16 | psnr = [] 17 | psnrhvs = [] 18 | ssim = [] 19 | fastssim = [] 20 | 21 | 22 | def unix_time(dt): 23 | epoch = datetime.datetime.utcfromtimestamp(0) 24 | delta = dt - epoch 25 | return delta.total_seconds() 26 | 27 | 28 | for run in runs: 29 | is_watermark_run = run["run_id"] in watermark_runs 30 | is_master_run = (u"master" in run["info"]) and ( 31 | u"ntt-short-1" in run["info"]["task"] 32 | ) 33 | if is_watermark_run or is_master_run: 34 | filename = "runs/" + run["run_id"] + "/" + run["info"]["task"] + "/total.out" 35 | try: 36 | distortion = subprocess.check_output(["./distortion.m", filename, "0.1"]) 37 | if is_master_run: 38 | date_str = subprocess.check_output( 39 | [ 40 | "git", 41 | "--git-dir=daala/.git", 42 | "--work-tree=daala/", 43 | "show", 44 | "-s", 45 | "--format=%ci", 46 | run["info"]["commit"], 47 | ] 48 | ) 49 | date_obj = dateutil.parser.parse(date_str).replace(tzinfo=None) 50 | date_js = unix_time(date_obj) * 1000 51 | psnr.append([date_js, distortion.split("\n")[0]]) 52 | psnrhvs.append([date_js, distortion.split("\n")[1]]) 53 | ssim.append([date_js, distortion.split("\n")[2]]) 54 | fastssim.append([date_js, distortion.split("\n")[3]]) 55 | if is_watermark_run: 56 | watermark_run = run["info"]["codec"] 57 | watermark_data[watermark_run] = {} 58 | watermark_data[watermark_run]["psnr"] = distortion.split("\n")[0] 59 | watermark_data[watermark_run]["psnrhvs"] = distortion.split("\n")[1] 60 | watermark_data[watermark_run]["ssim"] = distortion.split("\n")[2] 61 | watermark_data[watermark_run]["fastssim"] = distortion.split("\n")[3] 62 | except subprocess.CalledProcessError: 63 | continue 64 | 65 | output = [psnr, psnrhvs, ssim, fastssim] 66 | json.dump(output, open("time_series.json", "w")) 67 | json.dump(watermark_data, open("watermark.json", "w")) 68 | -------------------------------------------------------------------------------- /nuke_branch.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | TESTNAME=$1 6 | 7 | if [ -n "$TESTNAME" ] ; then 8 | rm -rf runs/$TESTNAME 9 | 10 | pushd daala 11 | git branch -D t-$TESTNAME 12 | popd 13 | fi 14 | 15 | node generate_list.js 16 | -------------------------------------------------------------------------------- /package.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "name": "awcy", 3 | "version": "1.0.0", 4 | "description": "Are We Compressed Yet?", 5 | "main": "awcy_server.js", 6 | "scripts": { 7 | "generate_list": "./node_modules/.bin/tsc generate_list.ts; node generate_list.js", 8 | "tsc": "./node_modules/.bin/tsc", 9 | "webpack": "cd www; npm run build-release; cd ..", 10 | "start": "npm run tsc; npm run webpack; node awcy_server.js" 11 | }, 12 | "dependencies": { 13 | "@types/aws-sdk": "0.0.42", 14 | "@types/body-parser": "0.0.33", 15 | "@types/cookie-parser": "^1.4.2", 16 | "@types/express": "^4.17.6", 17 | "@types/fs-extra": "0.0.37", 18 | "@types/irc": "^0.3.33", 19 | "@types/node": "^6.14.10", 20 | "@types/request": "0.0.45", 21 | "@types/sqlite3": "^3.1.6", 22 | "aws-sdk": "^2.659.0", 23 | "body-parser": "^1.19.0", 24 | "cookie-parser": "^1.4.5", 25 | "express": "^4.17.1", 26 | "fs-extra": "^2.0.0", 27 | "archiver": "5.3.1", 28 | "glob": "^7.1.6", 29 | "irc": "^0.5.2", 30 | "request": "^2.88.2", 31 | "sqlite3": "^5.0.0", 32 | "typescript": "^3.0.0" 33 | }, 34 | "repository": { 35 | "type": "git", 36 | "url": "git+https://github.com/tdaede/awcy.git" 37 | }, 38 | "author": "Thomas Daede ", 39 | "license": "MIT", 40 | "bugs": { 41 | "url": "https://github.com/tdaede/awcy/issues" 42 | }, 43 | "homepage": "https://github.com/tdaede/awcy#readme" 44 | } 45 | -------------------------------------------------------------------------------- /pull_running_jobs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import json 4 | 5 | import requests 6 | 7 | r = requests.get("https://arewecompressedyet.com/run_status.json") 8 | 9 | for job in r.json(): 10 | s = requests.get( 11 | "https://arewecompressedyet.com/runs/" + job["run_id"] + "/status.txt" 12 | ) 13 | if s.text == "running": 14 | print(job["run_id"]) 15 | -------------------------------------------------------------------------------- /push_jobs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import sys 4 | 5 | import requests 6 | 7 | f = open(sys.argv[1], "r") 8 | for line in f.readlines(): 9 | url = "http://localhost:4000/submit?run_id=" + line.strip() 10 | print(url) 11 | r = requests.get(url) 12 | print(r.text) 13 | -------------------------------------------------------------------------------- /quantizer_log.m: -------------------------------------------------------------------------------- 1 | #!/usr/bin/octave -qf 2 | 3 | warning("off","Octave:nested-functions-coerced"); 4 | 5 | args=argv(); 6 | 7 | if size(args,1)!=2 8 | printf("usage: ./quantizer_log.m <rd_file> <bpp>\n"); 9 | return 10 | end 11 | 12 | rd1=load("-ascii",args{1}); 13 | chosen_rate_log = log(str2double(args{2})); 14 | 15 | rd1=flipud(sortrows(rd1,1)); 16 | 17 | rate1=rd1(:,3)*8./rd1(:,2); 18 | rate1_log=log(rate1); 19 | q1=rd1(:,1); 20 | q1_log=log(q1); 21 | 22 | %plot(rate1_log,q1_log); 23 | 24 | %pause(); 25 | 26 | printf("%0.5f\n",exp(interp1(rate1_log,q1_log,chosen_rate_log))); 27 | -------------------------------------------------------------------------------- /rate_delta_point.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import argparse 4 | import json 5 | import os 6 | import sys 7 | 8 | from numpy import * 9 | from scipy import * 10 | from scipy.interpolate import interp1d, pchip 11 | 12 | a = flipud(loadtxt(sys.argv[1])) 13 | b = 
flipud(loadtxt(sys.argv[2])) 14 | 15 | for m in range(0, 11): 16 | try: 17 | ya = a[:, 3 + m] 18 | yb = b[:, 3 + m] 19 | ra = a[:, 2] * 8.0 / a[:, 1] 20 | rb = b[:, 2] * 8.0 / b[:, 1] 21 | a_rate = pchip(ya, log(ra))(float(sys.argv[3])) 22 | b_rate = pchip(yb, log(rb))(float(sys.argv[3])) 23 | print(exp(b_rate - a_rate) - 1) 24 | except IndexError: 25 | print("NaN") 26 | except ValueError: 27 | print("NaN") 28 | -------------------------------------------------------------------------------- /run_awcy.bat: -------------------------------------------------------------------------------- 1 | node awcy_server.js 2 | pause -------------------------------------------------------------------------------- /run_video_test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | #./create_test_branch.sh "$1" "$2" $CODEC 6 | 7 | mkdir -p "runs/$2/$3" 8 | 9 | export DAALA_ROOT=daala/ 10 | 11 | OPTS="" 12 | 13 | if [ "$QUALITIES" ]; then 14 | OPTS="$OPTS -qualities $QUALITIES" 15 | fi 16 | 17 | if [ "$SAVE_ENCODE" ]; then 18 | OPTS="$OPTS -save-encode" 19 | fi 20 | 21 | cd rd_tool 22 | DAALA_ROOT=../daala python3 -u rd_tool.py -machines "$NUM_MACHINES" -awsgroup "$SCALING_GROUP" -codec $CODEC $OPTS -prefix "../runs/$2/$3" -runid "$RUN_ID" -bindir "../runs/$2/x86_64" "$3" 23 | #DAALA_ROOT=../daala python3 -u rd_tool.py -machineconf localhost.json -codec $CODEC $QUALITY_OPTS -prefix "../runs/$2/$3" -bindir "../runs/$2/x86_64" "$3" 24 | if [ "$AB_COMPARE" ]; then 25 | DAALA_ROOT=../daala python3 -u rd_tool.py -machines "$NUM_MACHINES" -codec $CODEC -awsgroup "$SCALING_GROUP" -mode 'ab' -runid "$RUN_ID" -bindir "../runs/$2/x86_64" "$3" 26 | fi 27 | -------------------------------------------------------------------------------- /setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | git clone https://aomedia.googlesource.com/aom av1 4 | ln -s av1 av1-rt 5 | 6 | git clone https://aomedia.googlesource.com/aom av2 7 | 8 | mkdir runs 9 | touch list.json 10 | -------------------------------------------------------------------------------- /stats.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import datetime 4 | 5 | import dateutil 6 | import matplotlib.pyplot as plt 7 | import requests 8 | 9 | list = requests.get("https://arewecompressedyet.com/list.json").json() 10 | 11 | histogram = {} 12 | 13 | for run in list: 14 | date = dateutil.parser.parse(run["date"]) 15 | yearmonth = date.strftime("%Y-%m") 16 | if yearmonth in histogram: 17 | histogram[yearmonth] += 1 18 | else: 19 | histogram[yearmonth] = 1 20 | 21 | xpos = [] 22 | monthnames = [] 23 | counts = [] 24 | n = 0 25 | 26 | for yearmonth in sorted(histogram.keys()): 27 | xpos.append(n) 28 | monthnames.append(yearmonth) 29 | counts.append(histogram[yearmonth]) 30 | n += 1 31 | 32 | plt.bar(xpos, counts, tick_label=monthnames, align="center") 33 | plt.show() 34 | -------------------------------------------------------------------------------- /subjective_metrics.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import os 4 | import sqlite3 5 | import sys 6 | import textwrap 7 | import urllib.parse 8 | 9 | import matplotlib.pyplot as plt 10 | import scipy.stats 11 | 12 | db = sqlite3.connect("subjective.sqlite3") 13 | 14 | # decoders = "[\"Base-LL\",\"Base-LL-ext_refs\"]" 15 | 16 | labels = 
["daala-dist", "ref"] 17 | 18 | # videos = ['objective-1-fast/Netflix_DrivingPOV_1280x720_60fps_8bit_420_60f.y4m-55.ivf','objective-1-fast/Netflix_RollerCoaster_1280x720_60fps_8bit_420_60f.y4m-55.ivf','objective-1-fast/dark70p_60f.y4m-55.ivf'] 19 | # decoders = '["https://beta.arewecompressedyet.com/runs/clpf-only-ll@2017-05-15T22:14:20.557Z/js/decoder.js","https://beta.arewecompressedyet.com/runs/master-ll@2017-05-15T22:12:29.122Z/js/decoder.js"]' 20 | # decoders = '["https://beta.arewecompressedyet.com/runs/clpf-only@2017-05-08T17:19:44.308Z/js/decoder.js","https://beta.arewecompressedyet.com/runs/master@2017-05-02T00:51:52.225Z/js/decoder.js"]' 21 | # decoders = '["https://arewecompressedyet.com/runs/RyanLei_ParallelDeblockingSubjective2_HL@2017-05-05T18:50:07.454Z/js/decoder.js","https://arewecompressedyet.com/runs/RyanLei_ParallelDeblockingSubjective4_HL@2017-05-10T00:44:26.235Z/js/decoder.js"]' 22 | # decoders = '["https://arewecompressedyet.com/runs/RyanLei_ParallelDeblockingSubjective1_HL@2017-05-05T18:45:01.437Z/js/decoder.js","https://arewecompressedyet.com/runs/RyanLei_ParallelDeblockingSubjective4_HL@2017-05-10T00:44:26.235Z/js/decoder.js"]' 23 | decoders = '["https://arewecompressedyet.com/runs/daala_dist_vartx_off_light_June13@2017-06-14T23:59:22.942Z/js/decoder.js","https://arewecompressedyet.com/runs/ref_vartx_off_light_June13@2017-06-15T00:00:21.112Z/js/decoder.js"]' 24 | # videos = ['subjective-wip/MINECRAFT_420_300f.y4m-50.ivf','subjective-wip/Netflix_Crosswalk_1920x1080_30fps_8bit_420_300f.y4m-55.ivf','subjective-wip/Netflix_TunnelFlag_1920x1080_30fps_8bit_420_300f.y4m-55.ivf','subjective-wip/sintel_trailer_cut1.y4m-50.ivf','subjective-wip/vidyo1_720p_30fps_300f.y4m-55.ivf'] 25 | videos = [ 26 | "subjective-wip/MINECRAFT_420_300f.y4m", 27 | "subjective-wip/Netflix_Crosswalk_1920x1080_30fps_8bit_420_300f.y4m", 28 | "subjective-wip/Netflix_TunnelFlag_1920x1080_30fps_8bit_420_300f.y4m", 29 | "subjective-wip/sintel_trailer_cut1.y4m", 30 | "subjective-wip/vidyo1_720p_30fps_300f.y4m", 31 | ] 32 | # videos = ['subjective-wip/MINECRAFT_420_300f.y4m-50.ivf','subjective-wip/Netflix_Crosswalk_1920x1080_30fps_8bit_420_300f.y4m-50.ivf','subjective-wip/Netflix_TunnelFlag_1920x1080_30fps_8bit_420_300f.y4m-50.ivf','subjective-wip/sintel_trailer_cut1.y4m-50.ivf','subjective-wip/vidyo1_720p_30fps_300f.y4m-50.ivf'] 33 | # decoders = '["https://arewecompressedyet.com/runs/RyanLei_ParallelDeblockingSubjective2_HL@2017-05-05T18:50:07.454Z/js/decoder.js","https://arewecompressedyet.com/runs/RyanLei_ParallelDeblockingSubjective4_HL@2017-05-10T00:44:26.235Z/js/decoder.js"]' 34 | # decoders = '["https://arewecompressedyet.com/runs/RyanLei_ParallelDeblockingSubjective1_HL@2017-05-05T18:45:01.437Z/js/decoder.js","https://arewecompressedyet.com/runs/RyanLei_ParallelDeblockingSubjective4_HL@2017-05-10T00:44:26.235Z/js/decoder.js"]' 35 | 36 | cur = db.execute("SELECT * from votes WHERE decoders = ?;", (decoders,)) 37 | 38 | votes = cur.fetchall() 39 | 40 | votes_per_user = {} 41 | 42 | print("Votes per user") 43 | for vote in votes: 44 | if vote[5] in votes_per_user: 45 | votes_per_user[vote[5]] = votes_per_user[vote[5]] + 1 46 | else: 47 | votes_per_user[vote[5]] = 1 48 | for user in votes_per_user: 49 | print(user, votes_per_user[user]) 50 | 51 | # users who didn't vote on all 5 videos 52 | blacklisted_users = [] 53 | print("Blacklisted users") 54 | for k, v in votes_per_user.items(): 55 | for video in videos: 56 | cur = db.execute( 57 | "SELECT * from votes WHERE decoders = ? AND videos LIKE ? 
AND voter = ?;",
58 |             (decoders, video + "%", k),
59 |         )
60 |         if len(cur.fetchall()) < 1 and k not in blacklisted_users:
61 |             blacklisted_users.append(k)
62 | for u in blacklisted_users:
63 |     print(u)
64 | 
65 | 
66 | print("Number of votes:", str(len(votes)))
67 | print(decoders)
68 | print(
69 |     "%40.40s : %5s %5s %5s %5s %7s"  # %7s rather than %.5s, so "P-value" is not truncated
70 |     % ("Video", labels[0], "Tie", labels[1], "Total", "P-value")
71 | )
72 | 
73 | f, (ax1, ax2) = plt.subplots(1, 2, sharey=True)
74 | 
75 | total_a = 0
76 | total_b = 0
77 | total_t = 0
78 | 
79 | 
80 | def compute_p_value(a, b, t):  # two-sided sign test; ties are split evenly between both sides
81 |     if a < b:
82 |         p_value = scipy.stats.binom_test(a + t // 2, a + b + t)  # floor division keeps the count an integer on Python 3; binom_test is named binomtest in newer SciPy
83 |     else:
84 |         p_value = scipy.stats.binom_test(b + t // 2, a + b + t)  # e.g. a=3, b=10, t=2 -> binom_test(4, 15)
85 |     return p_value
86 | 
87 | 
88 | def get_non_duplicate_votes(db, video):  # one vote per user: later duplicates overwrite earlier rows
89 |     cur = db.execute(
90 |         "SELECT * from votes WHERE decoders = ? AND videos LIKE ?;",
91 |         (decoders, video + "%"),
92 |     )
93 |     vote_by_user = {}
94 |     for vote in cur.fetchall():
95 |         vote_by_user[vote[5]] = vote[2]
96 |     # print(vote_by_user)
97 |     # vote_by_user = {k:v for k,v in vote_by_user.items() if votes_per_user[k] >= 5}
98 |     vote_by_user = {k: v for k, v in vote_by_user.items() if k not in blacklisted_users}
99 |     # print(vote_by_user)
100 |     a = len([x for x in vote_by_user if vote_by_user[x] == 0])
101 |     b = len([x for x in vote_by_user if vote_by_user[x] == 1])
102 |     t = len([x for x in vote_by_user if vote_by_user[x] == -1])
103 |     # print(a)
104 |     return (a, b, t)
105 | 
106 | 
107 | plt.title(decoders)
108 | plot_num = 1
109 | for video in videos:
110 |     plt.subplot(1, len(videos), plot_num)
111 |     plt.ylim(0, 13)
112 |     plt.title("\n".join(textwrap.wrap(os.path.basename(video), 20)))
113 |     plot_num += 1
114 |     (a, b, t) = get_non_duplicate_votes(db, video)
115 |     p_value = compute_p_value(a, b, t)
116 |     print("%40.40s : %5d %5d %5d %5d %.5f" % (video, a, t, b, a + t + b, p_value))
117 |     y_pos = [0, 1, 2]
118 |     plt.xticks([0.5, 1.5, 2.5], [labels[0], "tie", labels[1]])
119 |     bar = plt.bar(y_pos, [a, t, b])
120 |     bar[0].set_facecolor("red")
121 |     bar[1].set_facecolor("gray")
122 |     bar[2].set_facecolor("green")
123 |     total_a += a
124 |     total_b += b
125 |     total_t += t
126 | fig = plt.gcf()
127 | fig.set_size_inches(18.5, 10.5, forward=True)
128 | plt.show()
129 | 
130 | print("Total p value: ", compute_p_value(total_a, total_b, total_t))
131 | 
--------------------------------------------------------------------------------
/submit_awcy.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | 
3 | from __future__ import print_function
4 | 
5 | import argparse
6 | import os
7 | import subprocess
8 | import sys
9 | from datetime import datetime
10 | 
11 | import requests
12 | 
13 | 
14 | # our timestamping function, accurate to milliseconds
15 | # (remove [:-3] to display microseconds)
16 | def GetTime():
17 |     return datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]
18 | 
19 | 
20 | if "check_output" not in dir(subprocess):  # duck punch it in!
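    # subprocess.check_output only exists on Python >= 2.7; on older
    # interpreters the fallback below recreates it: run the command with
    # stdout captured, wait for it to finish, and raise CalledProcessError
    # on a nonzero exit status, mirroring the stdlib version closely enough
    # for the git invocations in this script.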
21 | 
22 |     def f(*popenargs, **kwargs):
23 |         if "stdout" in kwargs:
24 |             raise ValueError("stdout argument not allowed, it will be overridden.")
25 |         process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
26 |         output, unused_err = process.communicate()
27 |         retcode = process.poll()
28 |         if retcode:
29 |             cmd = kwargs.get("args")
30 |             if cmd is None:
31 |                 cmd = popenargs[0]
32 |             raise subprocess.CalledProcessError(retcode, cmd)
33 |         return output
34 | 
35 |     subprocess.check_output = f
36 | 
37 | key = None
38 | if os.path.exists("secret_key"):  # guard against a missing file; open() would raise instead of leaving key as None
39 |     with open("secret_key", "r") as keyfile:
40 |         key = keyfile.read().strip()
41 | if key is None:
42 |     print(GetTime(), "Could not open your secret_key file!")
43 |     sys.exit(1)
44 | 
45 | parser = argparse.ArgumentParser(description="Submit test to arewecompressedyet.com")
46 | parser.add_argument("-branch", default=None)
47 | parser.add_argument("-prefix", default=None)
48 | parser.add_argument("-master", action="store_true", default=False)
49 | args = parser.parse_args()
50 | 
51 | if args.branch is None:
52 |     args.branch = subprocess.check_output(
53 |         "git symbolic-ref -q --short HEAD", shell=True
54 |     ).strip().decode("utf-8")  # check_output returns bytes on Python 3
55 | 
56 | if args.prefix is None:
57 |     args.prefix = args.branch
58 | 
59 | commit = subprocess.check_output("git rev-parse HEAD", shell=True).strip().decode("utf-8")
60 | short = subprocess.check_output("git rev-parse --short HEAD", shell=True).strip().decode("utf-8")
61 | date = subprocess.check_output(["git", "show", "-s", "--format=%ci", commit]).strip().decode("utf-8")
62 | date_short = date.split()[0]
63 | user = args.prefix
64 | is_master = args.master
65 | 
66 | run_id = user + "-" + date_short + "-" + short
67 | 
68 | print(GetTime(), "Creating run " + run_id)
69 | r = requests.post(
70 |     "https://arewecompressedyet.com/submit/job",
71 |     {"run_id": run_id, "commit": commit, "master": is_master, "key": key},
72 | )
73 | print(GetTime(), r)
74 | 
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 |     "compilerOptions": {
3 |         "target": "es5",
4 |         "sourceMap": true,
5 |         "noImplicitAny": false
6 |     },
7 |     "files": [
8 |         "awcy_server.ts",
9 |         "generate_list.ts"
10 |     ]
11 | }
12 | 
--------------------------------------------------------------------------------
/update.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | git pull
3 | npm run build-release
4 | 
--------------------------------------------------------------------------------
/update_analyzer.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | cd ../aomanalyzer
3 | git pull
4 | npm install
5 | npm run build-release
6 | echo Building downloadable analyzers
7 | npm run package-linux
8 | npm run package-darwin
9 | npm run package-win32
10 | cd release_builds
11 | tar -czf AOMAnalyzer-linux-x64.tar.gz AOMAnalyzer-linux-x64/
12 | tar -czf AOMAnalyzer-darwin-x64.tar.gz AOMAnalyzer-darwin-x64/
13 | zip -r AOMAnalyzer-win32-x64.zip AOMAnalyzer-win32-x64/
14 | 
--------------------------------------------------------------------------------
/www/README.md:
--------------------------------------------------------------------------------
1 | Are We Compressed Yet?
2 | ====
3 | This repository contains the arewecompressedyet.com website source code.
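Runs are submitted over HTTP: submit_awcy.py in the repository root POSTs a form to the server's /submit/job endpoint. As a rough sketch of the same request with curl (the field names are taken from that script; the run_id value is only an illustration, and secret_key must hold a valid submission key):

```
curl https://arewecompressedyet.com/submit/job \
  -d "run_id=mybranch-2017-06-13-abc1234" \
  -d "commit=$(git rev-parse HEAD)" \
  -d "master=false" \
  -d "key=$(cat secret_key)"
```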
4 | 
5 | Running your own local copy of the website
6 | ===
7 | 
8 | To run a local copy, you will need node.js and webpack installed on your computer. Then run the following commands:
9 | 
10 | ```
11 | npm install
12 | npm install --dev
13 | ```
14 | 
15 | To rebuild AWCY whenever a file changes, run:
16 | 
17 | ```
18 | npm run build-watch
19 | ```
20 | 
21 | To produce a release build of AWCY, run:
22 | 
23 | ```
24 | npm run build-release
25 | ```
26 | 
27 | To start a dev web server, run:
28 | 
29 | ```
30 | webpack-dev-server
31 | ```
32 | 
--------------------------------------------------------------------------------
/www/config/webpack.config.js:
--------------------------------------------------------------------------------
1 | var WebpackNotifierPlugin = require('webpack-notifier');
2 | var webpack = require('webpack');
3 | var path = require('path');
4 | 
5 | var RELEASE = JSON.parse(process.env.RELEASE || '0');
6 | 
7 | module.exports = {
8 |   entry: {
9 |     index: "./src/index.tsx"
10 |   },
11 |   output: {
12 |     path: path.resolve(__dirname, '../dist'),
13 |     filename: "[name].bundle.js"
14 |   },
15 | 
16 |   resolve: {
17 |     // Add '.ts' and '.tsx' as resolvable extensions.
18 |     extensions: [".webpack.js", ".web.js", ".ts", ".tsx", ".js"]
19 |   },
20 | 
21 |   module: {
22 |     rules: [
23 |       // All files with a '.ts' or '.tsx' extension will be handled by 'ts-loader'.
24 |       { test: /\.tsx?$/, loader: "ts-loader" },
25 |       // All output '.js' files will have any sourcemaps re-processed by 'source-map-loader'.
26 |       { test: /\.js$/, loader: "source-map-loader" }
27 |     ]
28 | 
29 |   },
30 |   // When importing a module whose path matches one of the following, just
31 |   // assume a corresponding global variable exists and use that instead.
32 |   // This is important because it allows us to avoid bundling all of our
33 |   // dependencies, which allows browsers to cache those libraries between builds.
34 |   externals: {
35 |     "react": "React",
36 |     "react-dom": "ReactDOM"
37 |   },
38 | };
39 | 
--------------------------------------------------------------------------------
/www/img/beer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xiph/awcy/89efab681545dcd8f47665a283bea10bb3ca4af3/www/img/beer.png
--------------------------------------------------------------------------------
/www/img/bottle.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xiph/awcy/89efab681545dcd8f47665a283bea10bb3ca4af3/www/img/bottle.png
--------------------------------------------------------------------------------
/www/img/mug.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xiph/awcy/89efab681545dcd8f47665a283bea10bb3ca4af3/www/img/mug.png
--------------------------------------------------------------------------------
/www/index.html:
--------------------------------------------------------------------------------
[HTML markup was lost when this dump was extracted; of the file's roughly 300 lines, only the page title "Are We Compressed Yet?" and the header text "AreWeCompressedYet?" are recoverable.]
--------------------------------------------------------------------------------
/www/lib/dragscroll.js:
--------------------------------------------------------------------------------
1 | /**
2 |  * @fileoverview dragscroll - scroll area by dragging
3 |  * @version 0.0.6
4 |  *
5 |  * @license MIT, see http://github.com/asvd/intence
6 |  * @copyright 2015 asvd
7 |  */
8 | 
9 | 
10 | (function (root, factory) {
11 |     if (typeof define === 'function' && define.amd) {
12 |         define(['exports'], factory);
13 |     } else if (typeof exports !== 'undefined') {
14 |         factory(exports);
15 |     } else {
16 |         factory((root.dragscroll = {}));
17 |     }
18 | }(this, function (exports) {
19 |     var _window = window;
20 |     var _document = document;
21 |     var mousemove = 'mousemove';
22 |     var mouseup = 'mouseup';
23 |     var mousedown = 'mousedown';
24 |     var EventListener = 'EventListener';
25 |     var addEventListener = 'add'+EventListener;
26 |     var removeEventListener = 'remove'+EventListener;
27 | 
28 |     var dragged = [];
29 |     var reset = function(i, el) {
30 |         for (i = 0; i < dragged.length;) {
31 |             el = dragged[i++];
32 |             el = el.container || el;
33 |             el[removeEventListener](mousedown, el.md, 0);
34 |             _window[removeEventListener](mouseup, el.mu, 0);
35 |             _window[removeEventListener](mousemove, el.mm, 0);
36 |         }
37 | 
38 |         // cloning into array since HTMLCollection is updated dynamically
39 |         dragged = [].slice.call(_document.getElementsByClassName('dragscroll'));
40 |         for (i = 0; i < dragged.length;) {
41 |             (function(el, lastClientX, lastClientY, pushed, scroller, cont){
42 |                 (cont = el.container || el)[addEventListener](
43 |                     mousedown,
44 |                     cont.md = function(e) {
45 |                         if (!el.hasAttribute('nochilddrag') ||
46 |                             _document.elementFromPoint(
47 |                                 e.pageX, e.pageY
48 |                             ) == cont
49 |                         ) {
50 |                             pushed = 1;
51 |                             lastClientX = e.clientX;
52 |                             lastClientY = e.clientY;
53 | 
54 |                             e.preventDefault();
55 |                         }
56 |                     }, 0
57 |                 );
58 | 
59 |                 _window[addEventListener](
60 |                     mouseup, cont.mu = function() {pushed = 0;}, 0
61 |                 );
62 | 
63 |                 _window[addEventListener](
64 |                     mousemove,
65 |                     cont.mm = function(e) {
66 |                         if (pushed) {
67 |                             (scroller = el.scroller||el).scrollLeft -=
68 |                                 (- lastClientX + (lastClientX=e.clientX));
69 |                             scroller.scrollTop -=
70 |                                 (- lastClientY + (lastClientY=e.clientY));
71 |                         }
72 |                     }, 0
73 |                 );
74 |             })(dragged[i++]);
75 |         }
76 |     }
77 | 
78 | 
79 |     if (_document.readyState == 'complete') {
80 |         reset();
81 |     } else {
82 |         _window[addEventListener]('load', reset, 0);
83 |     }
84 | 
85 |     exports.reset = reset;
86 | }));
87 | 
88 | 
--------------------------------------------------------------------------------
/www/lib/fonts/glyphicons-halflings-regular.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xiph/awcy/89efab681545dcd8f47665a283bea10bb3ca4af3/www/lib/fonts/glyphicons-halflings-regular.eot
--------------------------------------------------------------------------------
/www/lib/fonts/glyphicons-halflings-regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xiph/awcy/89efab681545dcd8f47665a283bea10bb3ca4af3/www/lib/fonts/glyphicons-halflings-regular.ttf
--------------------------------------------------------------------------------
/www/lib/fonts/glyphicons-halflings-regular.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xiph/awcy/89efab681545dcd8f47665a283bea10bb3ca4af3/www/lib/fonts/glyphicons-halflings-regular.woff
--------------------------------------------------------------------------------
/www/lib/fonts/glyphicons-halflings-regular.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xiph/awcy/89efab681545dcd8f47665a283bea10bb3ca4af3/www/lib/fonts/glyphicons-halflings-regular.woff2
--------------------------------------------------------------------------------
/www/lib/fonts/icomoon.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xiph/awcy/89efab681545dcd8f47665a283bea10bb3ca4af3/www/lib/fonts/icomoon.eot
--------------------------------------------------------------------------------
/www/lib/fonts/icomoon.svg:
--------------------------------------------------------------------------------
[SVG markup was lost when this dump was extracted; of the file's 63 lines, only the comment "Generated by IcoMoon" is recoverable.]
--------------------------------------------------------------------------------
/www/lib/fonts/icomoon.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xiph/awcy/89efab681545dcd8f47665a283bea10bb3ca4af3/www/lib/fonts/icomoon.ttf
--------------------------------------------------------------------------------
/www/lib/fonts/icomoon.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xiph/awcy/89efab681545dcd8f47665a283bea10bb3ca4af3/www/lib/fonts/icomoon.woff
--------------------------------------------------------------------------------
/www/lib/mousetrap.min.js:
--------------------------------------------------------------------------------
1 | /* mousetrap v1.6.0 craig.is/killing/mice */
[Minified source (lines 2-12) was corrupted during extraction: unescaped angle-bracket sequences were consumed as HTML tags, leaving the remainder of the file unrecoverable.]
--------------------------------------------------------------------------------
/www/lib/react-dom.js:
--------------------------------------------------------------------------------
1 | /**
2 |  * ReactDOM v15.3.1
3 |  *
4 |  * Copyright 2013-present, Facebook, Inc.
5 |  * All rights reserved.
6 |  *
7 |  * This source code is licensed under the BSD-style license found in the
8 |  * LICENSE file in the root directory of this source tree. An additional grant
9 |  * of patent rights can be found in the PATENTS file in the same directory.
10 |  *
11 |  */
12 | // Based off https://github.com/ForbesLindesay/umd/blob/master/template.js
13 | ;(function(f) {
14 |   // CommonJS
15 |   if (typeof exports === "object" && typeof module !== "undefined") {
16 |     module.exports = f(require('react'));
17 | 
18 |   // RequireJS
19 |   } else if (typeof define === "function" && define.amd) {
20 |     define(['react'], f);
21 | 
22 |   //