├── .gitignore ├── .travis.yml ├── .travis ├── checkx.sh ├── docker-push-setup.sh ├── run-docker-linter.sh └── run-linters.sh ├── LICENSE ├── README-compose.md ├── README.md ├── elasticsearch ├── 5.6.14-extra │ ├── .travis │ │ └── build-deploy.sh │ └── Dockerfile ├── 6.5.4-extra │ ├── .travis │ │ └── build-deploy.sh │ └── Dockerfile └── README.md ├── quickstatements ├── README.md └── latest │ ├── .travis │ └── build-deploy.sh │ ├── Dockerfile │ ├── config.json │ ├── entrypoint.sh │ ├── oauth.ini │ └── php.ini ├── setup.sh ├── wdqs-frontend ├── README.md ├── latest │ ├── .travis │ │ └── build-deploy.sh │ ├── Dockerfile │ ├── custom-config.json │ ├── default.conf │ └── entrypoint.sh └── legacy │ ├── .travis │ └── build-deploy.sh │ ├── Dockerfile │ ├── custom-config.json │ ├── default.conf │ └── entrypoint.sh ├── wdqs-proxy ├── README.md └── latest │ ├── .travis │ └── build-deploy.sh │ ├── Dockerfile │ ├── entrypoint.sh │ └── wdqs.template ├── wdqs ├── 0.3.10 │ ├── .travis │ │ └── build-deploy.sh │ ├── Dockerfile │ ├── RWStore.properties │ ├── entrypoint.sh │ ├── mwservices.json │ ├── runBlazegraph.sh │ ├── runUpdate.sh │ ├── wait-for-it.sh │ └── whitelist.txt ├── 0.3.40 │ ├── .travis │ │ └── build-deploy.sh │ ├── Dockerfile │ ├── RWStore.properties │ ├── entrypoint.sh │ ├── mwservices.json │ ├── runBlazegraph.sh │ ├── runUpdate.sh │ ├── wait-for-it.sh │ └── whitelist.txt ├── 0.3.6 │ ├── .travis │ │ └── build-deploy.sh │ ├── Dockerfile │ ├── RWStore.properties │ ├── entrypoint.sh │ ├── mwservices.json │ ├── runBlazegraph.sh │ ├── runUpdate.sh │ ├── wait-for-it.sh │ └── whitelist.txt └── README.md └── wikibase ├── 1.31 ├── .travis │ └── build-deploy.sh ├── base │ ├── Dockerfile │ ├── LocalSettings.php.template │ ├── composer.local.json │ ├── download-extension.sh │ ├── entrypoint.sh │ ├── htaccess │ └── wait-for-it.sh └── bundle │ ├── Dockerfile │ ├── LocalSettings.php.wikibase-bundle.template │ ├── download-extension.sh │ ├── extra-entrypoint-run-first.sh │ ├── extra-install.sh │ └── oauth.ini ├── 1.35 ├── .travis │ └── build-deploy.sh ├── base │ ├── Dockerfile │ ├── LocalSettings.php.template │ ├── composer.local.json │ ├── download-extension.sh │ ├── entrypoint.sh │ ├── htaccess │ └── wait-for-it.sh └── bundle │ ├── Dockerfile │ ├── LocalSettings.php.wikibase-bundle.template │ ├── download-extension.sh │ ├── extra-entrypoint-run-first.sh │ ├── extra-install.sh │ └── oauth.ini └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | .idea -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: required 2 | language: minimal 3 | services: 4 | - docker 5 | 6 | env: 7 | - IMAGE_PATH=wikibase/1.35/ 8 | - IMAGE_PATH=wikibase/1.31/ 9 | - IMAGE_PATH=wdqs/0.3.6/ 10 | - IMAGE_PATH=wdqs/0.3.10/ 11 | - IMAGE_PATH=wdqs/0.3.40/ 12 | - IMAGE_PATH=wdqs-frontend/latest/ 13 | - IMAGE_PATH=wdqs-frontend/legacy/ 14 | - IMAGE_PATH=wdqs-proxy/latest/ 15 | - IMAGE_PATH=quickstatements/latest/ 16 | - IMAGE_PATH=elasticsearch/5.6.14-extra/ 17 | - IMAGE_PATH=elasticsearch/6.5.4-extra/ 18 | 19 | script: 20 | - source ./.travis/docker-push-setup.sh 21 | - bash ./$IMAGE_PATH/.travis/build-deploy.sh 22 | 23 | jobs: 24 | include: 25 | - stage: prebuild 26 | script: bash ./.travis/checkx.sh 27 | env: 28 | - SCRIPT=checkx 29 | - IMAGE_PATH=all 30 | - stage: prebuild 31 | script: bash ./.travis/run-linters.sh 32 | env: 33 | - SCRIPT=hadolint 
34 | - IMAGE_PATH=all 35 | 36 | stages: 37 | - prebuild 38 | - test 39 | 40 | notifications: 41 | irc: 42 | channels: 43 | - "libera.chat#wikidata-feed" 44 | template: 45 | - "%{repository_slug} (%{branch} - %{commit} %{commit_message} : %{author}): %{message}" 46 | - "Change view : %{compare_url}" 47 | - "Build details : %{build_url}" 48 | -------------------------------------------------------------------------------- /.travis/checkx.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # @author Addshore 3 | # Check all .sh files for the +x bit 4 | 5 | failed=false 6 | 7 | for file in $( find . -type f -name '*.sh' ); do 8 | if [[ -x "$file" ]] 9 | then 10 | echo "File '$file' is executable" 11 | else 12 | echo "File '$file' is not executable" 13 | failed=true 14 | fi 15 | done 16 | 17 | if [[ $failed == true ]] 18 | then 19 | echo "Failed: not all .sh files have +x" 20 | exit 1 21 | fi -------------------------------------------------------------------------------- /.travis/docker-push-setup.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | #https://graysonkoonce.com/getting-the-current-branch-name-during-a-pull-request-in-travis-ci 4 | export BRANCH=$(if [ "$TRAVIS_PULL_REQUEST" == "false" ]; then echo $TRAVIS_BRANCH; else echo $TRAVIS_PULL_REQUEST_BRANCH; fi) 5 | 6 | if [ "$BRANCH" == "master" ]; then 7 | echo "This commit has been merged to master so on success images will be pushed" 8 | export SHOULD_DOCKER_PUSH=true 9 | else 10 | echo "This is branch: $BRANCH so we won't be pushing" 11 | fi 12 | 13 | if [ "$SHOULD_DOCKER_PUSH" = true ]; then 14 | echo "$DOCKER_PASS" | docker login -u "$DOCKER_USER" --password-stdin 15 | fi 16 | -------------------------------------------------------------------------------- /.travis/run-docker-linter.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -eu 3 | 4 | echo "now linting: $1" 5 | docker run --rm -i hadolint/hadolint:v1.3.0 hadolint --ignore DL3006 - < "$1" 6 | echo "-------" 7 | -------------------------------------------------------------------------------- /.travis/run-linters.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | docker pull hadolint/hadolint:v1.3.0 4 | 5 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 6 | 7 | find ./ -name Dockerfile -print0 | xargs -0 -L1 $DIR/run-docker-linter.sh 8 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2017-2021, Wikimedia Deutschland e. V. 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 
19 | 
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 | 
--------------------------------------------------------------------------------
/README-compose.md:
--------------------------------------------------------------------------------
1 | See https://www.mediawiki.org/wiki/Wikibase/Docker
2 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## LEGACY: wikibase-docker
2 | 
3 | **This repository will no longer be maintained by WMDE.**
4 | 
5 | Please see the announcement of the new WMDE-maintained Docker images: https://lists.wikimedia.org/hyperkitty/list/wikibaseug@lists.wikimedia.org/thread/WW4LZJINT3PIG3DOYKTXIWVP3WAKWXCT/
6 | along with the documentation: https://www.mediawiki.org/wiki/Wikibase/Docker
7 | 
8 | -----
9 | 
10 | ### Issue tracking
11 | 
12 | Please do not file issues against these legacy docker images.
13 | 
14 | ### Development
15 | 
16 | These images are no longer developed.
17 | 
18 | ### Further reading
19 | 
20 | - [Docker Wikibase Install Docs](https://www.mediawiki.org/wiki/Wikibase/Docker)
21 | - [Announcement of new WMDE maintained docker images & releases](https://lists.wikimedia.org/hyperkitty/list/wikibaseug@lists.wikimedia.org/thread/WW4LZJINT3PIG3DOYKTXIWVP3WAKWXCT/)
22 | 
23 | ### Old Repo Guide
24 | 
25 | Each legacy image contained within this repo has its own README with more detailed information:
26 | 
27 | LEGACY Image name | Description | README
28 | ------------------------ | ------------- | ----------
29 | [`wikibase/wikibase`](https://hub.docker.com/r/wikibase/wikibase) | MediaWiki with the Wikibase extension | [README](https://github.com/wmde/wikibase-docker/blob/master/wikibase/README.md)
30 | [`wikibase/wdqs`](https://hub.docker.com/r/wikibase/wdqs) | Blazegraph SPARQL query service backend | [README](https://github.com/wmde/wikibase-docker/blob/master/wdqs/README.md)
31 | [`wikibase/wdqs-proxy`](https://hub.docker.com/r/wikibase/wdqs-proxy) | Proxy to make the query service READONLY and enforce query timeouts | [README](https://github.com/wmde/wikibase-docker/blob/master/wdqs-proxy/README.md)
32 | [`wikibase/wdqs-frontend`](https://hub.docker.com/r/wikibase/wdqs-frontend) | UI for the SPARQL query service | [README](https://github.com/wmde/wikibase-docker/blob/master/wdqs-frontend/README.md)
33 | [`wikibase/quickstatements`](https://hub.docker.com/r/wikibase/quickstatements) | UI to add data to Wikibase | [README](https://github.com/wmde/wikibase-docker/blob/master/quickstatements/README.md)
34 | 
--------------------------------------------------------------------------------
/elasticsearch/5.6.14-extra/.travis/build-deploy.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #Oneline for full directory name see: https://stackoverflow.com/questions/59895/getting-the-source-directory-of-a-bash-script-from-within
3 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
4 | set -e
5 | docker build "$DIR/../" -t wikibase/elasticsearch:5.6.14-extra
6 | 
7 | if [ "$SHOULD_DOCKER_PUSH" = true ]; then
8 | docker push wikibase/elasticsearch:5.6.14-extra
9 | fi
10 | 
--------------------------------------------------------------------------------
/elasticsearch/5.6.14-extra/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM elasticsearch:5.6.14
2 | RUN ./bin/elasticsearch-plugin install org.wikimedia.search:extra:5.6.14
3 | RUN ./bin/elasticsearch-plugin install org.wikimedia.search.highlighter:experimental-highlighter-elasticsearch-plugin:5.6.14
4 | 
--------------------------------------------------------------------------------
/elasticsearch/6.5.4-extra/.travis/build-deploy.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #Oneline for full directory name see: https://stackoverflow.com/questions/59895/getting-the-source-directory-of-a-bash-script-from-within
3 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
4 | set -e
5 | docker build "$DIR/../" -t wikibase/elasticsearch:6.5.4-extra -t wikibase/elasticsearch:latest
6 | 
7 | if [ "$SHOULD_DOCKER_PUSH" = true ]; then
8 | docker push wikibase/elasticsearch:6.5.4-extra
9 | docker push wikibase/elasticsearch:latest
10 | fi
11 | 
--------------------------------------------------------------------------------
/elasticsearch/6.5.4-extra/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM elasticsearch:6.5.4
2 | RUN ./bin/elasticsearch-plugin install org.wikimedia.search:extra:6.5.4
3 | RUN ./bin/elasticsearch-plugin install org.wikimedia.search.highlighter:experimental-highlighter-elasticsearch-plugin:6.5.4
4 | 
--------------------------------------------------------------------------------
/elasticsearch/README.md:
--------------------------------------------------------------------------------
1 | ## wikibase elasticsearch image
2 | 
3 | Wikibase needs the extra plugin for Elasticsearch, available here:
4 | 
5 | https://mvnrepository.com/artifact/org.wikimedia.search/extra/6.5.4
6 | 
7 | Image name | Parent image
8 | --------------------------------------- | ------------------------
9 | `wikibase/elasticsearch` : `6.5.4-extra`, `latest` | [elasticsearch:6.5.4](https://hub.docker.com/_/elasticsearch/)
10 | `wikibase/elasticsearch` : `5.6.14-extra` | [elasticsearch:5.6.14](https://hub.docker.com/_/elasticsearch/)
11 | 
12 | ### Development
13 | 
14 | New versions of this image should be created alongside new versions of elasticsearch that are used in production for Wikidata.
15 | 
16 | The versions required for this image can generally be seen under the dependencies of the [CirrusSearch extension](https://www.mediawiki.org/wiki/Extension:CirrusSearch).
17 | 
18 | You can find the plugin versions that can be used at https://mvnrepository.com/artifact/org.wikimedia.search/extra-common
19 | 
20 | The Elasticsearch version and the plugin versions must match for new images.
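As a quick sanity check after building, you can ask Elasticsearch which plugin versions it actually loaded and eyeball that they line up with the version in the image tag. A minimal sketch (the container name and tag are just examples):

```bash
# Start the built image locally and query the _cat/plugins API; the plugin
# versions it reports should match the Elasticsearch version in the tag.
docker run -d --rm --name es-extra-test -p 9200:9200 \
    -e "discovery.type=single-node" \
    wikibase/elasticsearch:6.5.4-extra
sleep 30  # give Elasticsearch time to start up
curl -s 'localhost:9200/_cat/plugins?v'
docker stop es-extra-test
```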
21 | 
22 | The process is generally:
23 | - Create a new directory using a previous one as an example
24 | - Update the Dockerfile to use the newer version of elasticsearch and extensions
25 | - Update the CI build by checking the steps in the main README Development section in this repo.
26 | 
--------------------------------------------------------------------------------
/quickstatements/README.md:
--------------------------------------------------------------------------------
1 | # Quickstatements docker image
2 | 
3 | Quickstatements2 as seen at [https://tools.wmflabs.org/quickstatements/](https://tools.wmflabs.org/quickstatements/)
4 | 
5 | ### Tags
6 | Image name | Parent image | Quickstatements version
7 | ------------------------------- | ------------------------ | --------------
8 | `wikibase/quickstatements` : `latest` | [php:7.2-apache](https://hub.docker.com/_/php/) | master
9 | 
10 | ### Environment variables
11 | 
12 | Variable | Default | Description
13 | -------------------------- | -------- | -----------
14 | `WIKIBASE_HOST` | NONE | Host of the wikibase instance as seen by the QS container
15 | `WB_PUBLIC_HOST_AND_PORT` | NONE | Host and port of wikibase as seen by the user's browser
16 | `QS_PUBLIC_HOST_AND_PORT` | NONE | Host and port of QS as seen by the user's browser
17 | `OAUTH_CONSUMER_KEY` | NONE | OAuth consumer key (obtained from wikibase)
18 | `OAUTH_CONSUMER_SECRET` | NONE | OAuth consumer secret (obtained from wikibase)
19 | `PHP_TIMEZONE` | UTC | Setting for php.ini date.timezone
20 | 
21 | ### Filesystem layout
22 | 
23 | Directory | Description
24 | --------------------------------- | ------------------------------------------------------------------------------
25 | `/var/www/html/quickstatements` | Base quickstatements directory
26 | `/var/www/html/quickstatements/public_html` | The Apache root folder
27 | `/var/www/html/magnustools` | Base magnustools directory
28 | 
29 | File | Description
30 | ------------------------- | ------------------------------------------------------------------------------
31 | `/templates/config.json` | Template for Quickstatements' config.json (substituted to `/var/www/html/quickstatements/public_html/config.json` at runtime)
32 | `/templates/oauth.ini` | Template for Quickstatements' oauth.ini (substituted to `/var/www/html/quickstatements/oauth.ini` at runtime)
33 | `/templates/php.ini` | php config (the provided default sets date.timezone to stop PHP complaining; substituted to `/usr/local/etc/php/conf.d/php.ini` at runtime)
34 | 
35 | 
36 | ### How to set up and use
37 | 
38 | #### Set up quickstatements
39 | In order for quickstatements to communicate with wikibase, it needs to know where your instance is and how to reach it.
40 | This is done by setting the environment variable WIKIBASE_HOST. N.B. this should reflect how the running quickstatements
41 | container sees the wikibase container, for example a Docker network alias such as wikibase.svc.
42 | 
43 | The user's browser will also be redirected to the Wikibase instance and finally back to quickstatements. The address
44 | the user sees for the Wikibase may differ from how the running container sees it; for example, it may be running
45 | on localhost on a specific port, e.g. http://localhost:8181. This should be passed to the quickstatements container as
46 | WB_PUBLIC_HOST_AND_PORT
47 | 
48 | Quickstatements must also know how this container will be visible to the user, so that it can ask the wikibase to redirect the
49 | user back here. This should be passed as QS_PUBLIC_HOST_AND_PORT
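Putting the above together, a minimal `docker run` sketch (all host names, ports and the network name are illustrative assumptions; also note that, depending on the image version, the entrypoint may instead expect the `*_SCHEME_*` variable variants listed in the `REQUIRED_VARIABLES` check of `entrypoint.sh` further down):

```bash
# Illustrative values only: wikibase.svc is an assumed network alias for the
# Wikibase container, and the published ports are examples.
docker run --rm -p 8840:80 \
    -e WIKIBASE_HOST=wikibase.svc \
    -e WB_PUBLIC_HOST_AND_PORT=localhost:8181 \
    -e QS_PUBLIC_HOST_AND_PORT=localhost:8840 \
    --network wikibase-network \
    wikibase/quickstatements:latest
```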
50 | 
51 | You can pass the consumer and secret token you got from the wikibase to this container as the environment variables
52 | OAUTH_CONSUMER_KEY and OAUTH_CONSUMER_SECRET. If you don't set them, the example docker-compose setup handles this automatically.
53 | 
54 | You can now test that it works by navigating to http://<QS_PUBLIC_HOST_AND_PORT> and logging in using the button at the top right.
55 | 
56 | You should be redirected to the wiki, where you can authorize this Quickstatements to act on your behalf.
57 | 
58 | Finally you should be redirected back to Quickstatements and appear logged in.
59 | 
60 | Use Quickstatements as normal with the Run button. Currently "Run in background" is not supported by this image.
61 | 
62 | #### Troubleshooting
63 | If you see an error such as an mw-oauth exception when trying to log in, check that you have passed the right consumer token
64 | and secret token to quickstatements.
65 | 
66 | If you have changed the value of $wgSecretKey or $wgOAuthSecretKey since you made the consumer, you'll need to create a new consumer or
67 | reissue the secret token for the old one.
68 | 
69 | ### Development
70 | 
71 | These images are built from the master branch of the quickstatements and magnustools repos.
72 | 
73 | For this reason the images may sometimes break due to upstream changes and need fixing.
74 | 
75 | Additional images for new releases do not need to be created.
76 | 
--------------------------------------------------------------------------------
/quickstatements/latest/.travis/build-deploy.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #Oneline for full directory name see: https://stackoverflow.com/questions/59895/getting-the-source-directory-of-a-bash-script-from-within
3 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
4 | set -e
5 | docker build "$DIR/../" -t wikibase/quickstatements:latest
6 | 
7 | if [ "$SHOULD_DOCKER_PUSH" = true ]; then
8 | docker push wikibase/quickstatements:latest
9 | fi
10 | 
--------------------------------------------------------------------------------
/quickstatements/latest/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:xenial as fetcher
2 | 
3 | RUN apt-get update && \
4 |     apt-get install --yes --no-install-recommends git=1:2.* ca-certificates=201* && \
5 |     apt-get clean && rm -rf /var/lib/apt/lists/*
6 | 
7 | RUN git clone https://phabricator.wikimedia.org/source/tool-quickstatements.git quickstatements
8 | RUN git clone --depth 1 https://bitbucket.org/magnusmanske/magnustools.git magnustools
9 | 
10 | RUN rm -rf quickstatements/.git
11 | RUN rm -rf magnustools/.git
12 | 
13 | FROM composer:1 as composer
14 | 
15 | COPY --from=fetcher /quickstatements /quickstatements
16 | 
17 | WORKDIR /quickstatements
18 | RUN composer install --no-dev
19 | 
20 | FROM php:7.2-apache
21 | 
22 | # Install envsubst
23 | RUN apt-get update && \
24 |     DEBIAN_FRONTEND=noninteractive apt-get install --yes --no-install-recommends gettext-base=0.19.* jq=1.5* && \
25 |     rm -rf /var/lib/apt/lists/*
26 | 
27 | COPY --from=composer /quickstatements /var/www/html/quickstatements
28 | COPY --from=fetcher /magnustools /var/www/html/magnustools
29 | 
30 | COPY entrypoint.sh /entrypoint.sh
31 | 
32 | COPY config.json /templates/config.json
33 | COPY oauth.ini /templates/oauth.ini
34 | COPY php.ini /templates/php.ini
35 | 
36 | ENV APACHE_DOCUMENT_ROOT /var/www/html/quickstatements/public_html
37 | RUN sed -ri -e 
"s!/var/www/html!${APACHE_DOCUMENT_ROOT}!g" /etc/apache2/sites-available/*.conf 38 | RUN sed -ri -e "s!/var/www/!${APACHE_DOCUMENT_ROOT}!g" /etc/apache2/apache2.conf /etc/apache2/conf-available/*.conf 39 | 40 | ENV MW_SITE_NAME=wikibase-docker\ 41 | MW_SITE_LANG=en\ 42 | PHP_TIMEZONE=UTC 43 | 44 | RUN install -d -owww-data /var/log/quickstatements 45 | 46 | ENTRYPOINT ["/bin/bash"] 47 | CMD ["/entrypoint.sh"] 48 | -------------------------------------------------------------------------------- /quickstatements/latest/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "site" : "${MW_SITE_NAME}" , 3 | "bot_config_file" : "/var/www/html/bot.ini" , 4 | "logfile" : "/var/log/quickstatements/tool.log" , 5 | "sites" : { 6 | "${MW_SITE_NAME}" : { 7 | "oauth" : { 8 | "language":"${MW_SITE_LANG}" , 9 | "project":"${MW_SITE_NAME}" , 10 | "ini_file":"/quickstatements/data/oauth.ini" , 11 | "publicMwOAuthUrl":"${WB_PUBLIC_SCHEME_HOST_AND_PORT}/w/index.php?title=Special:OAuth" , 12 | "mwOAuthUrl":"${WIKIBASE_SCHEME_AND_HOST}/w/index.php?title=Special:OAuth" , 13 | "mwOAuthIW":"mw" 14 | } , 15 | "server" : "${WB_PUBLIC_HOST_AND_PORT}" , 16 | "api" : "${WIKIBASE_SCHEME_AND_HOST}/w/api.php" , 17 | "pageBase" : "${WB_PUBLIC_SCHEME_HOST_AND_PORT}/wiki/" , 18 | "toolBase" : "${QS_PUBLIC_SCHEME_HOST_AND_PORT}/" , 19 | "types" : { 20 | "P" : { "type":"property" , "ns":"${WB_PROPERTY_NAMESPACE}" , "ns_prefix":"${WB_PROPERTY_PREFIX}" } , 21 | "Q" : { "type":"item" , "ns":"${WB_ITEM_NAMESPACE}" , "ns_prefix":"${WB_ITEM_PREFIX}" } 22 | } 23 | } 24 | } 25 | } -------------------------------------------------------------------------------- /quickstatements/latest/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Test if required environment variables have been set 4 | REQUIRED_VARIABLES=(QS_PUBLIC_SCHEME_HOST_AND_PORT WB_PUBLIC_SCHEME_HOST_AND_PORT WIKIBASE_SCHEME_AND_HOST WB_PROPERTY_NAMESPACE WB_PROPERTY_PREFIX WB_ITEM_NAMESPACE WB_ITEM_PREFIX) 5 | for i in ${REQUIRED_VARIABLES[@]}; do 6 | if ! [[ -v "$i" ]]; then 7 | echo "$i is required but isn't set. You should pass it to docker. 
See: https://docs.docker.com/engine/reference/commandline/run/#set-environment-variables--e---env---env-file";
8 |         exit 1;
9 |     fi
10 | done
11 | 
12 | if [[ -v OAUTH_CONSUMER_KEY && -v OAUTH_CONSUMER_SECRET ]]; then # both variables themselves must be set
13 |     envsubst < /templates/oauth.ini > /quickstatements/data/oauth.ini;
14 | fi
15 | 
16 | envsubst < /templates/config.json > /var/www/html/quickstatements/public_html/config.json
17 | envsubst < /templates/php.ini > /usr/local/etc/php/conf.d/php.ini
18 | 
19 | docker-php-entrypoint apache2-foreground
20 | 
--------------------------------------------------------------------------------
/quickstatements/latest/oauth.ini:
--------------------------------------------------------------------------------
1 | ; HTTP User-Agent header
2 | agent = 'Wikibase Docker QuickStatements'
3 | ; assigned by Special:OAuthConsumerRegistration (request modelled after https://www.wikidata.org/wiki/Special:OAuthListConsumers/view/77b4ae5506dd7dbb0bb07f80e3ae3ca9)
4 | consumerKey = '${OAUTH_CONSUMER_KEY}'
5 | consumerSecret = '${OAUTH_CONSUMER_SECRET}'
6 | 
--------------------------------------------------------------------------------
/quickstatements/latest/php.ini:
--------------------------------------------------------------------------------
1 | date.timezone = "${PHP_TIMEZONE}"
2 | 
--------------------------------------------------------------------------------
/setup.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #
3 | # This is an example setup script that can be run
4 | # This should work on a WMF labs debian jessie VM
5 | # (as that is what I wrote it on)
6 | # Enjoy!
7 | 
8 | ################################################################
9 | ################## Install Docker CE ######################
10 | ################################################################
11 | 
12 | # Update the apt package index:
13 | sudo apt-get update
14 | 
15 | # Install packages to allow apt to use a repository over HTTPS:
16 | sudo apt-get install --yes \
17 |     apt-transport-https \
18 |     ca-certificates \
19 |     curl \
20 |     gnupg2 \
21 |     software-properties-common
22 | 
23 | # Add Docker’s official GPG key:
24 | curl -fsSL https://download.docker.com/linux/$(. /etc/os-release; echo "$ID")/gpg | sudo apt-key add -
25 | 
26 | # Add Docker's stable repository
27 | sudo add-apt-repository \
28 |     "deb [arch=amd64] https://download.docker.com/linux/$(. 
/etc/os-release; echo "$ID") \
29 |     $(lsb_release -cs) \
30 |     stable"
31 | 
32 | # Update the apt package index:
33 | sudo apt-get update
34 | 
35 | # Install the latest version of Docker CE
36 | sudo apt-get --yes install docker-ce
37 | 
38 | # Run Hello World
39 | sudo docker run hello-world
40 | 
41 | ################################################################
42 | ############## Install Docker Compose ####################
43 | ################################################################
44 | 
45 | # Install pip
46 | sudo apt-get install --yes \
47 |     python-pip
48 | 
49 | # Install docker-compose using pip
50 | sudo pip install docker-compose
51 | 
52 | ################################################################
53 | ############### Clone wikibase-docker ####################
54 | ################################################################
55 | 
56 | # Install git
57 | sudo apt-get install --yes \
58 |     git
59 | 
60 | # Clone the repo
61 | git clone https://github.com/wmde/wikibase-docker.git
62 | 
63 | # Switch to the wikibase-docker directory
64 | cd wikibase-docker/
65 | 
66 | ################################################################
67 | ###################### Run It! ###########################
68 | ################################################################
69 | 
70 | # Pull the images from docker hub
71 | sudo docker-compose pull
72 | 
73 | # Run the services
74 | sudo docker-compose up --no-build -d
75 | 
--------------------------------------------------------------------------------
/wdqs-frontend/README.md:
--------------------------------------------------------------------------------
1 | ## wdqs-frontend docker image
2 | 
3 | UI for the wikibase query service, as seen at [https://query.wikidata.org](https://query.wikidata.org) for Wikidata.
4 | 
5 | Automated build.
6 | 
7 | ### Tags
8 | 
9 | Image name | Parent image | WDQS UI Version
10 | ------------------------------- | ------------------------ | --------------
11 | `wikibase/wdqs-frontend` : `latest` | [nginx:stable-alpine](https://hub.docker.com/_/nginx/) | master (built)
12 | `wikibase/wdqs-frontend` : `legacy` | [nginx:stable-alpine](https://hub.docker.com/_/nginx/) | master (not built)
13 | 
14 | 
15 | ### Environment variables
16 | 
17 | Variable | Default | Description
18 | ------------------| ----------------------------| ----------
19 | `LANGUAGE` | "en" | Language to use in the UI
20 | `BRAND_TITLE` | "DockerWikibaseQueryService" | Name to display on the UI
21 | `WIKIBASE_HOST` | "wikibase.svc" | Hostname of the Wikibase host
22 | `WDQS_HOST` | "wdqs-proxy.svc" | Hostname of the WDQS host (probably READONLY, hence the use of the wdqs-proxy service)
23 | `COPYRIGHT_URL` | "undefined" | URL for the copyright notice
24 | 
25 | ### Development
26 | 
27 | Both images are built directly from master, so new images for releases are not needed (as there are no releases).
28 | 
29 | This does mean that sometimes things will break and the images will need fixing.
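For reference, a minimal run of this image against existing Wikibase and wdqs-proxy containers might look like the sketch below (the network name and service aliases are assumptions for illustration; the defaults from the table above are shown explicitly):

```bash
# Assumes a user-defined Docker network on which the Wikibase and
# wdqs-proxy containers are reachable under the aliases used below.
docker run --rm -p 8834:80 \
    -e WIKIBASE_HOST=wikibase.svc \
    -e WDQS_HOST=wdqs-proxy.svc \
    -e LANGUAGE=en \
    -e BRAND_TITLE=MyQueryService \
    --network wikibase-network \
    wikibase/wdqs-frontend:latest
```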
30 | 
--------------------------------------------------------------------------------
/wdqs-frontend/latest/.travis/build-deploy.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #Oneline for full directory name see: https://stackoverflow.com/questions/59895/getting-the-source-directory-of-a-bash-script-from-within
3 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
4 | set -e
5 | docker build "$DIR/../" -t wikibase/wdqs-frontend:latest
6 | 
7 | if [ "$SHOULD_DOCKER_PUSH" = true ]; then
8 | docker push wikibase/wdqs-frontend:latest
9 | fi
10 | 
--------------------------------------------------------------------------------
/wdqs-frontend/latest/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:xenial as fetcher
2 | 
3 | RUN apt-get update && \
4 |     apt-get install --yes --no-install-recommends unzip=6.* && \
5 |     apt-get clean && rm -rf /var/lib/apt/lists/*
6 | 
7 | ADD https://github.com/wikimedia/wikidata-query-gui/archive/master.zip ./master.zip
8 | 
9 | # Creates /wikidata-query-gui-master
10 | RUN unzip master.zip && rm master.zip
11 | 
12 | 
13 | # TODO this should probably just be a node image?
14 | FROM nginx:stable-alpine as builder
15 | 
16 | COPY --from=fetcher /wikidata-query-gui-master /tmp/wikidata-query-gui-master
17 | 
18 | WORKDIR /tmp/wikidata-query-gui-master
19 | 
20 | # Put wdqs gui in the right place
21 | RUN apk --no-cache add --virtual build-dependencies ca-certificates~=20191127-r2 git~=2.24 nodejs~=12 npm~=12 jq~=1.6 python~=2.7 make~=4.2 g++~=9.3
22 | 
23 | # TODO do npm build instead of leaving any dev node modules hanging around
24 | # NOTE: both jq edits are applied in a single pass reading package.json.orig;
25 | # two separate passes each reading package.json.orig would drop the first edit.
26 | RUN mv package.json package.json.orig \
27 |     && jq 'delpaths([["devDependencies","karma-qunit"],["devDependencies","qunitjs"],["devDependencies","sinon"]]) | setpath(["devDependencies","less"]; "~2.7.1")' \
28 |     > package.json < package.json.orig \
29 |     && npm install \
30 |     && npm run build
31 | 
32 | 
33 | FROM nginx:stable-alpine as final
34 | 
35 | WORKDIR /usr/share/nginx/html
36 | 
37 | COPY --from=builder /tmp/wikidata-query-gui-master/build /usr/share/nginx/html
38 | RUN echo "" > style.css
39 | COPY entrypoint.sh /entrypoint.sh
40 | COPY custom-config.json /templates/custom-config.json
41 | COPY default.conf /templates/default.conf
42 | 
43 | ENV LANGUAGE=en\
44 |     BRAND_TITLE=DockerWikibaseQueryService\
45 |     COPYRIGHT_URL=undefined
46 | 
47 | ENTRYPOINT ["/entrypoint.sh"]
48 | CMD ["nginx", "-g", "daemon off;"]
49 | 
--------------------------------------------------------------------------------
/wdqs-frontend/latest/custom-config.json:
--------------------------------------------------------------------------------
1 | {
2 |     "api": {
3 |         "sparql": {
4 |             "uri": "/proxy/wdqs/bigdata/namespace/wdq/sparql"
5 |         },
6 |         "wikibase": {
7 |             "uri": "/proxy/wikibase/w/api.php"
8 |         },
9 |         "examples": {
10 |             "server": "https://www.wikidata.org/",
11 |             "endpoint": "api/rest_v1/page/html/",
12 |             "pageTitle": "Wikidata:SPARQL_query_service/queries/examples",
13 |             "pagePathElement": "wiki/"
14 |         },
15 |         "urlShortener": "tinyurl"
16 |     },
17 |     "brand": {
18 |         "title": "$BRAND_TITLE",
19 |         "logo": "logo.svg",
20 |         "favicon": "favicon.ico",
21 |         "copyrightUrl": "$COPYRIGHT_URL",
22 |         "index": "./index.html"
23 |     }
24 | }
25 | 
--------------------------------------------------------------------------------
/wdqs-frontend/latest/default.conf:
-------------------------------------------------------------------------------- 1 | # This file is provided by the wikibase/wdqs-frontend docker image. 2 | server { 3 | listen 80; 4 | server_name localhost; 5 | 6 | location /proxy/wikibase { 7 | rewrite /proxy/wikibase/(.*) /${DOLLAR}1 break; 8 | proxy_pass http://$WIKIBASE_HOST:80; 9 | } 10 | 11 | location /proxy/wdqs { 12 | rewrite /proxy/wdqs/(.*) /${DOLLAR}1 break; 13 | proxy_pass http://$WDQS_HOST:80; 14 | } 15 | 16 | location / { 17 | root /usr/share/nginx/html; 18 | index index.html index.htm; 19 | } 20 | 21 | error_page 500 502 503 504 /50x.html; 22 | location = /50x.html { 23 | root /usr/share/nginx/html; 24 | } 25 | 26 | } -------------------------------------------------------------------------------- /wdqs-frontend/latest/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # This file is provided by the wikibase/wdqs-frontend docker image. 3 | 4 | # Test if required environment variables have been set 5 | if [ -z "$WIKIBASE_HOST" ]; then 6 | echo "WIKIBASE_HOST is required but isn't set. You should pass it to docker. See: https://docs.docker.com/engine/reference/commandline/run/#set-environment-variables--e---env---env-file"; 7 | exit 1; 8 | fi 9 | 10 | if [ -z "$WDQS_HOST" ]; then 11 | echo "WDQS_HOST is required but isn't set. You should pass it to docker. See: https://docs.docker.com/engine/reference/commandline/run/#set-environment-variables--e---env---env-file"; 12 | exit 1; 13 | fi 14 | 15 | set -eu 16 | 17 | export DOLLAR='$' 18 | envsubst < /templates/custom-config.json > /usr/share/nginx/html/custom-config.json 19 | envsubst < /templates/default.conf > /etc/nginx/conf.d/default.conf 20 | 21 | exec "$@" 22 | -------------------------------------------------------------------------------- /wdqs-frontend/legacy/.travis/build-deploy.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | #Oneline for full directory name see: https://stackoverflow.com/questions/59895/getting-the-source-directory-of-a-bash-script-from-within 3 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 4 | set -e 5 | docker build "$DIR/../" -t wikibase/wdqs-frontend:legacy 6 | 7 | if [ "$SHOULD_DOCKER_PUSH" = true ]; then 8 | docker push wikibase/wdqs-frontend:legacy 9 | fi 10 | -------------------------------------------------------------------------------- /wdqs-frontend/legacy/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:xenial as fetcher 2 | 3 | RUN apt-get update && \ 4 | apt-get install --yes --no-install-recommends unzip=6.* && \ 5 | apt-get clean && rm -rf /var/lib/apt/lists/* 6 | 7 | ADD https://github.com/wikimedia/wikidata-query-gui/archive/master.zip ./master.zip 8 | 9 | # Creates /wikidata-query-gui-master 10 | RUN unzip master.zip && rm master.zip 11 | 12 | 13 | # TODO this should probably just be a node image? 
14 | FROM nginx:stable-alpine as builder
15 | 
16 | COPY --from=fetcher /wikidata-query-gui-master /tmp/wikidata-query-gui-master
17 | 
18 | WORKDIR /tmp/wikidata-query-gui-master
19 | 
20 | # Put wdqs gui in the right place
21 | RUN apk --no-cache add --virtual build-dependencies ca-certificates~=20191127-r2 git~=2.24 nodejs~=12 npm~=12 jq~=1.6 python~=2.7 make~=4.2 g++~=9.3
22 | 
23 | # TODO do npm build instead of leaving any dev node modules hanging around
24 | # NOTE: both jq edits are applied in a single pass reading package.json.orig;
25 | # two separate passes each reading package.json.orig would drop the first edit.
26 | RUN mv package.json package.json.orig \
27 |     && jq 'delpaths([["devDependencies","karma-qunit"],["devDependencies","qunitjs"],["devDependencies","sinon"]]) | setpath(["devDependencies","less"]; "~2.7.1")' \
28 |     > package.json < package.json.orig \
29 |     && npm install
30 | 
31 | 
32 | FROM nginx:stable-alpine as final
33 | 
34 | WORKDIR /usr/share/nginx/html
35 | 
36 | COPY --from=builder /tmp/wikidata-query-gui-master /usr/share/nginx/html
37 | RUN echo "" > style.css
38 | COPY entrypoint.sh /entrypoint.sh
39 | COPY custom-config.json /templates/custom-config.json
40 | COPY default.conf /templates/default.conf
41 | 
42 | ENV LANGUAGE=en\
43 |     BRAND_TITLE=DockerWikibaseQueryService\
44 |     COPYRIGHT_URL=undefined
45 | 
46 | ENTRYPOINT ["/entrypoint.sh"]
47 | CMD ["nginx", "-g", "daemon off;"]
48 | 
--------------------------------------------------------------------------------
/wdqs-frontend/legacy/custom-config.json:
--------------------------------------------------------------------------------
1 | {
2 |     "api": {
3 |         "sparql": {
4 |             "uri": "/proxy/wdqs/bigdata/namespace/wdq/sparql"
5 |         },
6 |         "wikibase": {
7 |             "uri": "/proxy/wikibase/w/api.php"
8 |         },
9 |         "examples": {
10 |             "server": "https://www.wikidata.org/",
11 |             "endpoint": "api/rest_v1/page/html/",
12 |             "pageTitle": "Wikidata:SPARQL_query_service/queries/examples",
13 |             "pagePathElement": "wiki/"
14 |         },
15 |         "urlShortener": "tinyurl"
16 |     },
17 |     "brand": {
18 |         "title": "$BRAND_TITLE",
19 |         "logo": "logo.svg",
20 |         "favicon": "favicon.ico",
21 |         "copyrightUrl": "$COPYRIGHT_URL",
22 |         "index": "./index.html"
23 |     }
24 | }
25 | 
--------------------------------------------------------------------------------
/wdqs-frontend/legacy/default.conf:
--------------------------------------------------------------------------------
1 | # This file is provided by the wikibase/wdqs-frontend docker image.
2 | server {
3 |     listen 80;
4 |     server_name localhost;
5 | 
6 |     location /proxy/wikibase {
7 |         rewrite /proxy/wikibase/(.*) /${DOLLAR}1 break;
8 |         proxy_pass http://$WIKIBASE_HOST:80;
9 |     }
10 | 
11 |     location /proxy/wdqs {
12 |         rewrite /proxy/wdqs/(.*) /${DOLLAR}1 break;
13 |         proxy_pass http://$WDQS_HOST:80;
14 |     }
15 | 
16 |     location / {
17 |         root /usr/share/nginx/html;
18 |         index index.html index.htm;
19 |     }
20 | 
21 |     error_page 500 502 503 504 /50x.html;
22 |     location = /50x.html {
23 |         root /usr/share/nginx/html;
24 |     }
25 | 
26 | }
--------------------------------------------------------------------------------
/wdqs-frontend/legacy/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # This file is provided by the wikibase/wdqs-frontend docker image.
3 | 
4 | # Test if required environment variables have been set
5 | if [ -z "$WIKIBASE_HOST" ]; then
6 |     echo "WIKIBASE_HOST is required but isn't set. You should pass it to docker. 
See: https://docs.docker.com/engine/reference/commandline/run/#set-environment-variables--e---env---env-file";
7 |     exit 1;
8 | fi
9 | 
10 | if [ -z "$WDQS_HOST" ]; then
11 |     echo "WDQS_HOST is required but isn't set. You should pass it to docker. See: https://docs.docker.com/engine/reference/commandline/run/#set-environment-variables--e---env---env-file";
12 |     exit 1;
13 | fi
14 | 
15 | set -eu
16 | 
17 | export DOLLAR='$'
18 | envsubst < /templates/custom-config.json > /usr/share/nginx/html/custom-config.json
19 | envsubst < /templates/default.conf > /etc/nginx/conf.d/default.conf
20 | 
21 | exec "$@"
22 | 
--------------------------------------------------------------------------------
/wdqs-proxy/README.md:
--------------------------------------------------------------------------------
1 | ## wdqs-proxy docker image
2 | 
3 | Proxy to put in front of the wdqs image, making it read-only and enforcing query timeouts.
4 | 
5 | To change how this image is configured, just mount over the wdqs.template file.
6 | 
7 | Automated build.
8 | 
9 | ## Tags
10 | 
11 | Image name | Parent image | Version
12 | ------------------------------- | ------------------------ | --------------
13 | `wikibase/wdqs-proxy` : `latest` | [nginx:stable-alpine](https://hub.docker.com/_/nginx/) | master
14 | 
15 | 
16 | ## Environment variables
17 | 
18 | NOTE TODO XXX: PROXY_PASS_HOST is currently provided by the nginx image and
19 | we should probably instead use WDQS_HOST and WDQS_PORT and set PROXY_PASS_HOST ourselves.
20 | 
21 | Variable | Default | Description
22 | ------------------| ----------------------------| ----------
23 | `PROXY_PASS_HOST` | "wdqs.svc:9999" | Where to forward the requests to
24 | 
25 | 
26 | ### Filesystem layout
27 | 
28 | File | Description
29 | --------------------------------- | ------------------------------------------------------------------------------
30 | `/etc/nginx/conf.d/wdqs.template` | Template for the nginx config (substituted to `/etc/nginx/conf.d/default.conf` at runtime)
31 | `/etc/nginx/conf.d/default.conf` | nginx config. To override this you must also use a custom entrypoint to avoid the file being overwritten.
32 | 
33 | ### Development
34 | 
35 | This image is based directly on the nginx latest image, so new images are not needed for new releases.
36 | 
37 | However, if the latest image goes through a major version bump that breaks our configuration, we may need to create a new image.
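For reference, a sketch of running the proxy in front of a wdqs container, optionally mounting a custom template over the file described above (the network name, the wdqs alias and the local `my-wdqs.template` file are illustrative assumptions):

```bash
# wdqs.svc:9999 is the documented default for PROXY_PASS_HOST, shown
# explicitly here; my-wdqs.template is a hypothetical local nginx template.
docker run --rm -p 8989:80 \
    -e PROXY_PASS_HOST=wdqs.svc:9999 \
    -v "$(pwd)/my-wdqs.template:/etc/nginx/conf.d/wdqs.template" \
    --network wikibase-network \
    wikibase/wdqs-proxy:latest
```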
38 | 
--------------------------------------------------------------------------------
/wdqs-proxy/latest/.travis/build-deploy.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #Oneline for full directory name see: https://stackoverflow.com/questions/59895/getting-the-source-directory-of-a-bash-script-from-within
3 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
4 | set -e
5 | docker build "$DIR/../" -t wikibase/wdqs-proxy:latest
6 | 
7 | if [ "$SHOULD_DOCKER_PUSH" = true ]; then
8 | docker push wikibase/wdqs-proxy:latest
9 | fi
10 | 
--------------------------------------------------------------------------------
/wdqs-proxy/latest/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM nginx:stable-alpine
2 | 
3 | COPY entrypoint.sh /entrypoint.sh
4 | COPY wdqs.template /etc/nginx/conf.d/wdqs.template
5 | 
6 | ENTRYPOINT ["/entrypoint.sh"]
--------------------------------------------------------------------------------
/wdqs-proxy/latest/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # This file is provided by the wikibase/wdqs-proxy docker image.
3 | 
4 | # Test if required environment variables have been set
5 | if [ -z "$PROXY_PASS_HOST" ]; then
6 |     echo "PROXY_PASS_HOST is required but isn't set. You should pass it to docker. See: https://docs.docker.com/engine/reference/commandline/run/#set-environment-variables--e---env---env-file";
7 |     exit 1;
8 | fi
9 | 
10 | set -eu
11 | 
12 | envsubst < /etc/nginx/conf.d/wdqs.template > /etc/nginx/conf.d/default.conf
13 | 
14 | nginx -g 'daemon off;'
15 | 
--------------------------------------------------------------------------------
/wdqs-proxy/latest/wdqs.template:
--------------------------------------------------------------------------------
1 | # This file is provided by the wikibase/wdqs-proxy docker image.
2 | server {
3 |     listen 80 default_server;
4 |     listen [::]:80 default_server;
5 | 
6 |     location / {
7 |         # use IP address for the backend and not "localhost" to ensure
8 |         # connection is done over IPv4 (backend is configured as IPv4 only)
9 |         proxy_pass http://${PROXY_PASS_HOST};
10 | 
11 |         proxy_set_header X-BIGDATA-MAX-QUERY-MILLIS 60000;
12 |         proxy_set_header X-BIGDATA-READ-ONLY "yes";
13 | 
14 |         add_header Access-Control-Allow-Origin * always;
15 |         #add_header Cache-Control "public, max-age=300";
16 |         add_header Vary Accept;
17 | 
18 |         client_max_body_size 1m;
19 |         client_body_buffer_size 1m;
20 |         proxy_intercept_errors off;
21 |         proxy_buffering on;
22 |         proxy_buffer_size 16k;
23 |         proxy_buffers 256 16k;
24 |         proxy_busy_buffers_size 256k;
25 |         proxy_temp_file_write_size 16k;
26 |         proxy_max_temp_file_size 10m;
27 |         proxy_read_timeout 300;
28 | 
29 |         limit_except GET OPTIONS POST {
30 |             deny all;
31 |         }
32 | 
33 |     }
34 | }
35 | 
--------------------------------------------------------------------------------
/wdqs/0.3.10/.travis/build-deploy.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #Oneline for full directory name see: https://stackoverflow.com/questions/59895/getting-the-source-directory-of-a-bash-script-from-within
3 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
4 | set -e
5 | docker build "$DIR/../" -t wikibase/wdqs:0.3.10
6 | 
7 | if [ "$SHOULD_DOCKER_PUSH" = true ]; then
8 | docker push wikibase/wdqs:0.3.10
9 | fi
10 | 
--------------------------------------------------------------------------------
/wdqs/0.3.10/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:xenial as fetcher
2 | 
3 | RUN apt-get update && \
4 |     apt-get install --yes --no-install-recommends unzip=6.* && \
5 |     apt-get clean && rm -rf /var/lib/apt/lists/*
6 | 
7 | ADD https://archiva.wikimedia.org/repository/snapshots/org/wikidata/query/rdf/service/0.3.10-SNAPSHOT/service-0.3.10-SNAPSHOT-dist.zip ./service-dist.zip
8 | 
9 | RUN unzip service-dist.zip && rm service-dist.zip
10 | 
11 | 
12 | FROM openjdk:8-jdk-alpine
13 | 
14 | # Blazegraph scripts require bash
15 | # Install gettext for the envsubst command (it needs the libintl package)
16 | # Install curl for the loadData.sh wdqs script (if someone needs it)
17 | RUN set -x ; \
18 |     apk --no-cache add bash=\<4.5.0 gettext=\<0.19.8.2 libintl=\<0.19.8.2 curl=\<7.64.999 su-exec=\~0.2
19 | 
20 | RUN addgroup -g 66 -S blazegraph
21 | RUN adduser -S -G blazegraph -u 666 -s /bin/bash blazegraph
22 | 
23 | COPY --from=fetcher --chown=blazegraph:blazegraph /service-0.3.10-SNAPSHOT /wdqs
24 | 
25 | # Don't set a memory limit otherwise bad things happen (OOMs)
26 | ENV MEMORY=""\
27 |     HEAP_SIZE="1g"\
28 |     HOST="0.0.0.0"\
29 |     WDQS_ENTITY_NAMESPACES="120,122"\
30 |     WIKIBASE_SCHEME="http"\
31 |     WIKIBASE_MAX_DAYS_BACK="90"
32 | 
33 | WORKDIR /wdqs
34 | 
35 | COPY --chown=blazegraph:blazegraph wait-for-it.sh entrypoint.sh runBlazegraph.sh runUpdate.sh /
36 | COPY --chown=blazegraph:blazegraph mwservices.json /templates/mwservices.json
37 | COPY --chown=blazegraph:blazegraph RWStore.properties whitelist.txt /wdqs/
38 | 
39 | # TODO this shouldn't be needed, but CI currently doesn't check for the +x bit, which is why this line is here
40 | RUN chmod +x /wdqs/runUpdate.sh
41 | 
42 | ENTRYPOINT ["/entrypoint.sh"]
43 | 
--------------------------------------------------------------------------------
/wdqs/0.3.10/RWStore.properties:
-------------------------------------------------------------------------------- 1 | # This file is provided by the wikibase/wdqs docker image. 2 | 3 | # Dump data in target. 4 | com.bigdata.journal.AbstractJournal.file=data/data.jnl 5 | com.bigdata.journal.AbstractJournal.bufferMode=DiskRW 6 | com.bigdata.service.AbstractTransactionService.minReleaseAge=1 7 | 8 | com.bigdata.rdf.store.AbstractTripleStore.quads=false 9 | com.bigdata.rdf.store.AbstractTripleStore.statementIdentifiers=false 10 | 11 | # Don't use truth maintenance right yet. 12 | com.bigdata.rdf.sail.truthMaintenance=false 13 | com.bigdata.rdf.store.AbstractTripleStore.textIndex=false 14 | com.bigdata.rdf.store.AbstractTripleStore.axiomsClass=com.bigdata.rdf.axioms.NoAxioms 15 | 16 | # Use our private vocabularies 17 | com.bigdata.rdf.store.AbstractTripleStore.vocabularyClass=org.wikidata.query.rdf.blazegraph.WikibaseVocabulary$V003 18 | com.bigdata.rdf.store.AbstractTripleStore.inlineURIFactory=org.wikidata.query.rdf.blazegraph.WikibaseInlineUriFactory 19 | com.bigdata.rdf.store.AbstractTripleStore.extensionFactoryClass=org.wikidata.query.rdf.blazegraph.WikibaseExtensionFactory 20 | 21 | # Suggested settings from https://phabricator.wikimedia.org/T92308 22 | com.bigdata.btree.writeRetentionQueue.capacity=4000 23 | com.bigdata.btree.BTree.branchingFactor=128 24 | # 200M initial extent. 25 | com.bigdata.journal.AbstractJournal.initialExtent=209715200 26 | com.bigdata.journal.AbstractJournal.maximumExtent=209715200 27 | # Bump up the branching factor for the lexicon indices on the default kb. 28 | com.bigdata.namespace.wdq.lex.com.bigdata.btree.BTree.branchingFactor=400 29 | com.bigdata.namespace.wdq.lex.ID2TERM.com.bigdata.btree.BTree.branchingFactor=800 30 | com.bigdata.namespace.wdq.lex.TERM2ID.com.bigdata.btree.BTree.branchingFactor=128 31 | # Bump up the branching factor for the statement indices on the default kb. 32 | com.bigdata.namespace.wdq.spo.com.bigdata.btree.BTree.branchingFactor=1024 33 | com.bigdata.namespace.wdq.spo.OSP.com.bigdata.btree.BTree.branchingFactor=64 34 | com.bigdata.namespace.wdq.spo.SPO.com.bigdata.btree.BTree.branchingFactor=600 35 | # larger statement buffer capacity for bulk loading. 36 | com.bigdata.rdf.sail.bufferCapacity=100000 37 | # Override the #of write cache buffers to improve bulk load performance. Requires enough native heap! 38 | com.bigdata.journal.AbstractJournal.writeCacheBufferCount=1000 39 | # Enable small slot optimization! 
40 | com.bigdata.rwstore.RWStore.smallSlotType=1024 41 | # See https://jira.blazegraph.com/browse/BLZG-1385 - reduce LRU cache timeout 42 | com.bigdata.journal.AbstractJournal.historicalIndexCacheCapacity=20 43 | com.bigdata.journal.AbstractJournal.historicalIndexCacheTimeout=5 44 | # Geospatial ON 45 | com.bigdata.rdf.store.AbstractTripleStore.geoSpatial=true 46 | com.bigdata.rdf.store.AbstractTripleStore.geoSpatialDefaultDatatype=http\://www.opengis.net/ont/geosparql#wktLiteral 47 | com.bigdata.rdf.store.AbstractTripleStore.geoSpatialIncludeBuiltinDatatypes=false 48 | com.bigdata.rdf.store.AbstractTripleStore.geoSpatialDatatypeConfig.0={"config": \ 49 | {"uri":"http://www.opengis.net/ont/geosparql#wktLiteral",\ 50 | "literalSerializer":"org.wikidata.query.rdf.blazegraph.inline.literal.WKTSerializer",\ 51 | "fields":[\ 52 | {"valueType":"DOUBLE","multiplier":"1000000000","serviceMapping":"LONGITUDE"},\ 53 | {"valueType":"DOUBLE","multiplier":"1000000000","serviceMapping":"LATITUDE"},\ 54 | {"valueType":"LONG","multiplier":"1","minValue":"0","serviceMapping":"COORD_SYSTEM"}\ 55 | ]}} 56 | -------------------------------------------------------------------------------- /wdqs/0.3.10/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This file is provided by the wikibase/wdqs docker image. 3 | 4 | # Test if required environment variables have been set 5 | REQUIRED_VARIABLES=(WIKIBASE_HOST WDQS_HOST WDQS_PORT) 6 | for i in ${REQUIRED_VARIABLES[@]}; do 7 | eval THISSHOULDBESET=\$$i 8 | if [ -z "$THISSHOULDBESET" ]; then 9 | echo "$i is required but isn't set. You should pass it to docker. See: https://docs.docker.com/engine/reference/commandline/run/#set-environment-variables--e---env---env-file"; 10 | exit 1; 11 | fi 12 | done 13 | 14 | set -eu 15 | 16 | export BLAZEGRAPH_OPTS="-DwikibaseHost=${WIKIBASE_HOST}" 17 | export UPDATER_OPTS="-DwikibaseHost=${WIKIBASE_HOST} -DwikibaseMaxDaysBack=${WIKIBASE_MAX_DAYS_BACK}" 18 | 19 | envsubst < /templates/mwservices.json > /wdqs/mwservices.json 20 | chown 666:66 /wdqs/mwservices.json 21 | 22 | # The data directory should always be owned by the blazegraph user 23 | # This used to be owned by root (https://phabricator.wikimedia.org/T237248) 24 | if [ -d /wdqs/data/ ]; then 25 | chown 666:66 -R /wdqs/data/ 26 | fi 27 | 28 | su-exec 666:66 "$@" 29 | -------------------------------------------------------------------------------- /wdqs/0.3.10/mwservices.json: -------------------------------------------------------------------------------- 1 | { 2 | "services": { 3 | "Generator": { 4 | "@note": "Use generator Query API", 5 | "@docs": "https://www.mediawiki.org/wiki/API:Query#Generators", 6 | "params": { 7 | "action": "query", 8 | "generator": { 9 | "type": "string" 10 | }, 11 | "prop": { 12 | "type": "string", 13 | "default": "info|pageprops" 14 | }, 15 | "ppprop": { 16 | "type": "string", 17 | "default": "wikibase_item" 18 | } 19 | }, 20 | "output": { 21 | "items": "/api/query/pages/page", 22 | "vars": { 23 | "title": "@title", 24 | "item": "pageprops/@wikibase_item", 25 | "pageid": "@pageid", 26 | "lastrevid": "@lastrevid", 27 | "timestamp": "@touched" 28 | } 29 | } 30 | }, 31 | "Categories": { 32 | "params": { 33 | "action": "query", 34 | "prop": "categories", 35 | "titles": { 36 | "type": "list" 37 | }, 38 | "cllimit": { 39 | "type": "int", 40 | "default": "max" 41 | } 42 | }, 43 | "output": { 44 | "items": "//api/query/pages/page/categories/cl", 45 | "vars": { 46 | "category": "@title", 
47 | "title": "//api/query/pages/page/@title" 48 | } 49 | } 50 | }, 51 | "Search": { 52 | "@note": "Full-text search in the wiki", 53 | "@docs": "https://www.mediawiki.org/wiki/API:Search", 54 | "params": { 55 | "action": "query", 56 | "list": "search", 57 | "srsearch": { 58 | "type": "string" 59 | }, 60 | "srwhat": { 61 | "type": "string", 62 | "default": "text" 63 | }, 64 | "srlimit": { 65 | "type": "int", 66 | "default": "max" 67 | } 68 | }, 69 | "output": { 70 | "items": "//api/query/search/p", 71 | "vars": { 72 | "title": "@title" 73 | } 74 | } 75 | }, 76 | "EntitySearch": { 77 | "@note": "Entity search for Wikibase", 78 | "@docs": "https://www.mediawiki.org/wiki/Wikibase/API#wbsearchentities", 79 | "params": { 80 | "action": "wbsearchentities", 81 | "search": { 82 | "type": "string" 83 | }, 84 | "language": { 85 | "type": "string" 86 | }, 87 | "type": { 88 | "type": "string", 89 | "default": "item" 90 | }, 91 | "limit": { 92 | "type": "string", 93 | "default": "max" 94 | } 95 | }, 96 | "output": { 97 | "items": "//api/search/entity", 98 | "vars": { 99 | "item": "@id", 100 | "label": "@label" 101 | } 102 | } 103 | } 104 | }, 105 | "endpoints": ["${WIKIBASE_HOST}"] 106 | } 107 | -------------------------------------------------------------------------------- /wdqs/0.3.10/runBlazegraph.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # This file is provided by the wikibase/wdqs docker image. 3 | 4 | cd /wdqs 5 | 6 | ./runBlazegraph.sh -------------------------------------------------------------------------------- /wdqs/0.3.10/runUpdate.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # This file is provided by the wikibase/wdqs docker image. 3 | 4 | cd /wdqs 5 | 6 | # TODO env vars for entity namespaces, scheme and other settings 7 | /wait-for-it.sh $WIKIBASE_HOST:80 -t 300 -- \ 8 | /wait-for-it.sh $WDQS_HOST:$WDQS_PORT -t 300 -- \ 9 | ./runUpdate.sh -h http://$WDQS_HOST:$WDQS_PORT -- --wikibaseUrl $WIKIBASE_SCHEME://$WIKIBASE_HOST --conceptUri $WIKIBASE_SCHEME://$WIKIBASE_HOST --entityNamespaces $WDQS_ENTITY_NAMESPACES 10 | -------------------------------------------------------------------------------- /wdqs/0.3.10/wait-for-it.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # This file is provided by the wikibase/wdqs docker image. 3 | # Use this script to test if a given TCP host/port are available 4 | 5 | cmdname=$(basename $0) 6 | 7 | echoerr() { if [[ $QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } 8 | 9 | usage() 10 | { 11 | cat << USAGE >&2 12 | Usage: 13 | $cmdname host:port [-s] [-t timeout] [-- command args] 14 | -h HOST | --host=HOST Host or IP under test 15 | -p PORT | --port=PORT TCP port under test 16 | Alternatively, you specify the host and port as host:port 17 | -s | --strict Only execute subcommand if the test succeeds 18 | -q | --quiet Don't output any status messages 19 | -t TIMEOUT | --timeout=TIMEOUT 20 | Timeout in seconds, zero for no timeout 21 | -- COMMAND ARGS Execute command with args after the test finishes 22 | USAGE 23 | exit 1 24 | } 25 | 26 | wait_for() 27 | { 28 | if [[ $TIMEOUT -gt 0 ]]; then 29 | echoerr "$cmdname: waiting $TIMEOUT seconds for $HOST:$PORT" 30 | else 31 | echoerr "$cmdname: waiting for $HOST:$PORT without a timeout" 32 | fi 33 | start_ts=$(date +%s) 34 | while : 35 | do 36 | if [[ $ISBUSY -eq 1 ]]; then 37 | nc -z $HOST $PORT 38 | result=$? 
39 |     else
40 |         (echo > /dev/tcp/$HOST/$PORT) >/dev/null 2>&1
41 |         result=$?
42 |     fi
43 |     if [[ $result -eq 0 ]]; then
44 |         end_ts=$(date +%s)
45 |         echoerr "$cmdname: $HOST:$PORT is available after $((end_ts - start_ts)) seconds"
46 |         break
47 |     fi
48 |     sleep 1
49 | done
50 | return $result
51 | }
52 | 
53 | wait_for_wrapper()
54 | {
55 |     # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
56 |     if [[ $QUIET -eq 1 ]]; then
57 |         timeout $BUSYTIMEFLAG $TIMEOUT $0 --quiet --child --host=$HOST --port=$PORT --timeout=$TIMEOUT &
58 |     else
59 |         timeout $BUSYTIMEFLAG $TIMEOUT $0 --child --host=$HOST --port=$PORT --timeout=$TIMEOUT &
60 |     fi
61 |     PID=$!
62 |     trap "kill -INT -$PID" INT
63 |     wait $PID
64 |     RESULT=$?
65 |     if [[ $RESULT -ne 0 ]]; then
66 |         echoerr "$cmdname: timeout occurred after waiting $TIMEOUT seconds for $HOST:$PORT"
67 |     fi
68 |     return $RESULT
69 | }
70 | 
71 | # process arguments
72 | while [[ $# -gt 0 ]]
73 | do
74 |     case "$1" in
75 |         *:* )
76 |         hostport=(${1//:/ })
77 |         HOST=${hostport[0]}
78 |         PORT=${hostport[1]}
79 |         shift 1
80 |         ;;
81 |         --child)
82 |         CHILD=1
83 |         shift 1
84 |         ;;
85 |         -q | --quiet)
86 |         QUIET=1
87 |         shift 1
88 |         ;;
89 |         -s | --strict)
90 |         STRICT=1
91 |         shift 1
92 |         ;;
93 |         -h)
94 |         HOST="$2"
95 |         if [[ $HOST == "" ]]; then break; fi
96 |         shift 2
97 |         ;;
98 |         --host=*)
99 |         HOST="${1#*=}"
100 |         shift 1
101 |         ;;
102 |         -p)
103 |         PORT="$2"
104 |         if [[ $PORT == "" ]]; then break; fi
105 |         shift 2
106 |         ;;
107 |         --port=*)
108 |         PORT="${1#*=}"
109 |         shift 1
110 |         ;;
111 |         -t)
112 |         TIMEOUT="$2"
113 |         if [[ $TIMEOUT == "" ]]; then break; fi
114 |         shift 2
115 |         ;;
116 |         --timeout=*)
117 |         TIMEOUT="${1#*=}"
118 |         shift 1
119 |         ;;
120 |         --)
121 |         shift
122 |         CLI="$@"
123 |         break
124 |         ;;
125 |         --help)
126 |         usage
127 |         ;;
128 |         *)
129 |         echoerr "Unknown argument: $1"
130 |         usage
131 |         ;;
132 |     esac
133 | done
134 | 
135 | if [[ "$HOST" == "" || "$PORT" == "" ]]; then
136 |     echoerr "Error: you need to provide a host and port to test."
137 |     usage
138 | fi
139 | 
140 | TIMEOUT=${TIMEOUT:-15}
141 | STRICT=${STRICT:-0}
142 | CHILD=${CHILD:-0}
143 | QUIET=${QUIET:-0}
144 | 
145 | # check to see if timeout is from busybox?
146 | # (busybox's timeout needs its duration passed via the -t flag)
147 | TIMEOUT_PATH=$(realpath $(which timeout))
148 | if [[ $TIMEOUT_PATH =~ "busybox" ]]; then
149 |     ISBUSY=1
150 |     BUSYTIMEFLAG="-t"
151 | else
152 |     ISBUSY=0
153 |     BUSYTIMEFLAG=""
154 | fi
155 | 
156 | if [[ $CHILD -gt 0 ]]; then
157 |     wait_for
158 |     RESULT=$?
159 |     exit $RESULT
160 | else
161 |     if [[ $TIMEOUT -gt 0 ]]; then
162 |         wait_for_wrapper
163 |         RESULT=$?
164 |     else
165 |         wait_for
166 |         RESULT=$?
167 | fi 168 | fi 169 | 170 | if [[ $CLI != "" ]]; then 171 | if [[ $RESULT -ne 0 && $STRICT -eq 1 ]]; then 172 | echoerr "$cmdname: strict mode, refusing to execute subprocess" 173 | exit $RESULT 174 | fi 175 | exec $CLI 176 | else 177 | exit $RESULT 178 | fi -------------------------------------------------------------------------------- /wdqs/0.3.10/whitelist.txt: -------------------------------------------------------------------------------- 1 | https://query.wikidata.org/bigdata/namespace/categories/sparql 2 | https://query.wikidata.org/bigdata/namespace/dcatap/sparql 3 | https://query.wikidata.org/bigdata/namespace/wdq/sparql 4 | https://query.wikidata.org/sparql 5 | http://sparql.europeana.eu/ 6 | http://nomisma.org/query 7 | http://data.cervantesvirtual.com/openrdf-sesame/repositories/data 8 | http://datos.bne.es/sparql 9 | http://edan.si.edu/saam/sparql 10 | http://data.bnf.fr/sparql 11 | http://dbpedia.org/sparql 12 | http://rdf.insee.fr/sparql 13 | http://sparql.wikipathways.org/ 14 | http://dati.camera.it/sparql 15 | http://dati.emilia-romagna.it/sparql 16 | http://data.ordnancesurvey.co.uk/datasets/os-linked-data/apis/sparql 17 | http://statistics.data.gov.uk/sparql 18 | http://opendatacommunities.org/sparql 19 | http://data.plan4all.eu/sparql 20 | http://vocab.getty.edu/sparql.json 21 | http://linkeddata.uriburner.com/sparql 22 | http://tools.wmflabs.org/mw2sparql/sparql 23 | http://linkedgeodata.org/sparql 24 | http://sparql.hegroup.org/sparql/ 25 | http://lov.okfn.org/dataset/lov/sparql 26 | http://opencitations.net/sparql 27 | http://dati.beniculturali.it/sparql 28 | http://zbw.eu/beta/sparql/stw/query 29 | http://zbw.eu/beta/sparql/gnd/query 30 | http://zbw.eu/beta/sparql/econ_pers/query 31 | http://zbw.eu/beta/sparql/econ_corp/query 32 | http://id.nlm.nih.gov/mesh/sparql 33 | http://collection.britishmuseum.org/sparql 34 | http://lod.opentransportdata.swiss/query 35 | http://data.bibliotheken.nl/sparql 36 | https://data.pdok.nl/sparql 37 | http://collection.britishart.yale.edu/openrdf-sesame/repositories/ycba 38 | http://bnb.data.bl.uk/sparql 39 | https://dati.quirinale.it/sparql 40 | http://sparql.uniprot.org/sparql 41 | https://sparql.uniprot.org/sparql 42 | https://ld.stadt-zuerich.ch/query 43 | http://data.nobelprize.org/sparql 44 | http://zbw.eu/beta/sparql/pm20/query 45 | http://libris.kb.se/sparql 46 | https://api.parliament.uk/sparql 47 | https://data.istex.fr/sparql/ 48 | https://data.epo.org/linked-data/query 49 | https://data.niod.nl/PoolParty/sparql/WO2_Thesaurus 50 | https://www.ebi.ac.uk/rdf/services/sparql 51 | https://integbio.jp/rdf/ddbj/sparql 52 | https://integbio.jp/rdf/kero/sparql 53 | https://integbio.jp/rdf/sparql 54 | https://data.idref.fr/sparql 55 | https://sophox.org/sparql 56 | http://sciencesource-query.wmflabs.org/proxy/wdqs/bigdata/namespace/wdq/sparql 57 | http://agrovoc.uniroma2.it:3030/agrovoc/sparql 58 | https://api.druid.datalegend.net/datasets/nlgis/gemeentegeschiedenis/services/gemeentegeschiedenis/sparql 59 | https://data.niod.nl/PoolParty/sparql/WO2_biografieen 60 | http://data.muziekschatten.nl/sparql 61 | https://jpsearch.go.jp/rdf/sparql/ 62 | https://slod.fiz-karlsruhe.de/sparql 63 | http://dbpedia.org/sparql* 64 | https://wdhqs.wmflabs.org/sparql 65 | http://dati.isprambiente.it/sparql 66 | https://linkeddata.cultureelerfgoed.nl/sparql 67 | https://tora.entryscape.net/store/sparql 68 | http://de.dbpedia.org/sparql* 69 | http://lod.openaire.eu/sparql 70 | https://lod.openaire.eu/sparql 71 | 
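The three scripts above are wired together purely by environment variables; no hosts are hard-coded into the image. A minimal sketch of starting the service from this image follows; the network, names, and volume are illustrative assumptions (they mirror the defaults listed in the wdqs README later in this repo), not things the image creates for you:

    docker network create wikibase-net   # assumed shared network with the wikibase container
    docker run --rm --network wikibase-net --name wdqs.svc \
        -e WIKIBASE_HOST=wikibase.svc \
        -e WDQS_HOST=wdqs.svc \
        -e WDQS_PORT=9999 \
        -v wdqs-data:/wdqs/data \
        wikibase/wdqs:0.3.10 \
        /runBlazegraph.sh

The updater is started the same way with `/runUpdate.sh` as the command. Both pass through `entrypoint.sh`, which refuses to start unless `WIKIBASE_HOST`, `WDQS_HOST`, and `WDQS_PORT` are set, and which then drops privileges to the blazegraph user via su-exec.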
-------------------------------------------------------------------------------- /wdqs/0.3.40/.travis/build-deploy.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | #Oneline for full directory name see: https://stackoverflow.com/questions/59895/getting-the-source-directory-of-a-bash-script-from-within 3 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 4 | set -e 5 | docker build "$DIR/../" -t wikibase/wdqs:0.3.40 -t wikibase/wdqs:latest 6 | 7 | if [ "$SHOULD_DOCKER_PUSH" = true ]; then 8 | docker push wikibase/wdqs:0.3.40 9 | docker push wikibase/wdqs:latest 10 | fi 11 | -------------------------------------------------------------------------------- /wdqs/0.3.40/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:xenial as fetcher 2 | 3 | RUN apt-get update && \ 4 | apt-get install --yes --no-install-recommends unzip=6.* && \ 5 | apt-get clean && rm -rf /var/lib/apt/lists/* 6 | 7 | ADD https://archiva.wikimedia.org/repository/releases/org/wikidata/query/rdf/service/0.3.40/service-0.3.40-dist.tar.gz ./service-dist.tar.gz 8 | 9 | RUN tar xfv service-dist.tar.gz && rm service-dist.tar.gz 10 | 11 | FROM openjdk:8-jdk-alpine 12 | 13 | # Blazegraph scripts require bash 14 | # Install gettext for envsubst command, (it needs libintl package) 15 | # Install curl for the loadData.sh wdqs script (if someone needs it) 16 | RUN set -x ; \ 17 | apk --no-cache add bash=\<4.5.0 gettext=\<0.19.8.2 libintl=\<0.19.8.2 curl=\<7.64.999 su-exec=\~0.2 18 | 19 | RUN addgroup -g 66 -S blazegraph 20 | RUN adduser -S -G blazegraph -u 666 -s /bin/bash blazegraph 21 | 22 | COPY --from=fetcher --chown=blazegraph:blazegraph /service-0.3.40 /wdqs 23 | 24 | RUN mkdir /var/log/wdqs && chown blazegraph /var/log/wdqs 25 | 26 | # Don't set a memory limit otherwise bad things happen (OOMs) 27 | ENV MEMORY=""\ 28 | HEAP_SIZE="1g"\ 29 | HOST="0.0.0.0"\ 30 | WDQS_ENTITY_NAMESPACES="120,122"\ 31 | WIKIBASE_SCHEME="http"\ 32 | WIKIBASE_MAX_DAYS_BACK="90" 33 | 34 | WORKDIR /wdqs 35 | 36 | COPY --chown=blazegraph:blazegraph wait-for-it.sh entrypoint.sh runBlazegraph.sh runUpdate.sh / 37 | COPY --chown=blazegraph:blazegraph mwservices.json /templates/mwservices.json 38 | COPY --chown=blazegraph:blazegraph RWStore.properties whitelist.txt /wdqs/ 39 | 40 | # TODO this shouldn't be needed, but CI currently doesnt check for the +x bit, which is why this line is here 41 | RUN chmod +x /wdqs/runUpdate.sh 42 | 43 | ENTRYPOINT ["/entrypoint.sh"] 44 | -------------------------------------------------------------------------------- /wdqs/0.3.40/RWStore.properties: -------------------------------------------------------------------------------- 1 | # This file is provided by the wikibase/wdqs docker image. 2 | 3 | # Dump data in target. 4 | com.bigdata.journal.AbstractJournal.file=data/data.jnl 5 | com.bigdata.journal.AbstractJournal.bufferMode=DiskRW 6 | com.bigdata.service.AbstractTransactionService.minReleaseAge=1 7 | # Disable raw records - see https://phabricator.wikimedia.org/T213375 8 | com.bigdata.rdf.store.AbstractTripleStore.enableRawRecordsSupport=false 9 | 10 | com.bigdata.rdf.store.AbstractTripleStore.quads=false 11 | com.bigdata.rdf.store.AbstractTripleStore.statementIdentifiers=false 12 | 13 | # Don't use truth maintenance right yet. 
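# (i.e. keep Blazegraph's incremental inference bookkeeping disabled; WDQS stores plain triples and computes no entailments)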
14 | com.bigdata.rdf.sail.truthMaintenance=false 15 | com.bigdata.rdf.store.AbstractTripleStore.textIndex=false 16 | com.bigdata.rdf.store.AbstractTripleStore.axiomsClass=com.bigdata.rdf.axioms.NoAxioms 17 | 18 | # Use our private vocabularies 19 | com.bigdata.rdf.store.AbstractTripleStore.vocabularyClass=org.wikidata.query.rdf.blazegraph.WikibaseVocabulary$V005 20 | # Enable values inlining - see https://phabricator.wikimedia.org/T213375 21 | com.bigdata.rdf.store.AbstractTripleStore.inlineURIFactory=org.wikidata.query.rdf.blazegraph.WikibaseInlineUriFactory$V002 22 | com.bigdata.rdf.store.AbstractTripleStore.extensionFactoryClass=org.wikidata.query.rdf.blazegraph.WikibaseExtensionFactory 23 | 24 | # Suggested settings from https://phabricator.wikimedia.org/T92308 25 | com.bigdata.btree.writeRetentionQueue.capacity=4000 26 | com.bigdata.btree.BTree.branchingFactor=128 27 | # 200M initial extent. 28 | com.bigdata.journal.AbstractJournal.initialExtent=209715200 29 | com.bigdata.journal.AbstractJournal.maximumExtent=209715200 30 | # Bump up the branching factor for the lexicon indices on the default kb. 31 | com.bigdata.namespace.wdq.lex.com.bigdata.btree.BTree.branchingFactor=400 32 | com.bigdata.namespace.wdq.lex.ID2TERM.com.bigdata.btree.BTree.branchingFactor=600 33 | com.bigdata.namespace.wdq.lex.TERM2ID.com.bigdata.btree.BTree.branchingFactor=330 34 | # Bump up the branching factor for the statement indices on the default kb. 35 | com.bigdata.namespace.wdq.spo.com.bigdata.btree.BTree.branchingFactor=1024 36 | com.bigdata.namespace.wdq.spo.OSP.com.bigdata.btree.BTree.branchingFactor=900 37 | com.bigdata.namespace.wdq.spo.SPO.com.bigdata.btree.BTree.branchingFactor=900 38 | # larger statement buffer capacity for bulk loading. 39 | com.bigdata.rdf.sail.bufferCapacity=100000 40 | # Override the #of write cache buffers to improve bulk load performance. Requires enough native heap! 41 | com.bigdata.journal.AbstractJournal.writeCacheBufferCount=1000 42 | # Enable small slot optimization! 43 | com.bigdata.rwstore.RWStore.smallSlotType=1024 44 | # See https://jira.blazegraph.com/browse/BLZG-1385 - reduce LRU cache timeout 45 | com.bigdata.journal.AbstractJournal.historicalIndexCacheCapacity=20 46 | com.bigdata.journal.AbstractJournal.historicalIndexCacheTimeout=5 47 | # Geospatial ON 48 | com.bigdata.rdf.store.AbstractTripleStore.geoSpatial=true 49 | com.bigdata.rdf.store.AbstractTripleStore.geoSpatialDefaultDatatype=http\://www.opengis.net/ont/geosparql#wktLiteral 50 | com.bigdata.rdf.store.AbstractTripleStore.geoSpatialIncludeBuiltinDatatypes=false 51 | com.bigdata.rdf.store.AbstractTripleStore.geoSpatialDatatypeConfig.0={"config": \ 52 | {"uri":"http://www.opengis.net/ont/geosparql#wktLiteral",\ 53 | "literalSerializer":"org.wikidata.query.rdf.blazegraph.inline.literal.WKTSerializer",\ 54 | "fields":[\ 55 | {"valueType":"DOUBLE","multiplier":"1000000000","serviceMapping":"LONGITUDE"},\ 56 | {"valueType":"DOUBLE","multiplier":"1000000000","serviceMapping":"LATITUDE"},\ 57 | {"valueType":"LONG","multiplier":"1","minValue":"0","serviceMapping":"COORD_SYSTEM"}\ 58 | ]}} 59 | -------------------------------------------------------------------------------- /wdqs/0.3.40/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This file is provided by the wikibase/wdqs docker image. 
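# Note on the check below: REQUIRED_VARIABLES holds variable *names*, and the
# loop dereferences each name indirectly; e.g. with i=WDQS_PORT, the line
# 'eval THISSHOULDBESET=\$$i' behaves like THISSHOULDBESET=$WDQS_PORT.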
3 | 4 | # Test if required environment variables have been set 5 | REQUIRED_VARIABLES=(WIKIBASE_HOST WDQS_HOST WDQS_PORT) 6 | for i in ${REQUIRED_VARIABLES[@]}; do 7 | eval THISSHOULDBESET=\$$i 8 | if [ -z "$THISSHOULDBESET" ]; then 9 | echo "$i is required but isn't set. You should pass it to docker. See: https://docs.docker.com/engine/reference/commandline/run/#set-environment-variables--e---env---env-file"; 10 | exit 1; 11 | fi 12 | done 13 | 14 | set -eu 15 | 16 | export BLAZEGRAPH_OPTS="-DwikibaseHost=${WIKIBASE_HOST}" 17 | export UPDATER_OPTS="-DwikibaseHost=${WIKIBASE_HOST} -DwikibaseMaxDaysBack=${WIKIBASE_MAX_DAYS_BACK}" 18 | 19 | envsubst < /templates/mwservices.json > /wdqs/mwservices.json 20 | chown 666:66 /wdqs/mwservices.json 21 | 22 | # The data directory should always be owned by the blazegraph user 23 | # This used to be owned by root (https://phabricator.wikimedia.org/T237248) 24 | if [ -d /wdqs/data/ ]; then 25 | chown 666:66 -R /wdqs/data/ 26 | fi 27 | 28 | su-exec 666:66 "$@" 29 | -------------------------------------------------------------------------------- /wdqs/0.3.40/mwservices.json: -------------------------------------------------------------------------------- 1 | { 2 | "services": { 3 | "Generator": { 4 | "@note": "Use generator Query API", 5 | "@docs": "https://www.mediawiki.org/wiki/API:Query#Generators", 6 | "params": { 7 | "action": "query", 8 | "generator": { 9 | "type": "string" 10 | }, 11 | "prop": { 12 | "type": "string", 13 | "default": "info|pageprops" 14 | }, 15 | "ppprop": { 16 | "type": "string", 17 | "default": "wikibase_item" 18 | } 19 | }, 20 | "output": { 21 | "items": "/api/query/pages/page", 22 | "vars": { 23 | "title": "@title", 24 | "item": "pageprops/@wikibase_item", 25 | "pageid": "@pageid", 26 | "lastrevid": "@lastrevid", 27 | "timestamp": "@touched" 28 | } 29 | } 30 | }, 31 | "Categories": { 32 | "params": { 33 | "action": "query", 34 | "prop": "categories", 35 | "titles": { 36 | "type": "list" 37 | }, 38 | "cllimit": { 39 | "type": "int", 40 | "default": "max" 41 | } 42 | }, 43 | "output": { 44 | "items": "//api/query/pages/page/categories/cl", 45 | "vars": { 46 | "category": "@title", 47 | "title": "//api/query/pages/page/@title" 48 | } 49 | } 50 | }, 51 | "Search": { 52 | "@note": "Full-text search in the wiki", 53 | "@docs": "https://www.mediawiki.org/wiki/API:Search", 54 | "params": { 55 | "action": "query", 56 | "list": "search", 57 | "srsearch": { 58 | "type": "string" 59 | }, 60 | "srwhat": { 61 | "type": "string", 62 | "default": "text" 63 | }, 64 | "srlimit": { 65 | "type": "int", 66 | "default": "max" 67 | } 68 | }, 69 | "output": { 70 | "items": "//api/query/search/p", 71 | "vars": { 72 | "title": "@title" 73 | } 74 | } 75 | }, 76 | "EntitySearch": { 77 | "@note": "Entity search for Wikibase", 78 | "@docs": "https://www.mediawiki.org/wiki/Wikibase/API#wbsearchentities", 79 | "params": { 80 | "action": "wbsearchentities", 81 | "search": { 82 | "type": "string" 83 | }, 84 | "language": { 85 | "type": "string" 86 | }, 87 | "type": { 88 | "type": "string", 89 | "default": "item" 90 | }, 91 | "limit": { 92 | "type": "string", 93 | "default": "max" 94 | } 95 | }, 96 | "output": { 97 | "items": "//api/search/entity", 98 | "vars": { 99 | "item": "@id", 100 | "label": "@label" 101 | } 102 | } 103 | } 104 | }, 105 | "endpoints": ["${WIKIBASE_HOST}"] 106 | } 107 | -------------------------------------------------------------------------------- /wdqs/0.3.40/runBlazegraph.sh: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # This file is provided by the wikibase/wdqs docker image. 3 | 4 | cd /wdqs 5 | 6 | ./runBlazegraph.sh -------------------------------------------------------------------------------- /wdqs/0.3.40/runUpdate.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # This file is provided by the wikibase/wdqs docker image. 3 | 4 | cd /wdqs 5 | 6 | # TODO env vars for entity namespaces, scheme and other settings 7 | /wait-for-it.sh $WIKIBASE_HOST:80 -t 300 -- \ 8 | /wait-for-it.sh $WDQS_HOST:$WDQS_PORT -t 300 -- \ 9 | ./runUpdate.sh -h http://$WDQS_HOST:$WDQS_PORT -- --wikibaseUrl $WIKIBASE_SCHEME://$WIKIBASE_HOST --conceptUri $WIKIBASE_SCHEME://$WIKIBASE_HOST --entityNamespaces $WDQS_ENTITY_NAMESPACES 10 | -------------------------------------------------------------------------------- /wdqs/0.3.40/wait-for-it.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # This file is provided by the wikibase/wdqs docker image. 3 | # Use this script to test if a given TCP host/port are available 4 | 5 | cmdname=$(basename $0) 6 | 7 | echoerr() { if [[ $QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } 8 | 9 | usage() 10 | { 11 | cat << USAGE >&2 12 | Usage: 13 | $cmdname host:port [-s] [-t timeout] [-- command args] 14 | -h HOST | --host=HOST Host or IP under test 15 | -p PORT | --port=PORT TCP port under test 16 | Alternatively, you specify the host and port as host:port 17 | -s | --strict Only execute subcommand if the test succeeds 18 | -q | --quiet Don't output any status messages 19 | -t TIMEOUT | --timeout=TIMEOUT 20 | Timeout in seconds, zero for no timeout 21 | -- COMMAND ARGS Execute command with args after the test finishes 22 | USAGE 23 | exit 1 24 | } 25 | 26 | wait_for() 27 | { 28 | if [[ $TIMEOUT -gt 0 ]]; then 29 | echoerr "$cmdname: waiting $TIMEOUT seconds for $HOST:$PORT" 30 | else 31 | echoerr "$cmdname: waiting for $HOST:$PORT without a timeout" 32 | fi 33 | start_ts=$(date +%s) 34 | while : 35 | do 36 | if [[ $ISBUSY -eq 1 ]]; then 37 | nc -z $HOST $PORT 38 | result=$? 39 | else 40 | (echo > /dev/tcp/$HOST/$PORT) >/dev/null 2>&1 41 | result=$? 42 | fi 43 | if [[ $result -eq 0 ]]; then 44 | end_ts=$(date +%s) 45 | echoerr "$cmdname: $HOST:$PORT is available after $((end_ts - start_ts)) seconds" 46 | break 47 | fi 48 | sleep 1 49 | done 50 | return $result 51 | } 52 | 53 | wait_for_wrapper() 54 | { 55 | # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692 56 | if [[ $QUIET -eq 1 ]]; then 57 | timeout $BUSYTIMEFLAG $TIMEOUT $0 --quiet --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & 58 | else 59 | timeout $BUSYTIMEFLAG $TIMEOUT $0 --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & 60 | fi 61 | PID=$! 62 | trap "kill -INT -$PID" INT 63 | wait $PID 64 | RESULT=$? 
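    # (a non-zero status here usually means timeout(1) killed the child probe before the port opened)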
65 | if [[ $RESULT -ne 0 ]]; then 66 | echoerr "$cmdname: timeout occurred after waiting $TIMEOUT seconds for $HOST:$PORT" 67 | fi 68 | return $RESULT 69 | } 70 | 71 | # process arguments 72 | while [[ $# -gt 0 ]] 73 | do 74 | case "$1" in 75 | *:* ) 76 | hostport=(${1//:/ }) 77 | HOST=${hostport[0]} 78 | PORT=${hostport[1]} 79 | shift 1 80 | ;; 81 | --child) 82 | CHILD=1 83 | shift 1 84 | ;; 85 | -q | --quiet) 86 | QUIET=1 87 | shift 1 88 | ;; 89 | -s | --strict) 90 | STRICT=1 91 | shift 1 92 | ;; 93 | -h) 94 | HOST="$2" 95 | if [[ $HOST == "" ]]; then break; fi 96 | shift 2 97 | ;; 98 | --host=*) 99 | HOST="${1#*=}" 100 | shift 1 101 | ;; 102 | -p) 103 | PORT="$2" 104 | if [[ $PORT == "" ]]; then break; fi 105 | shift 2 106 | ;; 107 | --port=*) 108 | PORT="${1#*=}" 109 | shift 1 110 | ;; 111 | -t) 112 | TIMEOUT="$2" 113 | if [[ $TIMEOUT == "" ]]; then break; fi 114 | shift 2 115 | ;; 116 | --timeout=*) 117 | TIMEOUT="${1#*=}" 118 | shift 1 119 | ;; 120 | --) 121 | shift 122 | CLI="$@" 123 | break 124 | ;; 125 | --help) 126 | usage 127 | ;; 128 | *) 129 | echoerr "Unknown argument: $1" 130 | usage 131 | ;; 132 | esac 133 | done 134 | 135 | if [[ "$HOST" == "" || "$PORT" == "" ]]; then 136 | echoerr "Error: you need to provide a host and port to test." 137 | usage 138 | fi 139 | 140 | TIMEOUT=${TIMEOUT:-15} 141 | STRICT=${STRICT:-0} 142 | CHILD=${CHILD:-0} 143 | QUIET=${QUIET:-0} 144 | 145 | # check to see if timeout is from busybox? 146 | # check to see if timeout is from busybox? 147 | TIMEOUT_PATH=$(realpath $(which timeout)) 148 | if [[ $TIMEOUT_PATH =~ "busybox" ]]; then 149 | ISBUSY=1 150 | BUSYTIMEFLAG="-t" 151 | else 152 | ISBUSY=0 153 | BUSYTIMEFLAG="" 154 | fi 155 | 156 | if [[ $CHILD -gt 0 ]]; then 157 | wait_for 158 | RESULT=$? 159 | exit $RESULT 160 | else 161 | if [[ $TIMEOUT -gt 0 ]]; then 162 | wait_for_wrapper 163 | RESULT=$? 164 | else 165 | wait_for 166 | RESULT=$? 
167 | fi 168 | fi 169 | 170 | if [[ $CLI != "" ]]; then 171 | if [[ $RESULT -ne 0 && $STRICT -eq 1 ]]; then 172 | echoerr "$cmdname: strict mode, refusing to execute subprocess" 173 | exit $RESULT 174 | fi 175 | exec $CLI 176 | else 177 | exit $RESULT 178 | fi -------------------------------------------------------------------------------- /wdqs/0.3.40/whitelist.txt: -------------------------------------------------------------------------------- 1 | https://query.wikidata.org/bigdata/namespace/categories/sparql 2 | https://query.wikidata.org/bigdata/namespace/dcatap/sparql 3 | https://query.wikidata.org/bigdata/namespace/wdq/sparql 4 | https://query.wikidata.org/sparql 5 | http://sparql.europeana.eu/ 6 | http://nomisma.org/query 7 | http://data.cervantesvirtual.com/openrdf-sesame/repositories/data 8 | http://datos.bne.es/sparql 9 | http://edan.si.edu/saam/sparql 10 | http://data.bnf.fr/sparql 11 | http://dbpedia.org/sparql 12 | http://rdf.insee.fr/sparql 13 | http://sparql.wikipathways.org/ 14 | http://dati.camera.it/sparql 15 | http://dati.emilia-romagna.it/sparql 16 | http://data.ordnancesurvey.co.uk/datasets/os-linked-data/apis/sparql 17 | http://statistics.data.gov.uk/sparql 18 | http://opendatacommunities.org/sparql 19 | http://data.plan4all.eu/sparql 20 | http://vocab.getty.edu/sparql.json 21 | http://linkeddata.uriburner.com/sparql 22 | http://tools.wmflabs.org/mw2sparql/sparql 23 | http://linkedgeodata.org/sparql 24 | http://sparql.hegroup.org/sparql/ 25 | http://lov.okfn.org/dataset/lov/sparql 26 | http://opencitations.net/sparql 27 | http://dati.beniculturali.it/sparql 28 | http://zbw.eu/beta/sparql/stw/query 29 | http://zbw.eu/beta/sparql/gnd/query 30 | http://zbw.eu/beta/sparql/econ_pers/query 31 | http://zbw.eu/beta/sparql/econ_corp/query 32 | http://id.nlm.nih.gov/mesh/sparql 33 | http://collection.britishmuseum.org/sparql 34 | http://lod.opentransportdata.swiss/query 35 | http://data.bibliotheken.nl/sparql 36 | https://data.pdok.nl/sparql 37 | http://collection.britishart.yale.edu/openrdf-sesame/repositories/ycba 38 | http://bnb.data.bl.uk/sparql 39 | https://dati.quirinale.it/sparql 40 | http://sparql.uniprot.org/sparql 41 | https://sparql.uniprot.org/sparql 42 | https://ld.stadt-zuerich.ch/query 43 | http://data.nobelprize.org/sparql 44 | http://zbw.eu/beta/sparql/pm20/query 45 | http://libris.kb.se/sparql 46 | https://api.parliament.uk/sparql 47 | https://data.istex.fr/sparql/ 48 | https://data.epo.org/linked-data/query 49 | https://data.niod.nl/PoolParty/sparql/WO2_Thesaurus 50 | https://www.ebi.ac.uk/rdf/services/sparql 51 | https://integbio.jp/rdf/ddbj/sparql 52 | https://integbio.jp/rdf/kero/sparql 53 | https://integbio.jp/rdf/sparql 54 | https://data.idref.fr/sparql 55 | https://sophox.org/sparql 56 | http://sciencesource-query.wmflabs.org/proxy/wdqs/bigdata/namespace/wdq/sparql 57 | http://agrovoc.uniroma2.it:3030/agrovoc/sparql 58 | https://api.druid.datalegend.net/datasets/nlgis/gemeentegeschiedenis/services/gemeentegeschiedenis/sparql 59 | https://data.niod.nl/PoolParty/sparql/WO2_biografieen 60 | http://data.muziekschatten.nl/sparql 61 | https://jpsearch.go.jp/rdf/sparql/ 62 | https://slod.fiz-karlsruhe.de/sparql 63 | http://dbpedia.org/sparql* 64 | https://wdhqs.wmflabs.org/sparql 65 | http://dati.isprambiente.it/sparql 66 | https://linkeddata.cultureelerfgoed.nl/sparql 67 | https://tora.entryscape.net/store/sparql 68 | http://de.dbpedia.org/sparql* 69 | http://lod.openaire.eu/sparql 70 | https://lod.openaire.eu/sparql 71 | 
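Both `whitelist.txt` and `RWStore.properties` are baked into the image at `/wdqs/` by the Dockerfile above, so a deployment can override them with bind mounts rather than rebuilding. A sketch, where `my-whitelist.txt` is a hypothetical local file:

    docker run --rm \
        -e WIKIBASE_HOST=wikibase.svc -e WDQS_HOST=wdqs.svc -e WDQS_PORT=9999 \
        -v "$PWD/my-whitelist.txt:/wdqs/whitelist.txt:ro" \
        wikibase/wdqs:0.3.40 \
        /runBlazegraph.sh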
-------------------------------------------------------------------------------- /wdqs/0.3.6/.travis/build-deploy.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | #Oneline for full directory name see: https://stackoverflow.com/questions/59895/getting-the-source-directory-of-a-bash-script-from-within 3 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 4 | set -e 5 | docker build "$DIR/../" -t wikibase/wdqs:0.3.6 6 | 7 | if [ "$SHOULD_DOCKER_PUSH" = true ]; then 8 | docker push wikibase/wdqs:0.3.6 9 | fi 10 | -------------------------------------------------------------------------------- /wdqs/0.3.6/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:xenial as fetcher 2 | 3 | RUN apt-get update && \ 4 | apt-get install --yes --no-install-recommends unzip=6.* && \ 5 | apt-get clean && rm -rf /var/lib/apt/lists/* 6 | 7 | ADD https://archiva.wikimedia.org/repository/snapshots/org/wikidata/query/rdf/service/0.3.6-SNAPSHOT/service-0.3.6-SNAPSHOT-dist.zip ./service-dist.zip 8 | 9 | RUN unzip service-dist.zip && rm service-dist.zip 10 | 11 | 12 | FROM openjdk:8-jdk-alpine 13 | 14 | # Blazegraph scripts require bash 15 | # Install gettext for envsubst command, (it needs libintl package) 16 | # Install curl for the loadData.sh wdqs script (if someone needs it) 17 | RUN set -x ; \ 18 | apk --no-cache add bash=\<4.5.0 gettext=\<0.19.8.2 libintl=\<0.19.8.2 curl=\<7.64.999 su-exec=\~0.2 19 | 20 | RUN addgroup -g 66 -S blazegraph 21 | RUN adduser -S -G blazegraph -u 666 -s /bin/bash blazegraph 22 | 23 | COPY --from=fetcher --chown=blazegraph:blazegraph /service-0.3.6-SNAPSHOT /wdqs 24 | 25 | # Don't set a memory limit otherwise bad things happen (OOMs) 26 | ENV MEMORY=""\ 27 | HEAP_SIZE="1g"\ 28 | HOST="0.0.0.0"\ 29 | WDQS_ENTITY_NAMESPACES="120,122"\ 30 | WIKIBASE_SCHEME="http"\ 31 | WIKIBASE_MAX_DAYS_BACK="90" 32 | 33 | WORKDIR /wdqs 34 | 35 | COPY --chown=blazegraph:blazegraph wait-for-it.sh entrypoint.sh runBlazegraph.sh runUpdate.sh / 36 | COPY --chown=blazegraph:blazegraph mwservices.json /templates/mwservices.json 37 | COPY --chown=blazegraph:blazegraph RWStore.properties whitelist.txt /wdqs/ 38 | 39 | # TODO this shouldn't be needed, but CI currently doesnt check for the +x bit, which is why this line is here 40 | RUN chmod +x /wdqs/runUpdate.sh 41 | 42 | ENTRYPOINT ["/entrypoint.sh"] 43 | -------------------------------------------------------------------------------- /wdqs/0.3.6/RWStore.properties: -------------------------------------------------------------------------------- 1 | # This file is provided by the wikibase/wdqs docker image. 2 | 3 | # Dump data in target. 4 | com.bigdata.journal.AbstractJournal.file=data/data.jnl 5 | com.bigdata.journal.AbstractJournal.bufferMode=DiskRW 6 | com.bigdata.service.AbstractTransactionService.minReleaseAge=1 7 | 8 | com.bigdata.rdf.store.AbstractTripleStore.quads=false 9 | com.bigdata.rdf.store.AbstractTripleStore.statementIdentifiers=false 10 | 11 | # Don't use truth maintenance right yet. 
12 | com.bigdata.rdf.sail.truthMaintenance=false 13 | com.bigdata.rdf.store.AbstractTripleStore.textIndex=false 14 | com.bigdata.rdf.store.AbstractTripleStore.axiomsClass=com.bigdata.rdf.axioms.NoAxioms 15 | 16 | # Use our private vocabularies 17 | com.bigdata.rdf.store.AbstractTripleStore.vocabularyClass=org.wikidata.query.rdf.blazegraph.WikibaseVocabulary$V003 18 | com.bigdata.rdf.store.AbstractTripleStore.inlineURIFactory=org.wikidata.query.rdf.blazegraph.WikibaseInlineUriFactory 19 | com.bigdata.rdf.store.AbstractTripleStore.extensionFactoryClass=org.wikidata.query.rdf.blazegraph.WikibaseExtensionFactory 20 | 21 | # Suggested settings from https://phabricator.wikimedia.org/T92308 22 | com.bigdata.btree.writeRetentionQueue.capacity=4000 23 | com.bigdata.btree.BTree.branchingFactor=128 24 | # 200M initial extent. 25 | com.bigdata.journal.AbstractJournal.initialExtent=209715200 26 | com.bigdata.journal.AbstractJournal.maximumExtent=209715200 27 | # Bump up the branching factor for the lexicon indices on the default kb. 28 | com.bigdata.namespace.wdq.lex.com.bigdata.btree.BTree.branchingFactor=400 29 | com.bigdata.namespace.wdq.lex.ID2TERM.com.bigdata.btree.BTree.branchingFactor=800 30 | com.bigdata.namespace.wdq.lex.TERM2ID.com.bigdata.btree.BTree.branchingFactor=128 31 | # Bump up the branching factor for the statement indices on the default kb. 32 | com.bigdata.namespace.wdq.spo.com.bigdata.btree.BTree.branchingFactor=1024 33 | com.bigdata.namespace.wdq.spo.OSP.com.bigdata.btree.BTree.branchingFactor=64 34 | com.bigdata.namespace.wdq.spo.SPO.com.bigdata.btree.BTree.branchingFactor=600 35 | # larger statement buffer capacity for bulk loading. 36 | com.bigdata.rdf.sail.bufferCapacity=100000 37 | # Override the #of write cache buffers to improve bulk load performance. Requires enough native heap! 38 | com.bigdata.journal.AbstractJournal.writeCacheBufferCount=1000 39 | # Enable small slot optimization! 40 | com.bigdata.rwstore.RWStore.smallSlotType=1024 41 | # See https://jira.blazegraph.com/browse/BLZG-1385 - reduce LRU cache timeout 42 | com.bigdata.journal.AbstractJournal.historicalIndexCacheCapacity=20 43 | com.bigdata.journal.AbstractJournal.historicalIndexCacheTimeout=5 44 | # Geospatial ON 45 | com.bigdata.rdf.store.AbstractTripleStore.geoSpatial=true 46 | com.bigdata.rdf.store.AbstractTripleStore.geoSpatialDefaultDatatype=http\://www.opengis.net/ont/geosparql#wktLiteral 47 | com.bigdata.rdf.store.AbstractTripleStore.geoSpatialIncludeBuiltinDatatypes=false 48 | com.bigdata.rdf.store.AbstractTripleStore.geoSpatialDatatypeConfig.0={"config": \ 49 | {"uri":"http://www.opengis.net/ont/geosparql#wktLiteral",\ 50 | "literalSerializer":"org.wikidata.query.rdf.blazegraph.inline.literal.WKTSerializer",\ 51 | "fields":[\ 52 | {"valueType":"DOUBLE","multiplier":"1000000000","serviceMapping":"LONGITUDE"},\ 53 | {"valueType":"DOUBLE","multiplier":"1000000000","serviceMapping":"LATITUDE"},\ 54 | {"valueType":"LONG","multiplier":"1","minValue":"0","serviceMapping":"COORD_SYSTEM"}\ 55 | ]}} 56 | -------------------------------------------------------------------------------- /wdqs/0.3.6/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This file is provided by the wikibase/wdqs docker image. 
3 | 4 | # Test if required environment variables have been set 5 | REQUIRED_VARIABLES=(WIKIBASE_HOST WDQS_HOST WDQS_PORT) 6 | for i in ${REQUIRED_VARIABLES[@]}; do 7 | eval THISSHOULDBESET=\$$i 8 | if [ -z "$THISSHOULDBESET" ]; then 9 | echo "$i is required but isn't set. You should pass it to docker. See: https://docs.docker.com/engine/reference/commandline/run/#set-environment-variables--e---env---env-file"; 10 | exit 1; 11 | fi 12 | done 13 | 14 | set -eu 15 | 16 | export BLAZEGRAPH_OPTS="-DwikibaseHost=${WIKIBASE_HOST}" 17 | export UPDATER_OPTS="-DwikibaseHost=${WIKIBASE_HOST} -DwikibaseMaxDaysBack=${WIKIBASE_MAX_DAYS_BACK}" 18 | 19 | envsubst < /templates/mwservices.json > /wdqs/mwservices.json 20 | chown 666:66 /wdqs/mwservices.json 21 | 22 | # The data directory should always be owned by the blazegraph user 23 | # This used to be owned by root (https://phabricator.wikimedia.org/T237248) 24 | if [ -d /wdqs/data/ ]; then 25 | chown 666:66 -R /wdqs/data/ 26 | fi 27 | 28 | su-exec 666:66 "$@" 29 | -------------------------------------------------------------------------------- /wdqs/0.3.6/mwservices.json: -------------------------------------------------------------------------------- 1 | { 2 | "services": { 3 | "Generator": { 4 | "@note": "Use generator Query API", 5 | "@docs": "https://www.mediawiki.org/wiki/API:Query#Generators", 6 | "params": { 7 | "action": "query", 8 | "generator": { 9 | "type": "string" 10 | }, 11 | "prop": { 12 | "type": "string", 13 | "default": "info|pageprops" 14 | }, 15 | "ppprop": { 16 | "type": "string", 17 | "default": "wikibase_item" 18 | } 19 | }, 20 | "output": { 21 | "items": "/api/query/pages/page", 22 | "vars": { 23 | "title": "@title", 24 | "item": "pageprops/@wikibase_item", 25 | "pageid": "@pageid", 26 | "lastrevid": "@lastrevid", 27 | "timestamp": "@touched" 28 | } 29 | } 30 | }, 31 | "Categories": { 32 | "params": { 33 | "action": "query", 34 | "prop": "categories", 35 | "titles": { 36 | "type": "list" 37 | }, 38 | "cllimit": { 39 | "type": "int", 40 | "default": "max" 41 | } 42 | }, 43 | "output": { 44 | "items": "//api/query/pages/page/categories/cl", 45 | "vars": { 46 | "category": "@title", 47 | "title": "//api/query/pages/page/@title" 48 | } 49 | } 50 | }, 51 | "Search": { 52 | "@note": "Full-text search in the wiki", 53 | "@docs": "https://www.mediawiki.org/wiki/API:Search", 54 | "params": { 55 | "action": "query", 56 | "list": "search", 57 | "srsearch": { 58 | "type": "string" 59 | }, 60 | "srwhat": { 61 | "type": "string", 62 | "default": "text" 63 | }, 64 | "srlimit": { 65 | "type": "int", 66 | "default": "max" 67 | } 68 | }, 69 | "output": { 70 | "items": "//api/query/search/p", 71 | "vars": { 72 | "title": "@title" 73 | } 74 | } 75 | }, 76 | "EntitySearch": { 77 | "@note": "Entity search for Wikibase", 78 | "@docs": "https://www.mediawiki.org/wiki/Wikibase/API#wbsearchentities", 79 | "params": { 80 | "action": "wbsearchentities", 81 | "search": { 82 | "type": "string" 83 | }, 84 | "language": { 85 | "type": "string" 86 | }, 87 | "type": { 88 | "type": "string", 89 | "default": "item" 90 | }, 91 | "limit": { 92 | "type": "string", 93 | "default": "max" 94 | } 95 | }, 96 | "output": { 97 | "items": "//api/search/entity", 98 | "vars": { 99 | "item": "@id", 100 | "label": "@label" 101 | } 102 | } 103 | } 104 | }, 105 | "endpoints": ["${WIKIBASE_HOST}"] 106 | } 107 | -------------------------------------------------------------------------------- /wdqs/0.3.6/runBlazegraph.sh: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # This file is provided by the wikibase/wdqs docker image. 3 | 4 | cd /wdqs 5 | 6 | ./runBlazegraph.sh -------------------------------------------------------------------------------- /wdqs/0.3.6/runUpdate.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # This file is provided by the wikibase/wdqs docker image. 3 | 4 | cd /wdqs 5 | 6 | # TODO env vars for entity namespaces, scheme and other settings 7 | /wait-for-it.sh $WIKIBASE_HOST:80 -t 300 -- \ 8 | /wait-for-it.sh $WDQS_HOST:$WDQS_PORT -t 300 -- \ 9 | ./runUpdate.sh -h http://$WDQS_HOST:$WDQS_PORT -- --wikibaseUrl $WIKIBASE_SCHEME://$WIKIBASE_HOST --conceptUri $WIKIBASE_SCHEME://$WIKIBASE_HOST --entityNamespaces $WDQS_ENTITY_NAMESPACES 10 | -------------------------------------------------------------------------------- /wdqs/0.3.6/wait-for-it.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # This file is provided by the wikibase/wdqs docker image. 3 | # Use this script to test if a given TCP host/port are available 4 | 5 | cmdname=$(basename $0) 6 | 7 | echoerr() { if [[ $QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } 8 | 9 | usage() 10 | { 11 | cat << USAGE >&2 12 | Usage: 13 | $cmdname host:port [-s] [-t timeout] [-- command args] 14 | -h HOST | --host=HOST Host or IP under test 15 | -p PORT | --port=PORT TCP port under test 16 | Alternatively, you specify the host and port as host:port 17 | -s | --strict Only execute subcommand if the test succeeds 18 | -q | --quiet Don't output any status messages 19 | -t TIMEOUT | --timeout=TIMEOUT 20 | Timeout in seconds, zero for no timeout 21 | -- COMMAND ARGS Execute command with args after the test finishes 22 | USAGE 23 | exit 1 24 | } 25 | 26 | wait_for() 27 | { 28 | if [[ $TIMEOUT -gt 0 ]]; then 29 | echoerr "$cmdname: waiting $TIMEOUT seconds for $HOST:$PORT" 30 | else 31 | echoerr "$cmdname: waiting for $HOST:$PORT without a timeout" 32 | fi 33 | start_ts=$(date +%s) 34 | while : 35 | do 36 | if [[ $ISBUSY -eq 1 ]]; then 37 | nc -z $HOST $PORT 38 | result=$? 39 | else 40 | (echo > /dev/tcp/$HOST/$PORT) >/dev/null 2>&1 41 | result=$? 42 | fi 43 | if [[ $result -eq 0 ]]; then 44 | end_ts=$(date +%s) 45 | echoerr "$cmdname: $HOST:$PORT is available after $((end_ts - start_ts)) seconds" 46 | break 47 | fi 48 | sleep 1 49 | done 50 | return $result 51 | } 52 | 53 | wait_for_wrapper() 54 | { 55 | # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692 56 | if [[ $QUIET -eq 1 ]]; then 57 | timeout $BUSYTIMEFLAG $TIMEOUT $0 --quiet --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & 58 | else 59 | timeout $BUSYTIMEFLAG $TIMEOUT $0 --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & 60 | fi 61 | PID=$! 62 | trap "kill -INT -$PID" INT 63 | wait $PID 64 | RESULT=$? 
65 | if [[ $RESULT -ne 0 ]]; then 66 | echoerr "$cmdname: timeout occurred after waiting $TIMEOUT seconds for $HOST:$PORT" 67 | fi 68 | return $RESULT 69 | } 70 | 71 | # process arguments 72 | while [[ $# -gt 0 ]] 73 | do 74 | case "$1" in 75 | *:* ) 76 | hostport=(${1//:/ }) 77 | HOST=${hostport[0]} 78 | PORT=${hostport[1]} 79 | shift 1 80 | ;; 81 | --child) 82 | CHILD=1 83 | shift 1 84 | ;; 85 | -q | --quiet) 86 | QUIET=1 87 | shift 1 88 | ;; 89 | -s | --strict) 90 | STRICT=1 91 | shift 1 92 | ;; 93 | -h) 94 | HOST="$2" 95 | if [[ $HOST == "" ]]; then break; fi 96 | shift 2 97 | ;; 98 | --host=*) 99 | HOST="${1#*=}" 100 | shift 1 101 | ;; 102 | -p) 103 | PORT="$2" 104 | if [[ $PORT == "" ]]; then break; fi 105 | shift 2 106 | ;; 107 | --port=*) 108 | PORT="${1#*=}" 109 | shift 1 110 | ;; 111 | -t) 112 | TIMEOUT="$2" 113 | if [[ $TIMEOUT == "" ]]; then break; fi 114 | shift 2 115 | ;; 116 | --timeout=*) 117 | TIMEOUT="${1#*=}" 118 | shift 1 119 | ;; 120 | --) 121 | shift 122 | CLI="$@" 123 | break 124 | ;; 125 | --help) 126 | usage 127 | ;; 128 | *) 129 | echoerr "Unknown argument: $1" 130 | usage 131 | ;; 132 | esac 133 | done 134 | 135 | if [[ "$HOST" == "" || "$PORT" == "" ]]; then 136 | echoerr "Error: you need to provide a host and port to test." 137 | usage 138 | fi 139 | 140 | TIMEOUT=${TIMEOUT:-15} 141 | STRICT=${STRICT:-0} 142 | CHILD=${CHILD:-0} 143 | QUIET=${QUIET:-0} 144 | 145 | # check to see if timeout is from busybox? 146 | # check to see if timeout is from busybox? 147 | TIMEOUT_PATH=$(realpath $(which timeout)) 148 | if [[ $TIMEOUT_PATH =~ "busybox" ]]; then 149 | ISBUSY=1 150 | BUSYTIMEFLAG="-t" 151 | else 152 | ISBUSY=0 153 | BUSYTIMEFLAG="" 154 | fi 155 | 156 | if [[ $CHILD -gt 0 ]]; then 157 | wait_for 158 | RESULT=$? 159 | exit $RESULT 160 | else 161 | if [[ $TIMEOUT -gt 0 ]]; then 162 | wait_for_wrapper 163 | RESULT=$? 164 | else 165 | wait_for 166 | RESULT=$? 
167 | fi 168 | fi 169 | 170 | if [[ $CLI != "" ]]; then 171 | if [[ $RESULT -ne 0 && $STRICT -eq 1 ]]; then 172 | echoerr "$cmdname: strict mode, refusing to execute subprocess" 173 | exit $RESULT 174 | fi 175 | exec $CLI 176 | else 177 | exit $RESULT 178 | fi -------------------------------------------------------------------------------- /wdqs/0.3.6/whitelist.txt: -------------------------------------------------------------------------------- 1 | https://query.wikidata.org/bigdata/namespace/categories/sparql 2 | https://query.wikidata.org/bigdata/namespace/dcatap/sparql 3 | https://query.wikidata.org/bigdata/namespace/wdq/sparql 4 | https://query.wikidata.org/sparql 5 | http://sparql.europeana.eu/ 6 | http://nomisma.org/query 7 | http://data.cervantesvirtual.com/openrdf-sesame/repositories/data 8 | http://datos.bne.es/sparql 9 | http://edan.si.edu/saam/sparql 10 | http://data.bnf.fr/sparql 11 | http://dbpedia.org/sparql 12 | http://rdf.insee.fr/sparql 13 | http://sparql.wikipathways.org/ 14 | http://dati.camera.it/sparql 15 | http://dati.emilia-romagna.it/sparql 16 | http://data.ordnancesurvey.co.uk/datasets/os-linked-data/apis/sparql 17 | http://statistics.data.gov.uk/sparql 18 | http://opendatacommunities.org/sparql 19 | http://data.plan4all.eu/sparql 20 | http://vocab.getty.edu/sparql.json 21 | http://linkeddata.uriburner.com/sparql 22 | http://tools.wmflabs.org/mw2sparql/sparql 23 | http://linkedgeodata.org/sparql 24 | http://sparql.hegroup.org/sparql/ 25 | http://lov.okfn.org/dataset/lov/sparql 26 | http://opencitations.net/sparql 27 | http://dati.beniculturali.it/sparql 28 | http://zbw.eu/beta/sparql/stw/query 29 | http://zbw.eu/beta/sparql/gnd/query 30 | http://zbw.eu/beta/sparql/econ_pers/query 31 | http://zbw.eu/beta/sparql/econ_corp/query 32 | http://id.nlm.nih.gov/mesh/sparql 33 | http://collection.britishmuseum.org/sparql 34 | http://lod.opentransportdata.swiss/query 35 | http://data.bibliotheken.nl/sparql 36 | https://data.pdok.nl/sparql 37 | http://collection.britishart.yale.edu/openrdf-sesame/repositories/ycba 38 | http://bnb.data.bl.uk/sparql 39 | https://dati.quirinale.it/sparql 40 | http://sparql.uniprot.org/sparql 41 | https://sparql.uniprot.org/sparql 42 | https://ld.stadt-zuerich.ch/query 43 | http://data.nobelprize.org/sparql 44 | http://zbw.eu/beta/sparql/pm20/query 45 | http://libris.kb.se/sparql 46 | https://api.parliament.uk/sparql 47 | https://data.istex.fr/sparql/ 48 | https://data.epo.org/linked-data/query 49 | https://data.niod.nl/PoolParty/sparql/WO2_Thesaurus 50 | https://www.ebi.ac.uk/rdf/services/sparql 51 | https://integbio.jp/rdf/ddbj/sparql 52 | https://integbio.jp/rdf/kero/sparql 53 | https://integbio.jp/rdf/sparql 54 | https://data.idref.fr/sparql 55 | https://sophox.org/sparql 56 | http://sciencesource-query.wmflabs.org/proxy/wdqs/bigdata/namespace/wdq/sparql 57 | http://agrovoc.uniroma2.it:3030/agrovoc/sparql 58 | https://api.druid.datalegend.net/datasets/nlgis/gemeentegeschiedenis/services/gemeentegeschiedenis/sparql 59 | https://data.niod.nl/PoolParty/sparql/WO2_biografieen 60 | http://data.muziekschatten.nl/sparql 61 | https://jpsearch.go.jp/rdf/sparql/ 62 | https://slod.fiz-karlsruhe.de/sparql 63 | http://dbpedia.org/sparql* 64 | https://wdhqs.wmflabs.org/sparql 65 | http://dati.isprambiente.it/sparql 66 | https://linkeddata.cultureelerfgoed.nl/sparql 67 | https://tora.entryscape.net/store/sparql 68 | http://de.dbpedia.org/sparql* 69 | http://lod.openaire.eu/sparql 70 | https://lod.openaire.eu/sparql 71 | 
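Since `wait-for-it.sh` is copied to `/` in every image in this repository, it can also be used ad hoc to gate any command on a TCP port, exactly as its usage text above describes. For example (host name illustrative):

    /wait-for-it.sh wikibase.svc:80 -t 300 -- echo "wikibase is reachable"

With `-s`/`--strict`, the trailing command only runs if the port actually opened within the timeout; without it, the command runs regardless.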
-------------------------------------------------------------------------------- /wdqs/README.md: -------------------------------------------------------------------------------- 1 | ## wdqs docker image 2 | 3 | Wikibase-specific Blazegraph image. 4 | 5 | Automated build. 6 | 7 | ### Tags 8 | 9 | Image name | Parent image | WDQS Version 10 | --------------------------------------- | ------------------------ | -------------- 11 | `wikibase/wdqs` : `0.3.40`, `latest` | [openjdk:8-jdk-alpine](https://hub.docker.com/_/openjdk/) | 0.3.40 12 | `wikibase/wdqs` : `0.3.10` | [openjdk:8-jdk-alpine](https://hub.docker.com/_/openjdk/) | 0.3.10 13 | `wikibase/wdqs` : `0.3.6` | [openjdk:8-jdk-alpine](https://hub.docker.com/_/openjdk/) | 0.3.6 14 | 15 | ### Upgrading 16 | 17 | When upgrading between wdqs versions, the data stored in `/wdqs/data` may not be compatible with the newer version. 18 | When testing the new image, if no data appears to be loaded into the query service, you will need to reload the data. 19 | 20 | If all changes are still in RecentChanges, then removing `/wdqs/data` and restarting the service should reload all data. 21 | 22 | If you cannot use RecentChanges, you will need to reload from an RDF dump: 23 | - Make an RDF dump from your Wikibase instance: https://github.com/wikimedia/mediawiki-extensions-Wikibase/blob/master/repo/maintenance/dumpRdf.php 24 | - Load the RDF dump into the query service: https://github.com/wikimedia/wikidata-query-rdf/blob/master/docs/getting-started.md#load-the-dump 25 | 26 | ### Environment variables 27 | 28 | Variable | Default | Since | Description 29 | -------------------------| ------------------| --------| ---------- 30 | `MEMORY` | "" | 0.2.5 | Memory limit for Blazegraph 31 | `HEAP_SIZE` | "1g" | 0.2.5 | Heap size for Blazegraph 32 | `WIKIBASE_HOST` | "wikibase.svc" | 0.2.5 | Hostname of the Wikibase host 33 | `WIKIBASE_SCHEME` | "http" | 0.2.5 | Scheme of the Wikibase host 34 | `WDQS_HOST` | "wdqs.svc" | 0.2.5 | Hostname of the WDQS host (this service) 35 | `WDQS_PORT` | "9999" | 0.2.5 | Port of the WDQS host (this service) 36 | `WDQS_ENTITY_NAMESPACES` | "120,122" | 0.2.5 | Wikibase namespaces to load data from 37 | `WIKIBASE_MAX_DAYS_BACK` | "90" | 0.3.0 | Maximum number of days the updater is allowed to look back from now 38 | 39 | 40 | ### Filesystem layout 41 | 42 | File | Description 43 | --------------------------------- | ------------------------------------------------------------------------------ 44 | `/wdqs/whitelist.txt` | Whitelist of other SPARQL endpoints available to federated queries 45 | `/wdqs/RWStore.properties` | Properties for the service 46 | `/templates/mwservices.json` | Template for MediaWiki services (substituted to `/wdqs/mwservices.json` at runtime) 47 | 48 | ### Troubleshooting 49 | 50 | * The query service is not running or seems to get killed by the OS? 51 | * The image requires more than 2GB of available RAM to start. The machine used during development had 4GB of RAM. 52 | 53 | ### Development 54 | 55 | New versions of this image should be created alongside the new versions of wdqs that are used in production for Wikidata. 56 | 57 | New releases from which images can be created are listed on Archiva: https://archiva.wikimedia.org/repository/releases/org/wikidata/query/rdf/service/ 58 | 59 | When creating a new image, RWStore.properties will need to be updated to match the properties used in production at the time the snapshot was in use.
60 | 61 | For this reason it is easier to only create new releases for wdqs versions currently being used in Wikidata production. 62 | 63 | When creating a new release the WMF Search platform team should be contacted for help syncing the wdqs version and RWStore.properties file. 64 | 65 | The process is generally: 66 | - Create a new directory using a previous one as an example 67 | - Update the service snapshot that is being fetched 68 | - Update the CI build by checking the steps in the main README Development section in this repo. 69 | -------------------------------------------------------------------------------- /wikibase/1.31/.travis/build-deploy.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | #Oneline for full directory name see: https://stackoverflow.com/questions/59895/getting-the-source-directory-of-a-bash-script-from-within 3 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 4 | set -e 5 | docker build "$DIR/../base" -t wikibase/wikibase:1.31 -t wikibase/wikibase:1.31-base 6 | docker build "$DIR/../bundle" -t wikibase/wikibase:1.31-bundle 7 | 8 | if [ "$SHOULD_DOCKER_PUSH" = true ]; then 9 | docker push wikibase/wikibase:1.31 10 | docker push wikibase/wikibase:1.31-base 11 | docker push wikibase/wikibase:1.31-bundle 12 | fi 13 | -------------------------------------------------------------------------------- /wikibase/1.31/base/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:xenial as fetcher 2 | 3 | COPY download-extension.sh . 4 | RUN apt-get update && \ 5 | apt-get install --yes --no-install-recommends unzip=6.* jq=1.* curl=7.* ca-certificates=201* && \ 6 | apt-get clean && rm -rf /var/lib/apt/lists/* 7 | 8 | RUN bash download-extension.sh Wikibase;\ 9 | tar xzf Wikibase.tar.gz; 10 | 11 | 12 | FROM mediawiki:1.31 as collector 13 | COPY --from=fetcher /Wikibase /var/www/html/extensions/Wikibase 14 | 15 | 16 | FROM composer@sha256:d374b2e1f715621e9d9929575d6b35b11cf4a6dc237d4a08f2e6d1611f534675 as composer 17 | COPY --from=collector /var/www/html /var/www/html 18 | WORKDIR /var/www/html/ 19 | COPY composer.local.json /var/www/html/composer.local.json 20 | RUN composer install --no-dev 21 | 22 | 23 | FROM mediawiki:1.31 24 | 25 | RUN apt-get update && \ 26 | DEBIAN_FRONTEND=noninteractive apt-get install --yes --no-install-recommends libbz2-dev=1.* gettext-base=0.19.* && \ 27 | rm -rf /var/lib/apt/lists/* 28 | 29 | RUN a2enmod rewrite 30 | 31 | RUN install -d /var/log/mediawiki -o www-data 32 | 33 | RUN docker-php-ext-install calendar bz2 34 | 35 | COPY --from=composer /var/www/html /var/www/html 36 | COPY wait-for-it.sh /wait-for-it.sh 37 | COPY entrypoint.sh /entrypoint.sh 38 | COPY LocalSettings.php.template /LocalSettings.php.template 39 | COPY htaccess /var/www/html/.htaccess 40 | 41 | RUN ln -s /var/www/html/ /var/www/html/w 42 | 43 | ENV MW_SITE_NAME=wikibase-docker\ 44 | MW_SITE_LANG=en 45 | 46 | ENTRYPOINT ["/bin/bash"] 47 | CMD ["/entrypoint.sh"] 48 | -------------------------------------------------------------------------------- /wikibase/1.31/base/LocalSettings.php.template: -------------------------------------------------------------------------------- 1 | '/var/log/mediawiki/resourceloader.log', 22 | 'exception' => '/var/log/mediawiki/exception.log', 23 | 'error' => '/var/log/mediawiki/error.log', 24 | ); 25 | 26 | ## Site Settings 27 | # TODO pass in the rest of this with env vars? 
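# (This template is rendered by entrypoint.sh with envsubst after exporting
# DOLLAR='$', so ${DOLLAR}wgFoo survives substitution as a literal PHP $wgFoo,
# while plain ${MW_SITE_LANG}-style placeholders are filled from the environment.)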
28 | ${DOLLAR}wgShellLocale = "en_US.utf8"; 29 | ${DOLLAR}wgLanguageCode = "${MW_SITE_LANG}"; 30 | ${DOLLAR}wgSitename = "${MW_SITE_NAME}"; 31 | ${DOLLAR}wgMetaNamespace = "Project"; 32 | # Configured web paths & short URLs 33 | # This allows use of the /wiki/* path 34 | ## https://www.mediawiki.org/wiki/Manual:Short_URL 35 | ${DOLLAR}wgScriptPath = "/w"; // this should already have been configured this way 36 | ${DOLLAR}wgArticlePath = "/wiki/${DOLLAR}1"; 37 | 38 | #Set Secret 39 | ${DOLLAR}wgSecretKey = "${MW_WG_SECRET_KEY}"; 40 | 41 | ## RC Age 42 | # https://www.mediawiki.org/wiki/Manual:$wgRCMaxAge 43 | # Items in the recentchanges table are periodically purged; entries older than this many seconds will go. 44 | # The query service (by default) loads data from recent changes 45 | # Set this to 1 year to avoid any changes being removed from the RC table over a shorter period of time. 46 | ${DOLLAR}wgRCMaxAge = 365 * 24 * 3600; 47 | 48 | wfLoadSkin( 'Vector' ); 49 | 50 | ## Wikibase 51 | # Load Wikibase repo, client & lib with the example / default settings. 52 | require_once "${DOLLAR}IP/extensions/Wikibase/lib/WikibaseLib.php"; 53 | require_once "${DOLLAR}IP/extensions/Wikibase/repo/Wikibase.php"; 54 | require_once "${DOLLAR}IP/extensions/Wikibase/repo/ExampleSettings.php"; 55 | require_once "${DOLLAR}IP/extensions/Wikibase/client/WikibaseClient.php"; 56 | require_once "${DOLLAR}IP/extensions/Wikibase/client/ExampleSettings.php"; 57 | -------------------------------------------------------------------------------- /wikibase/1.31/base/composer.local.json: -------------------------------------------------------------------------------- 1 | { 2 | "extra": { 3 | "merge-plugin": { 4 | "include": [ 5 | "extensions/*/composer.json", 6 | "skins/*/composer.json" 7 | ] 8 | } 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /wikibase/1.31/base/download-extension.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | EXTENSION=$1 3 | 4 | TAR_URL=$(curl -s "https://www.mediawiki.org/w/api.php?action=query&list=extdistbranches&edbexts=$EXTENSION&formatversion=2&format=json" | jq -r ".query.extdistbranches.extensions.$EXTENSION.REL1_31") 5 | curl -s "$TAR_URL" -o "$EXTENSION".tar.gz 6 | -------------------------------------------------------------------------------- /wikibase/1.31/base/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This file is provided by the wikibase/wikibase docker image. 3 | 4 | # Test if required environment variables have been set 5 | REQUIRED_VARIABLES=(MW_ADMIN_NAME MW_ADMIN_PASS MW_ADMIN_EMAIL MW_WG_SECRET_KEY DB_SERVER DB_USER DB_PASS DB_NAME) 6 | for i in ${REQUIRED_VARIABLES[@]}; do 7 | eval THISSHOULDBESET=\$$i 8 | if [ -z "$THISSHOULDBESET" ]; then 9 | echo "$i is required but isn't set. You should pass it to docker. See: https://docs.docker.com/engine/reference/commandline/run/#set-environment-variables--e---env---env-file"; 10 | exit 1; 11 | fi 12 | done 13 | 14 | set -eu 15 | 16 | # Wait for the db to come up 17 | /wait-for-it.sh $DB_SERVER -t 300 18 | # Sometimes it appears to come up and then go back down meaning MW install fails 19 | # So wait for a second and double check! 
20 | sleep 1 21 | /wait-for-it.sh $DB_SERVER -t 300 22 | 23 | # Run extra scripts everytime 24 | if [ -f /extra-entrypoint-run-first.sh ]; then 25 | source /extra-entrypoint-run-first.sh 26 | fi 27 | 28 | # Do the mediawiki install (only if LocalSettings doesn't already exist) 29 | if [ ! -e "/var/www/html/LocalSettings.php" ]; then 30 | php /var/www/html/maintenance/install.php --dbuser "$DB_USER" --dbpass "$DB_PASS" --dbname "$DB_NAME" --dbserver "$DB_SERVER" --lang "$MW_SITE_LANG" --pass "$MW_ADMIN_PASS" "$MW_SITE_NAME" "$MW_ADMIN_NAME" 31 | php /var/www/html/maintenance/resetUserEmail.php --no-reset-password "$MW_ADMIN_NAME" "$MW_ADMIN_EMAIL" 32 | 33 | # Copy our LocalSettings into place after install from the template 34 | # https://stackoverflow.com/a/24964089/4746236 35 | export DOLLAR='$' 36 | envsubst < /LocalSettings.php.template > /var/www/html/LocalSettings.php 37 | 38 | # Run update.php to install Wikibase 39 | php /var/www/html/maintenance/update.php --quick 40 | 41 | # Run extrascripts on first run 42 | if [ -f /extra-install.sh ]; then 43 | source /extra-install.sh 44 | fi 45 | fi 46 | 47 | # Run the actual entry point 48 | docker-php-entrypoint apache2-foreground 49 | -------------------------------------------------------------------------------- /wikibase/1.31/base/htaccess: -------------------------------------------------------------------------------- 1 | # This file is provided by the wikibase/wikibase docker image. 2 | ## http://www.mediawiki.org/wiki/Manual:Short_URL/Apache 3 | 4 | # Enable the rewrite engine 5 | RewriteEngine On 6 | 7 | # Short url for wiki pages 8 | RewriteRule ^/?wiki(/.*)?$ %{DOCUMENT_ROOT}/w/index.php [L] 9 | 10 | # Redirect / to Main Page 11 | RewriteRule ^/*$ %{DOCUMENT_ROOT}/w/index.php [L] 12 | 13 | # rewrite /entity/ URLs like wikidata per 14 | # https://meta.wikimedia.org/wiki/Wikidata/Notes/URI_scheme 15 | RewriteRule ^/?entity/(.*)$ /wiki/Special:EntityData/$1 [R=303,QSA] 16 | -------------------------------------------------------------------------------- /wikibase/1.31/base/wait-for-it.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # This file is provided by the wikibase/wikibase docker image. 3 | # Use this script to test if a given TCP host/port are available 4 | 5 | cmdname=$(basename $0) 6 | 7 | echoerr() { if [[ $QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } 8 | 9 | usage() 10 | { 11 | cat << USAGE >&2 12 | Usage: 13 | $cmdname host:port [-s] [-t timeout] [-- command args] 14 | -h HOST | --host=HOST Host or IP under test 15 | -p PORT | --port=PORT TCP port under test 16 | Alternatively, you specify the host and port as host:port 17 | -s | --strict Only execute subcommand if the test succeeds 18 | -q | --quiet Don't output any status messages 19 | -t TIMEOUT | --timeout=TIMEOUT 20 | Timeout in seconds, zero for no timeout 21 | -- COMMAND ARGS Execute command with args after the test finishes 22 | USAGE 23 | exit 1 24 | } 25 | 26 | wait_for() 27 | { 28 | if [[ $TIMEOUT -gt 0 ]]; then 29 | echoerr "$cmdname: waiting $TIMEOUT seconds for $HOST:$PORT" 30 | else 31 | echoerr "$cmdname: waiting for $HOST:$PORT without a timeout" 32 | fi 33 | start_ts=$(date +%s) 34 | while : 35 | do 36 | if [[ $ISBUSY -eq 1 ]]; then 37 | nc -z $HOST $PORT 38 | result=$? 39 | else 40 | (echo > /dev/tcp/$HOST/$PORT) >/dev/null 2>&1 41 | result=$? 
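            # (/dev/tcp/HOST/PORT is a bash pseudo-device: redirecting to it attempts a TCP connection without needing nc)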
42 | fi 43 | if [[ $result -eq 0 ]]; then 44 | end_ts=$(date +%s) 45 | echoerr "$cmdname: $HOST:$PORT is available after $((end_ts - start_ts)) seconds" 46 | break 47 | fi 48 | sleep 1 49 | done 50 | return $result 51 | } 52 | 53 | wait_for_wrapper() 54 | { 55 | # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692 56 | if [[ $QUIET -eq 1 ]]; then 57 | timeout $BUSYTIMEFLAG $TIMEOUT $0 --quiet --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & 58 | else 59 | timeout $BUSYTIMEFLAG $TIMEOUT $0 --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & 60 | fi 61 | PID=$! 62 | trap "kill -INT -$PID" INT 63 | wait $PID 64 | RESULT=$? 65 | if [[ $RESULT -ne 0 ]]; then 66 | echoerr "$cmdname: timeout occurred after waiting $TIMEOUT seconds for $HOST:$PORT" 67 | fi 68 | return $RESULT 69 | } 70 | 71 | # process arguments 72 | while [[ $# -gt 0 ]] 73 | do 74 | case "$1" in 75 | *:* ) 76 | hostport=(${1//:/ }) 77 | HOST=${hostport[0]} 78 | PORT=${hostport[1]} 79 | shift 1 80 | ;; 81 | --child) 82 | CHILD=1 83 | shift 1 84 | ;; 85 | -q | --quiet) 86 | QUIET=1 87 | shift 1 88 | ;; 89 | -s | --strict) 90 | STRICT=1 91 | shift 1 92 | ;; 93 | -h) 94 | HOST="$2" 95 | if [[ $HOST == "" ]]; then break; fi 96 | shift 2 97 | ;; 98 | --host=*) 99 | HOST="${1#*=}" 100 | shift 1 101 | ;; 102 | -p) 103 | PORT="$2" 104 | if [[ $PORT == "" ]]; then break; fi 105 | shift 2 106 | ;; 107 | --port=*) 108 | PORT="${1#*=}" 109 | shift 1 110 | ;; 111 | -t) 112 | TIMEOUT="$2" 113 | if [[ $TIMEOUT == "" ]]; then break; fi 114 | shift 2 115 | ;; 116 | --timeout=*) 117 | TIMEOUT="${1#*=}" 118 | shift 1 119 | ;; 120 | --) 121 | shift 122 | CLI="$@" 123 | break 124 | ;; 125 | --help) 126 | usage 127 | ;; 128 | *) 129 | echoerr "Unknown argument: $1" 130 | usage 131 | ;; 132 | esac 133 | done 134 | 135 | if [[ "$HOST" == "" || "$PORT" == "" ]]; then 136 | echoerr "Error: you need to provide a host and port to test." 137 | usage 138 | fi 139 | 140 | TIMEOUT=${TIMEOUT:-15} 141 | STRICT=${STRICT:-0} 142 | CHILD=${CHILD:-0} 143 | QUIET=${QUIET:-0} 144 | 145 | # check to see if timeout is from busybox? 146 | # check to see if timeout is from busybox? 147 | TIMEOUT_PATH=$(realpath $(which timeout)) 148 | if [[ $TIMEOUT_PATH =~ "busybox" ]]; then 149 | ISBUSY=1 150 | BUSYTIMEFLAG="-t" 151 | else 152 | ISBUSY=0 153 | BUSYTIMEFLAG="" 154 | fi 155 | 156 | if [[ $CHILD -gt 0 ]]; then 157 | wait_for 158 | RESULT=$? 159 | exit $RESULT 160 | else 161 | if [[ $TIMEOUT -gt 0 ]]; then 162 | wait_for_wrapper 163 | RESULT=$? 164 | else 165 | wait_for 166 | RESULT=$? 167 | fi 168 | fi 169 | 170 | if [[ $CLI != "" ]]; then 171 | if [[ $RESULT -ne 0 && $STRICT -eq 1 ]]; then 172 | echoerr "$cmdname: strict mode, refusing to execute subprocess" 173 | exit $RESULT 174 | fi 175 | exec $CLI 176 | else 177 | exit $RESULT 178 | fi -------------------------------------------------------------------------------- /wikibase/1.31/bundle/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:xenial as fetcher 2 | 3 | RUN apt-get update && \ 4 | apt-get install --yes --no-install-recommends unzip=6.* jq=1.* curl=7.* ca-certificates=201* && \ 5 | apt-get clean && rm -rf /var/lib/apt/lists/* 6 | 7 | COPY download-extension.sh . 
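# (fetcher stage only: the tarballs below are downloaded and unpacked here, and
# later stages COPY just the extracted extension directories, so the archives
# never reach the final image)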
8 | ADD https://github.com/wikidata/WikibaseImport/archive/master.tar.gz /WikibaseImport.tar.gz 9 | RUN bash download-extension.sh OAuth;\ 10 | bash download-extension.sh Elastica;\ 11 | bash download-extension.sh CirrusSearch;\ 12 | bash download-extension.sh UniversalLanguageSelector;\ 13 | bash download-extension.sh cldr;\ 14 | tar xzf OAuth.tar.gz;\ 15 | tar xzf Elastica.tar.gz;\ 16 | tar xzf CirrusSearch.tar.gz;\ 17 | tar xzf UniversalLanguageSelector.tar.gz;\ 18 | tar xzf cldr.tar.gz;\ 19 | tar xzf WikibaseImport.tar.gz;\ 20 | rm ./*.tar.gz 21 | 22 | FROM wikibase/wikibase:1.31 as collector 23 | COPY --from=fetcher /WikibaseImport-master /var/www/html/extensions/WikibaseImport 24 | COPY --from=fetcher /Elastica /var/www/html/extensions/Elastica 25 | COPY --from=fetcher /OAuth /var/www/html/extensions/OAuth 26 | COPY --from=fetcher /CirrusSearch /var/www/html/extensions/CirrusSearch 27 | COPY --from=fetcher /UniversalLanguageSelector /var/www/html/extensions/UniversalLanguageSelector 28 | COPY --from=fetcher /cldr /var/www/html/extensions/cldr 29 | 30 | FROM composer@sha256:d374b2e1f715621e9d9929575d6b35b11cf4a6dc237d4a08f2e6d1611f534675 as composer 31 | COPY --from=collector /var/www/html /var/www/html 32 | WORKDIR /var/www/html/ 33 | RUN rm /var/www/html/composer.lock 34 | RUN composer install --no-dev 35 | 36 | FROM wikibase/wikibase:1.31 37 | 38 | RUN apt-get update && \ 39 | apt-get install --yes --no-install-recommends jq=1.* && \ 40 | apt-get clean && rm -rf /var/lib/apt/lists/* 41 | COPY --from=composer /var/www/html /var/www/html 42 | COPY LocalSettings.php.wikibase-bundle.template /LocalSettings.php.wikibase-bundle.template 43 | COPY extra-install.sh / 44 | COPY extra-entrypoint-run-first.sh / 45 | RUN cat /LocalSettings.php.wikibase-bundle.template >> /LocalSettings.php.template && rm /LocalSettings.php.wikibase-bundle.template 46 | COPY oauth.ini /templates/oauth.ini 47 | -------------------------------------------------------------------------------- /wikibase/1.31/bundle/LocalSettings.php.wikibase-bundle.template: -------------------------------------------------------------------------------- 1 | # OAuth 2 | wfLoadExtension( 'OAuth' ); 3 | ${DOLLAR}wgGroupPermissions['sysop']['mwoauthproposeconsumer'] = true; 4 | ${DOLLAR}wgGroupPermissions['sysop']['mwoauthmanageconsumer'] = true; 5 | ${DOLLAR}wgGroupPermissions['sysop']['mwoauthviewprivate'] = true; 6 | ${DOLLAR}wgGroupPermissions['sysop']['mwoauthupdateownconsumer'] = true; 7 | 8 | # WikibaseImport 9 | require_once "${DOLLAR}IP/extensions/WikibaseImport/WikibaseImport.php"; 10 | 11 | # CirrusSearch 12 | wfLoadExtension( 'Elastica' ); 13 | require_once "${DOLLAR}IP/extensions/CirrusSearch/CirrusSearch.php"; 14 | ${DOLLAR}wgCirrusSearchServers = [ '${MW_ELASTIC_HOST}' ]; 15 | ${DOLLAR}wgSearchType = 'CirrusSearch'; 16 | ${DOLLAR}wgCirrusSearchExtraIndexSettings['index.mapping.total_fields.limit'] = 5000; 17 | 18 | # UniversalLanguageSelector 19 | wfLoadExtension( 'UniversalLanguageSelector' ); 20 | 21 | # cldr 22 | wfLoadExtension( 'cldr' ); 23 | -------------------------------------------------------------------------------- /wikibase/1.31/bundle/download-extension.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | EXTENSION=$1 3 | 4 | TAR_URL=$(curl -s "https://www.mediawiki.org/w/api.php?action=query&list=extdistbranches&edbexts=$EXTENSION&formatversion=2&format=json" | jq -r ".query.extdistbranches.extensions.$EXTENSION.REL1_31") 5 | curl -s "$TAR_URL" 
-o "$EXTENSION".tar.gz 6 | -------------------------------------------------------------------------------- /wikibase/1.31/bundle/extra-entrypoint-run-first.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | /wait-for-it.sh $MW_ELASTIC_HOST:$MW_ELASTIC_PORT -t 300 4 | -------------------------------------------------------------------------------- /wikibase/1.31/bundle/extra-install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | php /var/www/html/extensions/CirrusSearch/maintenance/updateSearchIndexConfig.php 4 | php /var/www/html/extensions/CirrusSearch/maintenance/forceSearchIndex.php --skipParse 5 | php /var/www/html/extensions/CirrusSearch/maintenance/forceSearchIndex.php --skipLinks --indexOnSkip 6 | 7 | n=0 8 | until [ $n -ge 5 ] 9 | do 10 | php /var/www/html/extensions/OAuth/maintenance/createOAuthConsumer.php --approve --callbackUrl $QS_PUBLIC_SCHEME_HOST_AND_PORT/api.php \ 11 | --callbackIsPrefix true --user $MW_ADMIN_NAME --name QuickStatements --description QuickStatements --version 1.0.1 \ 12 | --grants createeditmovepage --grants editpage --grants highvolume --jsonOnSuccess > /quickstatements/data/qs-oauth.json && break 13 | n=$[$n+1] 14 | sleep 5s 15 | done 16 | 17 | if [[ -f /quickstatements/data/qs-oauth.json ]]; then 18 | export OAUTH_CONSUMER_KEY=$(jq -r '.key' /quickstatements/data/qs-oauth.json); 19 | export OAUTH_CONSUMER_SECRET=$(jq -r '.secret' /quickstatements/data/qs-oauth.json); 20 | envsubst < /templates/oauth.ini > /quickstatements/data/oauth.ini 21 | fi -------------------------------------------------------------------------------- /wikibase/1.31/bundle/oauth.ini: -------------------------------------------------------------------------------- 1 | ; HTTP User-Agent header 2 | agent = 'Wikibase Docker QuickStatements' 3 | ; assigned by Special:OAuthConsumerRegistration (request modelled after https://www.wikidata.org/wiki/Special:OAuthListConsumers/view/77b4ae5506dd7dbb0bb07f80e3ae3ca9) 4 | consumerKey = '${OAUTH_CONSUMER_KEY}' 5 | consumerSecret = '${OAUTH_CONSUMER_SECRET}' 6 | -------------------------------------------------------------------------------- /wikibase/1.35/.travis/build-deploy.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | #Oneline for full directory name see: https://stackoverflow.com/questions/59895/getting-the-source-directory-of-a-bash-script-from-within 3 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 4 | set -e 5 | docker build "$DIR/../base" -t wikibase/wikibase:1.35 -t wikibase/wikibase:1.35-base -t wikibase/wikibase:latest -t wikibase/wikibase:latest-base -t wikibase/wikibase:lts -t wikibase/wikibase:lts-base 6 | docker build "$DIR/../bundle" -t wikibase/wikibase:1.35-bundle -t wikibase/wikibase:latest-bundle -t wikibase/wikibase:lts-bundle 7 | 8 | if [ "$SHOULD_DOCKER_PUSH" = true ]; then 9 | docker push wikibase/wikibase:lts 10 | docker push wikibase/wikibase:1.35 11 | docker push wikibase/wikibase:latest 12 | docker push wikibase/wikibase:lts-base 13 | docker push wikibase/wikibase:1.35-base 14 | docker push wikibase/wikibase:lts-bundle 15 | docker push wikibase/wikibase:latest-base 16 | docker push wikibase/wikibase:1.35-bundle 17 | docker push wikibase/wikibase:latest-bundle 18 | fi 19 | -------------------------------------------------------------------------------- /wikibase/1.35/base/Dockerfile: 
-------------------------------------------------------------------------------- 1 | FROM ubuntu:xenial as fetcher 2 | 3 | COPY download-extension.sh . 4 | RUN apt-get update && \ 5 | apt-get install --yes --no-install-recommends unzip=6.* jq=1.* curl=7.* ca-certificates=201* && \ 6 | apt-get clean && rm -rf /var/lib/apt/lists/* 7 | 8 | RUN bash download-extension.sh Wikibase;\ 9 | tar xzf Wikibase.tar.gz; 10 | 11 | 12 | FROM mediawiki:1.35 as collector 13 | COPY --from=fetcher /Wikibase /var/www/html/extensions/Wikibase 14 | 15 | 16 | FROM composer@sha256:d374b2e1f715621e9d9929575d6b35b11cf4a6dc237d4a08f2e6d1611f534675 as composer 17 | COPY --from=collector /var/www/html /var/www/html 18 | WORKDIR /var/www/html/ 19 | COPY composer.local.json /var/www/html/composer.local.json 20 | RUN composer install --no-dev 21 | 22 | 23 | FROM mediawiki:1.35 24 | 25 | RUN apt-get update && \ 26 | DEBIAN_FRONTEND=noninteractive apt-get install --yes --no-install-recommends libbz2-dev=1.* gettext-base=0.19.* && \ 27 | rm -rf /var/lib/apt/lists/* 28 | 29 | RUN a2enmod rewrite 30 | 31 | RUN install -d /var/log/mediawiki -o www-data 32 | 33 | RUN docker-php-ext-install calendar bz2 34 | 35 | COPY --from=composer /var/www/html /var/www/html 36 | COPY wait-for-it.sh /wait-for-it.sh 37 | COPY entrypoint.sh /entrypoint.sh 38 | COPY LocalSettings.php.template /LocalSettings.php.template 39 | COPY htaccess /var/www/html/.htaccess 40 | 41 | RUN ln -s /var/www/html/ /var/www/html/w 42 | 43 | ENV MW_SITE_NAME=wikibase-docker\ 44 | MW_SITE_LANG=en 45 | 46 | ENTRYPOINT ["/bin/bash"] 47 | CMD ["/entrypoint.sh"] 48 | -------------------------------------------------------------------------------- /wikibase/1.35/base/LocalSettings.php.template: -------------------------------------------------------------------------------- 1 | '/var/log/mediawiki/resourceloader.log', 22 | 'exception' => '/var/log/mediawiki/exception.log', 23 | 'error' => '/var/log/mediawiki/error.log', 24 | ); 25 | 26 | ## Site Settings 27 | # TODO pass in the rest of this with env vars? 28 | ${DOLLAR}wgServer = WebRequest::detectServer(); 29 | ${DOLLAR}wgShellLocale = "en_US.utf8"; 30 | ${DOLLAR}wgLanguageCode = "${MW_SITE_LANG}"; 31 | ${DOLLAR}wgSitename = "${MW_SITE_NAME}"; 32 | ${DOLLAR}wgMetaNamespace = "Project"; 33 | # Configured web paths & short URLs 34 | # This allows use of the /wiki/* path 35 | ## https://www.mediawiki.org/wiki/Manual:Short_URL 36 | ${DOLLAR}wgScriptPath = "/w"; // this should already have been configured this way 37 | ${DOLLAR}wgArticlePath = "/wiki/${DOLLAR}1"; 38 | 39 | #Set Secret 40 | ${DOLLAR}wgSecretKey = "${MW_WG_SECRET_KEY}"; 41 | 42 | ## RC Age 43 | # https://www.mediawiki.org/wiki/Manual:$wgRCMaxAge 44 | # Items in the recentchanges table are periodically purged; entries older than this many seconds will go. 45 | # The query service (by default) loads data from recent changes 46 | # Set this to 1 year to avoid any changes being removed from the RC table over a shorter period of time. 47 | ${DOLLAR}wgRCMaxAge = 365 * 24 * 3600; 48 | 49 | wfLoadSkin( 'Vector' ); 50 | 51 | ## Wikibase 52 | # Load Wikibase repo & client with the example / default settings. 
53 | require_once "${DOLLAR}IP/extensions/Wikibase/repo/Wikibase.php"; 54 | require_once "${DOLLAR}IP/extensions/Wikibase/repo/ExampleSettings.php"; 55 | require_once "${DOLLAR}IP/extensions/Wikibase/client/WikibaseClient.php"; 56 | require_once "${DOLLAR}IP/extensions/Wikibase/client/ExampleSettings.php"; 57 | -------------------------------------------------------------------------------- /wikibase/1.35/base/composer.local.json: -------------------------------------------------------------------------------- 1 | { 2 | "extra": { 3 | "merge-plugin": { 4 | "include": [ 5 | "extensions/*/composer.json", 6 | "skins/*/composer.json" 7 | ] 8 | } 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /wikibase/1.35/base/download-extension.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | EXTENSION=$1 3 | 4 | TAR_URL=$(curl -s "https://www.mediawiki.org/w/api.php?action=query&list=extdistbranches&edbexts=$EXTENSION&formatversion=2&format=json" | jq -r ".query.extdistbranches.extensions.$EXTENSION.REL1_35") 5 | curl -s "$TAR_URL" -o "$EXTENSION".tar.gz 6 | -------------------------------------------------------------------------------- /wikibase/1.35/base/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This file is provided by the wikibase/wikibase docker image. 3 | 4 | # Test if required environment variables have been set 5 | REQUIRED_VARIABLES=(MW_ADMIN_NAME MW_ADMIN_PASS MW_ADMIN_EMAIL MW_WG_SECRET_KEY DB_SERVER DB_USER DB_PASS DB_NAME) 6 | for i in ${REQUIRED_VARIABLES[@]}; do 7 | eval THISSHOULDBESET=\$$i 8 | if [ -z "$THISSHOULDBESET" ]; then 9 | echo "$i is required but isn't set. You should pass it to docker. See: https://docs.docker.com/engine/reference/commandline/run/#set-environment-variables--e---env---env-file"; 10 | exit 1; 11 | fi 12 | done 13 | 14 | set -eu 15 | 16 | # Wait for the db to come up 17 | /wait-for-it.sh $DB_SERVER -t 300 18 | # Sometimes it appears to come up and then go back down meaning MW install fails 19 | # So wait for a second and double check! 20 | sleep 1 21 | /wait-for-it.sh $DB_SERVER -t 300 22 | 23 | # Run extra scripts everytime 24 | if [ -f /extra-entrypoint-run-first.sh ]; then 25 | source /extra-entrypoint-run-first.sh 26 | fi 27 | 28 | # Do the mediawiki install (only if LocalSettings doesn't already exist) 29 | if [ ! 
-e "/var/www/html/LocalSettings.php" ]; then 30 | php /var/www/html/maintenance/install.php --dbuser "$DB_USER" --dbpass "$DB_PASS" --dbname "$DB_NAME" --dbserver "$DB_SERVER" --lang "$MW_SITE_LANG" --pass "$MW_ADMIN_PASS" "$MW_SITE_NAME" "$MW_ADMIN_NAME" 31 | php /var/www/html/maintenance/resetUserEmail.php --no-reset-password "$MW_ADMIN_NAME" "$MW_ADMIN_EMAIL" 32 | 33 | # Copy our LocalSettings into place after install from the template 34 | # https://stackoverflow.com/a/24964089/4746236 35 | export DOLLAR='$' 36 | envsubst < /LocalSettings.php.template > /var/www/html/LocalSettings.php 37 | 38 | # Run update.php to install Wikibase 39 | php /var/www/html/maintenance/update.php --quick 40 | 41 | # Run extrascripts on first run 42 | if [ -f /extra-install.sh ]; then 43 | source /extra-install.sh 44 | fi 45 | fi 46 | 47 | # Run the actual entry point 48 | docker-php-entrypoint apache2-foreground 49 | -------------------------------------------------------------------------------- /wikibase/1.35/base/htaccess: -------------------------------------------------------------------------------- 1 | # This file is provided by the wikibase/wikibase docker image. 2 | ## http://www.mediawiki.org/wiki/Manual:Short_URL/Apache 3 | 4 | # Enable the rewrite engine 5 | RewriteEngine On 6 | 7 | # Short url for wiki pages 8 | RewriteRule ^/?wiki(/.*)?$ %{DOCUMENT_ROOT}/w/index.php [L] 9 | 10 | # Redirect / to Main Page 11 | RewriteRule ^/*$ %{DOCUMENT_ROOT}/w/index.php [L] 12 | 13 | # rewrite /entity/ URLs like wikidata per 14 | # https://meta.wikimedia.org/wiki/Wikidata/Notes/URI_scheme 15 | RewriteRule ^/?entity/(.*)$ /wiki/Special:EntityData/$1 [R=303,QSA] 16 | -------------------------------------------------------------------------------- /wikibase/1.35/base/wait-for-it.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # This file is provided by the wikibase/wikibase docker image. 3 | # Use this script to test if a given TCP host/port are available 4 | 5 | cmdname=$(basename $0) 6 | 7 | echoerr() { if [[ $QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } 8 | 9 | usage() 10 | { 11 | cat << USAGE >&2 12 | Usage: 13 | $cmdname host:port [-s] [-t timeout] [-- command args] 14 | -h HOST | --host=HOST Host or IP under test 15 | -p PORT | --port=PORT TCP port under test 16 | Alternatively, you specify the host and port as host:port 17 | -s | --strict Only execute subcommand if the test succeeds 18 | -q | --quiet Don't output any status messages 19 | -t TIMEOUT | --timeout=TIMEOUT 20 | Timeout in seconds, zero for no timeout 21 | -- COMMAND ARGS Execute command with args after the test finishes 22 | USAGE 23 | exit 1 24 | } 25 | 26 | wait_for() 27 | { 28 | if [[ $TIMEOUT -gt 0 ]]; then 29 | echoerr "$cmdname: waiting $TIMEOUT seconds for $HOST:$PORT" 30 | else 31 | echoerr "$cmdname: waiting for $HOST:$PORT without a timeout" 32 | fi 33 | start_ts=$(date +%s) 34 | while : 35 | do 36 | if [[ $ISBUSY -eq 1 ]]; then 37 | nc -z $HOST $PORT 38 | result=$? 39 | else 40 | (echo > /dev/tcp/$HOST/$PORT) >/dev/null 2>&1 41 | result=$? 
42 | fi 43 | if [[ $result -eq 0 ]]; then 44 | end_ts=$(date +%s) 45 | echoerr "$cmdname: $HOST:$PORT is available after $((end_ts - start_ts)) seconds" 46 | break 47 | fi 48 | sleep 1 49 | done 50 | return $result 51 | } 52 | 53 | wait_for_wrapper() 54 | { 55 | # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692 56 | if [[ $QUIET -eq 1 ]]; then 57 | timeout $BUSYTIMEFLAG $TIMEOUT $0 --quiet --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & 58 | else 59 | timeout $BUSYTIMEFLAG $TIMEOUT $0 --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & 60 | fi 61 | PID=$! 62 | trap "kill -INT -$PID" INT 63 | wait $PID 64 | RESULT=$? 65 | if [[ $RESULT -ne 0 ]]; then 66 | echoerr "$cmdname: timeout occurred after waiting $TIMEOUT seconds for $HOST:$PORT" 67 | fi 68 | return $RESULT 69 | } 70 | 71 | # process arguments 72 | while [[ $# -gt 0 ]] 73 | do 74 | case "$1" in 75 | *:* ) 76 | hostport=(${1//:/ }) 77 | HOST=${hostport[0]} 78 | PORT=${hostport[1]} 79 | shift 1 80 | ;; 81 | --child) 82 | CHILD=1 83 | shift 1 84 | ;; 85 | -q | --quiet) 86 | QUIET=1 87 | shift 1 88 | ;; 89 | -s | --strict) 90 | STRICT=1 91 | shift 1 92 | ;; 93 | -h) 94 | HOST="$2" 95 | if [[ $HOST == "" ]]; then break; fi 96 | shift 2 97 | ;; 98 | --host=*) 99 | HOST="${1#*=}" 100 | shift 1 101 | ;; 102 | -p) 103 | PORT="$2" 104 | if [[ $PORT == "" ]]; then break; fi 105 | shift 2 106 | ;; 107 | --port=*) 108 | PORT="${1#*=}" 109 | shift 1 110 | ;; 111 | -t) 112 | TIMEOUT="$2" 113 | if [[ $TIMEOUT == "" ]]; then break; fi 114 | shift 2 115 | ;; 116 | --timeout=*) 117 | TIMEOUT="${1#*=}" 118 | shift 1 119 | ;; 120 | --) 121 | shift 122 | CLI="$@" 123 | break 124 | ;; 125 | --help) 126 | usage 127 | ;; 128 | *) 129 | echoerr "Unknown argument: $1" 130 | usage 131 | ;; 132 | esac 133 | done 134 | 135 | if [[ "$HOST" == "" || "$PORT" == "" ]]; then 136 | echoerr "Error: you need to provide a host and port to test." 137 | usage 138 | fi 139 | 140 | TIMEOUT=${TIMEOUT:-15} 141 | STRICT=${STRICT:-0} 142 | CHILD=${CHILD:-0} 143 | QUIET=${QUIET:-0} 144 | 145 | # check to see if timeout is from busybox? 146 | # check to see if timeout is from busybox? 147 | TIMEOUT_PATH=$(realpath $(which timeout)) 148 | if [[ $TIMEOUT_PATH =~ "busybox" ]]; then 149 | ISBUSY=1 150 | BUSYTIMEFLAG="-t" 151 | else 152 | ISBUSY=0 153 | BUSYTIMEFLAG="" 154 | fi 155 | 156 | if [[ $CHILD -gt 0 ]]; then 157 | wait_for 158 | RESULT=$? 159 | exit $RESULT 160 | else 161 | if [[ $TIMEOUT -gt 0 ]]; then 162 | wait_for_wrapper 163 | RESULT=$? 164 | else 165 | wait_for 166 | RESULT=$? 167 | fi 168 | fi 169 | 170 | if [[ $CLI != "" ]]; then 171 | if [[ $RESULT -ne 0 && $STRICT -eq 1 ]]; then 172 | echoerr "$cmdname: strict mode, refusing to execute subprocess" 173 | exit $RESULT 174 | fi 175 | exec $CLI 176 | else 177 | exit $RESULT 178 | fi -------------------------------------------------------------------------------- /wikibase/1.35/bundle/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:xenial as fetcher 2 | 3 | RUN apt-get update && \ 4 | apt-get install --yes --no-install-recommends unzip=6.* jq=1.* curl=7.* ca-certificates=201* && \ 5 | apt-get clean && rm -rf /var/lib/apt/lists/* 6 | 7 | COPY download-extension.sh . 
8 | ADD https://github.com/wikidata/WikibaseImport/archive/master.tar.gz /WikibaseImport.tar.gz 9 | RUN bash download-extension.sh OAuth;\ 10 | bash download-extension.sh Elastica;\ 11 | bash download-extension.sh CirrusSearch;\ 12 | bash download-extension.sh WikibaseCirrusSearch;\ 13 | bash download-extension.sh UniversalLanguageSelector;\ 14 | bash download-extension.sh cldr;\ 15 | bash download-extension.sh EntitySchema;\ 16 | tar xzf OAuth.tar.gz;\ 17 | tar xzf Elastica.tar.gz;\ 18 | tar xzf CirrusSearch.tar.gz;\ 19 | tar xzf WikibaseCirrusSearch.tar.gz;\ 20 | tar xzf UniversalLanguageSelector.tar.gz;\ 21 | tar xzf cldr.tar.gz;\ 22 | tar xzf WikibaseImport.tar.gz;\ 23 | tar xzf EntitySchema.tar.gz;\ 24 | rm ./*.tar.gz 25 | 26 | FROM wikibase/wikibase:1.35 as collector 27 | COPY --from=fetcher /WikibaseImport-master /var/www/html/extensions/WikibaseImport 28 | COPY --from=fetcher /Elastica /var/www/html/extensions/Elastica 29 | COPY --from=fetcher /OAuth /var/www/html/extensions/OAuth 30 | COPY --from=fetcher /CirrusSearch /var/www/html/extensions/CirrusSearch 31 | COPY --from=fetcher /WikibaseCirrusSearch /var/www/html/extensions/WikibaseCirrusSearch 32 | COPY --from=fetcher /UniversalLanguageSelector /var/www/html/extensions/UniversalLanguageSelector 33 | COPY --from=fetcher /cldr /var/www/html/extensions/cldr 34 | COPY --from=fetcher /EntitySchema /var/www/html/extensions/EntitySchema 35 | 36 | FROM composer@sha256:d374b2e1f715621e9d9929575d6b35b11cf4a6dc237d4a08f2e6d1611f534675 as composer 37 | COPY --from=collector /var/www/html /var/www/html 38 | WORKDIR /var/www/html/ 39 | RUN rm /var/www/html/composer.lock 40 | RUN composer install --no-dev 41 | 42 | FROM wikibase/wikibase:1.35 43 | 44 | RUN apt-get update && \ 45 | apt-get install --yes --no-install-recommends jq=1.* && \ 46 | apt-get clean && rm -rf /var/lib/apt/lists/* 47 | 48 | COPY --from=composer /var/www/html /var/www/html 49 | COPY LocalSettings.php.wikibase-bundle.template /LocalSettings.php.wikibase-bundle.template 50 | COPY extra-install.sh / 51 | COPY extra-entrypoint-run-first.sh / 52 | RUN cat /LocalSettings.php.wikibase-bundle.template >> /LocalSettings.php.template && rm /LocalSettings.php.wikibase-bundle.template 53 | COPY oauth.ini /templates/oauth.ini 54 | -------------------------------------------------------------------------------- /wikibase/1.35/bundle/LocalSettings.php.wikibase-bundle.template: -------------------------------------------------------------------------------- 1 | # OAuth 2 | wfLoadExtension( 'OAuth' ); 3 | ${DOLLAR}wgGroupPermissions['sysop']['mwoauthproposeconsumer'] = true; 4 | ${DOLLAR}wgGroupPermissions['sysop']['mwoauthmanageconsumer'] = true; 5 | ${DOLLAR}wgGroupPermissions['sysop']['mwoauthviewprivate'] = true; 6 | ${DOLLAR}wgGroupPermissions['sysop']['mwoauthupdateownconsumer'] = true; 7 | 8 | # WikibaseImport 9 | require_once "${DOLLAR}IP/extensions/WikibaseImport/WikibaseImport.php"; 10 | 11 | # CirrusSearch 12 | wfLoadExtension( 'Elastica' ); 13 | wfLoadExtension( 'CirrusSearch' ); 14 | wfLoadExtension( 'WikibaseCirrusSearch' ); 15 | ${DOLLAR}wgCirrusSearchServers = [ '${MW_ELASTIC_HOST}' ]; 16 | ${DOLLAR}wgSearchType = 'CirrusSearch'; 17 | ${DOLLAR}wgCirrusSearchExtraIndexSettings['index.mapping.total_fields.limit'] = 5000; 18 | ${DOLLAR}wgWBCSUseCirrus = true; 19 | 20 | # UniversalLanguageSelector 21 | wfLoadExtension( 'UniversalLanguageSelector' ); 22 | 23 | # cldr 24 | wfLoadExtension( 'cldr' ); 25 | 26 | #EntitySchema 27 | wfLoadExtension( 'EntitySchema' ); 28 | 
-------------------------------------------------------------------------------- /wikibase/1.35/bundle/download-extension.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | EXTENSION=$1 3 | 4 | TAR_URL=$(curl -s "https://www.mediawiki.org/w/api.php?action=query&list=extdistbranches&edbexts=$EXTENSION&formatversion=2&format=json" | jq -r ".query.extdistbranches.extensions.$EXTENSION.REL1_35") 5 | curl -s "$TAR_URL" -o "$EXTENSION".tar.gz 6 | -------------------------------------------------------------------------------- /wikibase/1.35/bundle/extra-entrypoint-run-first.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | /wait-for-it.sh $MW_ELASTIC_HOST:$MW_ELASTIC_PORT -t 300 4 | -------------------------------------------------------------------------------- /wikibase/1.35/bundle/extra-install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | php /var/www/html/extensions/CirrusSearch/maintenance/UpdateSearchIndexConfig.php 4 | php /var/www/html/extensions/CirrusSearch/maintenance/ForceSearchIndex.php --skipParse 5 | php /var/www/html/extensions/CirrusSearch/maintenance/ForceSearchIndex.php --skipLinks --indexOnSkip 6 | 7 | n=0 8 | until [ $n -ge 5 ] 9 | do 10 | php /var/www/html/extensions/OAuth/maintenance/createOAuthConsumer.php --approve --callbackUrl $QS_PUBLIC_SCHEME_HOST_AND_PORT/api.php \ 11 | --callbackIsPrefix true --user $MW_ADMIN_NAME --name QuickStatements --description QuickStatements --version 1.0.1 \ 12 | --grants createeditmovepage --grants editpage --grants highvolume --jsonOnSuccess > /quickstatements/data/qs-oauth.json && break 13 | n=$[$n+1] 14 | sleep 5s 15 | done 16 | 17 | if [[ -f /quickstatements/data/qs-oauth.json ]]; then 18 | export OAUTH_CONSUMER_KEY=$(jq -r '.key' /quickstatements/data/qs-oauth.json); 19 | export OAUTH_CONSUMER_SECRET=$(jq -r '.secret' /quickstatements/data/qs-oauth.json); 20 | envsubst < /templates/oauth.ini > /quickstatements/data/oauth.ini 21 | fi 22 | -------------------------------------------------------------------------------- /wikibase/1.35/bundle/oauth.ini: -------------------------------------------------------------------------------- 1 | ; HTTP User-Agent header 2 | agent = 'Wikibase Docker QuickStatements' 3 | ; assigned by Special:OAuthConsumerRegistration (request modelled after https://www.wikidata.org/wiki/Special:OAuthListConsumers/view/77b4ae5506dd7dbb0bb07f80e3ae3ca9) 4 | consumerKey = '${OAUTH_CONSUMER_KEY}' 5 | consumerSecret = '${OAUTH_CONSUMER_SECRET}' 6 | -------------------------------------------------------------------------------- /wikibase/README.md: -------------------------------------------------------------------------------- 1 | ## wikibase docker image 2 | 3 | Wikibase extension running on Mediawiki. 4 | 5 | If no `LocalSettings.php` file is mounted then an installation & update will automatically happen. 6 | If `LocalSettings.php` is mounted then the image assumes you will do this yourself. 7 | 8 | Automated build. 
9 | 10 | ### Tags 11 | 12 | Image name | Parent image 13 | ------------------------------------------------------------------------------------ | ------------------------ 14 | `wikibase/wikibase` : `latest`, `1.35`, `lts`, `latest-base`, `1.35-base`, `lts-base` | [mediawiki:1.35](https://hub.docker.com/_/mediawiki/) 15 | `wikibase/wikibase` : `1.31`, `1.31-base` | [mediawiki:1.31](https://hub.docker.com/_/mediawiki/) 16 | `wikibase/wikibase` : `latest-bundle`, `lts-bundle`, `1.35-bundle` | [wikibase:1.35](https://hub.docker.com/r/wikibase/wikibase/) 17 | `wikibase/wikibase` : `1.31-bundle` | [wikibase:1.31](https://hub.docker.com/r/wikibase/wikibase/) 18 | 19 | ### Version support 20 | 21 | Wikibase operates a continuous development model (alongside Mediawiki), where software changes are pushed live to Wikimedia sites such as Wikidata on a regular basis. 22 | 23 | The LTS (long term stable) and EOL (end of life) versions and dates for Wikibase are the same as those for Mediawiki. 24 | 25 | See https://www.mediawiki.org/wiki/Version_lifecycle for more details. 26 | 27 | ## Bundle image 28 | Wikibase-bundle images are built from the base wikibase images and also include the following additional extensions: 29 | - [OAuth](https://www.mediawiki.org/wiki/Extension:OAuth) 30 | - [Elastica](https://www.mediawiki.org/wiki/Extension:Elastica) 31 | - [CirrusSearch](https://www.mediawiki.org/wiki/Extension:CirrusSearch) 32 | - [CLDR](https://www.mediawiki.org/wiki/Extension:CLDR) 33 | - [WikibaseImport](https://github.com/filbertkm/WikibaseImport) 34 | - [UniversalLanguageSelector](https://www.mediawiki.org/wiki/Extension:UniversalLanguageSelector) 35 | - [EntitySchema](https://www.mediawiki.org/wiki/Extension:EntitySchema) (from 1.33) 36 | - [WikibaseCirrusSearch](https://www.mediawiki.org/wiki/Extension:WikibaseCirrusSearch) (from 1.34) 37 | 38 | ### Upgrading 39 | 40 | When upgrading between Wikibase versions you will have to run update.php to update your mysql tables. 41 | 42 | A blog post documenting the update progress for this image in a docker-compose setup can be found [here](https://addshore.com/2019/01/wikibase-docker-mediawiki-wikibase-update/) 43 | 44 | 45 | ### Environment variables 46 | 47 | Note: MW_ADMIN_NAME and MW_ADMIN_PASS probably shouldn't be here... 
48 | 49 | Variable | Default | Description 50 | ------------------| -------------------------| ---------- 51 | `DB_SERVER` | "mysql.svc:3306" | Hostname and port for the MySQL server to use for Mediawiki & Wikibase 52 | `DB_USER` | "wikiuser" | Username to use for the MySQL server 53 | `DB_PASS` | "sqlpass" | Password to use for the MySQL server 54 | `DB_NAME` | "my_wiki" | Database name to use for the MySQL server 55 | `MW_SITE_NAME` | "wikibase-docker" | $wgSitename to use for MediaWiki 56 | `MW_SITE_LANG` | "en" | $wgLanguageCode to use for MediaWiki 57 | `MW_ADMIN_NAME` | "WikibaseAdmin" | Admin username to create on MediaWiki first install 58 | `MW_ADMIN_PASS` | "WikibaseDockerAdminPass" | Admin password to use for admin account on first install 59 | `MW_WG_SECRET_KEY`| "secretkey" | Used as source of entropy for persistent login/Oauth etc..(since 1.30) 60 | 61 | ### Filesystem layout 62 | 63 | Directory | Description 64 | --------------------------------- | ------------------------------------------------------------------------------ 65 | `/var/www/html` | Base Mediawiki directory 66 | `/var/www/html/skins` | Mediawiki skins directory 67 | `/var/www/html/extensions` | Mediawiki extensions directory 68 | 69 | File | Description 70 | --------------------------------- | ------------------------------------------------------------------------------ 71 | `/LocalSettings.php.template` | Template for Mediawiki Localsettings.php (substituted to `/var/www/html/LocalSettings.php` at runtime) 72 | `/var/www/html/LocalSettings.php` | LocalSettings.php location, when passed in `/LocalSettings.php.template` will not be used. install.php & update.php will also not be run. 73 | `/extra-install.sh` | Extra code that will be run if LocalSettings.php isn't present (since 1.30) 74 | `/extra-preinstall-runtime.sh` | Extra code that will be run from the entrypoint every time (since 1.30) 75 | 76 | ### Running Maintenance Scripts 77 | Maintenance scripts from extensions and mediawiki core can be run with `docker exec` using the wikibase/wikibase container as the targeted container 78 | 79 | For example to run a maintenance script from WikibaseImport: 80 | 81 | ```docker exec php //var/www/html/extensions/WikibaseImport/maintenance/importEntities.php --entity Q147``` 82 | 83 | ### Development 84 | 85 | A new image should be created for every major release of MediaWiki and Wikibase. 86 | These images currently use the [MediaWiki docker hub base image](https://hub.docker.com/_/mediawiki), so that needs to have a new version prior to updates here. 87 | 88 | - Create a new release folder, copying the content from a previous release 89 | - Update the base Dockerfile to fetch the latest mediawiki image 90 | - Update the bundle Dockerfile to use the new version of the base image 91 | - Update download-extension.sh to fetch new versions of the extensions 92 | - Update the CI build by checking the steps in the main README Development section in this repo. 93 | 94 | Releases that are no longer supported per the [Version lifecycle](https://www.mediawiki.org/wiki/Version_lifecycle) can be deleted. 95 | --------------------------------------------------------------------------------