6 |
7 | #if($response.response.get('grouped'))
8 |
9 | #foreach($grouping in $response.response.get('grouped'))
10 | #parse("hit_grouped.vm")
11 | #end
12 |
13 | #else
14 |
15 | #foreach($doc in $response.results)
16 | #parse("hit.vm")
17 | ## Can get an extremely simple view of the doc
18 | ## which might be nicer for debugging
19 | ##parse("hit_plain.vm")
20 | #end
21 |
22 | #end
23 |
--------------------------------------------------------------------------------
/whitelabel/docker/cdp711/build/templates/solr/conf/configsets/sample_techproducts_configs/conf/velocity/suggest.vm:
--------------------------------------------------------------------------------
1 | #**
2 | * Provides dynamic spelling suggestions
3 | * as you type in the search form
4 | *#
5 |
6 | #foreach($t in $response.response.terms.name)
7 | $t.key
8 | #end
9 |
--------------------------------------------------------------------------------
/whitelabel/docker/cdp711/docker-entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -eax  # -e exit on error, -a auto-export assigned variables, -x trace commands
3 |
4 | $SOLR_HOME/bin/solr zk upconfig -n schemalessTemplate -d $SOLR_HOME/schemalessTemplate -z $HOSTNAME:2181/solr
5 |
6 | exec /usr/bin/supervisord -c /etc/supervisor/conf.d/supervisord.conf
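7 |
8 | # NOTE: "solr zk upconfig" uploads the local schemalessTemplate configset to
9 | # ZooKeeper so collections can later be created from it; "exec" then replaces
10 | # this shell with supervisord, which runs as PID 1 and manages the services.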
--------------------------------------------------------------------------------
/whitelabel/docker/cdp711/get-docker-cmd.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | echo "Finding whether docker can be run without sudo"
4 |
5 | # attempt to run an innocuous docker command
6 | set +e
7 | docker ps &> /dev/null
8 | EXIT_CODE=$?
9 | set -e
10 |
11 | # check the exit code; if non-zero, assume a permission error
12 | if [ ${EXIT_CODE} -eq 0 ]; then
13 | echo "Command \"docker ps\" succeeded, using \"docker\" as command"
14 | DOCKER_CMD="docker"
15 | else
16 | echo "Command \"docker ps\" failed, using \"sudo docker\" as command"
17 | DOCKER_CMD="sudo docker"
18 | fi
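19 |
20 | # NOTE: DOCKER_CMD is a plain (unexported) shell variable, so this script is
21 | # presumably meant to be sourced rather than executed, e.g.:
22 | #   . ./get-docker-cmd.sh && ${DOCKER_CMD} ps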
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop2/build-and-tag.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -ax  # -a auto-export assigned variables, -x trace commands
4 |
5 | cd docker
6 | docker build . -t registry.gitlab.com/agilefactory/agile.wasp2/wasp-hadoop-vanilla:2
7 |
8 | #docker login registry.gitlab.com/agilefactory/agile.wasp2
9 | #docker push registry.gitlab.com/agilefactory/agile.wasp2/cdh7:base
10 | #docker push registry.gitlab.com/agilefactory/agile.wasp2/cdh7:worker
11 |
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop2/docker/install-scripts/common/apt-clean-caches.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | echo "Cleaning apt caches"
4 |
5 | rm -rf /var/lib/apt/lists/*
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop2/docker/install-scripts/common/bash-defaults.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -o errexit # exit when a command fails
4 | set -o nounset # exit when a script tries to use undeclared variables
5 | set -o xtrace # trace what gets executed
6 | #set -o pipefail # fail pipeline if a command part of a pipeline fails
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop2/docker/install-scripts/configure-spark.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | source ./common/bash-defaults.sh
4 |
5 | echo "Configuring spark classpath"
6 | hadoop classpath --glob | sed 's/:/\n/g' | grep 'jar' | grep -v netty | grep -v jackson | grep -v capacity-scheduler | sed 's|^|cp |g' | sed 's|$| $SPARK_HOME/jars|g' | bash
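7 |
8 | # The pipeline above splits the hadoop classpath on ":", keeps the jar entries
9 | # (excluding netty, jackson and capacity-scheduler), rewrites each one into a
10 | # "cp <jar> $SPARK_HOME/jars" command, and pipes those commands to bash, which
11 | # expands $SPARK_HOME and performs the copies.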
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop2/docker/install-scripts/install-hadoop.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | source ./common/bash-defaults.sh
4 |
5 | wget --progress=bar:force:noscroll -O hadoop.tar.gz https://downloads.apache.org/hadoop/common/stable2/hadoop-2.10.1.tar.gz
6 |
7 | mkdir -p $HADOOP_HOME
8 |
9 | tar -xvf hadoop.tar.gz \
10 | --directory=$HADOOP_HOME \
11 | --strip 1 \
12 | --exclude=hadoop-2.10.1/share/doc
13 |
14 | rm hadoop.tar.gz
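15 |
16 | # NOTE: --strip 1 drops the leading "hadoop-2.10.1/" path component so the
17 | # archive contents land directly in $HADOOP_HOME; --exclude skips the bundled
18 | # documentation to keep the image small.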
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop2/docker/install-scripts/install-kafka.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | source ./common/bash-defaults.sh
3 |
4 | # Download and unpack Kafka 2.6.2
5 | KAFKA_VERSION=2.6.2
6 | KAFKA_PKG_NAME=kafka_2.12-$KAFKA_VERSION
7 | wget --progress=bar:force:noscroll \
8 | --no-check-certificate https://downloads.apache.org/kafka/$KAFKA_VERSION/$KAFKA_PKG_NAME.tgz
9 | tar -xzvf $KAFKA_PKG_NAME.tgz
10 | rm $KAFKA_PKG_NAME.tgz
11 | mv $KAFKA_PKG_NAME $KAFKA_HOME
12 |
13 |
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop2/docker/install-scripts/install-prerequisites.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | source ./common/bash-defaults.sh
4 |
5 | apt-get update
6 | apt-get install -y apt-transport-https
7 | apt-get install -y ca-certificates wget sudo
8 | apt-get install -y openjdk-8-jdk-headless software-properties-common python-software-properties # hadoop prerequisites
9 | apt-get install -y gettext-base
10 | apt-get install -y multitail
11 | apt-get install -y supervisor
12 |
13 | exec bash ./common/apt-clean-caches.sh
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop2/docker/install-scripts/install-solr.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | source ./common/bash-defaults.sh
3 |
4 | # Download and unpack Solr 8
5 | SOLR_VERSION=8.5.1
6 | SOLR_PKG_NAME=solr-$SOLR_VERSION
7 |
8 | wget --progress=bar:force:noscroll --no-check-certificate https://archive.apache.org/dist/lucene/solr/$SOLR_VERSION/$SOLR_PKG_NAME.tgz
9 | tar -xzvf $SOLR_PKG_NAME.tgz
10 | rm $SOLR_PKG_NAME.tgz
11 | mv $SOLR_PKG_NAME $SOLR_HOME
12 |
13 | cp $SOLR_HOME/server/solr/solr.xml $SOLR_HOME/
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop2/docker/install-scripts/install-spark.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | source ./common/bash-defaults.sh
3 |
4 | # Download and unpack Spark 2.4.7
5 | SPARK_VERSION=2.4.7
6 | SPARK_PKG_NAME=spark-$SPARK_VERSION-bin-without-hadoop
7 | wget --progress=bar:force:noscroll \
8 | --no-check-certificate https://archive.apache.org/dist/spark/spark-$SPARK_VERSION/$SPARK_PKG_NAME.tgz
9 |
10 | tar -xzvf $SPARK_PKG_NAME.tgz
11 |
12 | rm $SPARK_PKG_NAME.tgz
13 | mv $SPARK_PKG_NAME $SPARK_HOME
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop2/docker/templates/hadoop/core-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <configuration>
3 |   <property>
4 |     <name>fs.defaultFS</name>
5 |     <value>hdfs://$HOSTNAME:9000</value>
6 |     <description>The name of the default file system. A URI whose
7 |     scheme and authority determine the FileSystem implementation. The
8 |     uri's scheme determines the config property (fs.SCHEME.impl) naming
9 |     the FileSystem implementation class. The uri's authority is used to
10 |     determine the host, port, etc. for a filesystem.
11 |     </description>
12 |   </property>
13 | </configuration>
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop2/docker/templates/hbase/regionservers:
--------------------------------------------------------------------------------
1 | $HOSTNAME
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop2/get-docker-cmd.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | echo "Finding whether docker can be run without sudo"
4 |
5 | # attempt to run an innocuous docker command
6 | set +e
7 | docker ps &> /dev/null
8 | EXIT_CODE=$?
9 | set -e
10 |
11 | # check the exit code; if non-zero, assume a permission error
12 | if [ ${EXIT_CODE} -eq 0 ]; then
13 | echo "Command \"docker ps\" succeeded, using \"docker\" as command"
14 | DOCKER_CMD="docker"
15 | else
16 | echo "Command \"docker ps\" failed, using \"sudo docker\" as command"
17 | DOCKER_CMD="sudo docker"
18 | fi
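19 |
20 | # NOTE: DOCKER_CMD is a plain (unexported) shell variable, so this script is
21 | # presumably meant to be sourced rather than executed, e.g.:
22 | #   . ./get-docker-cmd.sh && ${DOCKER_CMD} ps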
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop2/log4j-consumer.properties:
--------------------------------------------------------------------------------
1 | log4j.rootLogger=WARN,stdout
2 | log4j.logger.it.agilelab=ERROR
3 | log4j.logger.akka.cluster=DEBUG
4 |
5 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
6 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
7 | log4j.appender.stdout.layout.ConversionPattern=[consumer] %-5p %d %c: %m%n
8 |
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop2/log4j-master.properties:
--------------------------------------------------------------------------------
1 | log4j.rootLogger=WARN,stdout
2 | log4j.logger.it.agilelab=ERROR
3 | log4j.logger.akka.cluster=DEBUG
4 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
5 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.stdout.layout.ConversionPattern=[master] %-5p %d %c: %m%n
7 |
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop2/log4j-producer.properties:
--------------------------------------------------------------------------------
1 | log4j.rootLogger=WARN,stdout
2 | log4j.logger.it.agilelab=ERROR
3 | log4j.logger.akka.cluster=DEBUG
4 |
5 |
6 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
7 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
8 | log4j.appender.stdout.layout.ConversionPattern=[producer] %-5p %d %c: %m%n
9 |
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop2/log4j-single.properties:
--------------------------------------------------------------------------------
1 | log4j.rootLogger=WARN,stdout
2 | log4j.logger.it.agilelab=ERROR
3 | log4j.logger.akka.cluster=DEBUG
4 |
5 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
6 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
7 | log4j.appender.stdout.layout.ConversionPattern=[single] %-5p %d %c: %m%n
8 |
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop3/docker/install-scripts/common/apt-clean-caches.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | echo "Cleaning apt caches"
4 |
5 | rm -rf /var/lib/apt/lists/*
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop3/docker/install-scripts/common/bash-defaults.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -o errexit # exit when a command fails
4 | set -o nounset # exit when a script tries to use undeclared variables
5 | set -o xtrace # trace what gets executed
6 | #set -o pipefail # fail pipeline if a command part of a pipeline fails
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop3/docker/install-scripts/configure-spark.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | source ./common/bash-defaults.sh
4 |
5 | echo "Configuring spark classpath"
6 |
7 | hadoop classpath --glob | sed 's/:/\n/g' | grep 'jar' | grep -v netty | grep -v jackson | sed 's|^|cp |g' | sed 's|$| $SPARK_HOME/jars|g' | bash
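8 |
9 | # The pipeline above splits the hadoop classpath on ":", keeps the jar entries
10 | # (excluding netty and jackson), rewrites each one into a
11 | # "cp <jar> $SPARK_HOME/jars" command, and pipes those commands to bash, which
12 | # expands $SPARK_HOME and performs the copies.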
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop3/docker/install-scripts/install-hadoop.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | source ./common/bash-defaults.sh
4 |
5 | wget --progress=bar:force:noscroll -O hadoop.tar.gz http://www-eu.apache.org/dist/hadoop/common/hadoop-3.2.2/hadoop-3.2.2.tar.gz
6 |
7 | mkdir -p $HADOOP_HOME
8 |
9 | tar -xvf hadoop.tar.gz \
10 | --directory=$HADOOP_HOME \
11 | --strip 1 \
12 | --exclude=hadoop-3.2.2/share/doc
13 |
14 | rm hadoop.tar.gz
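15 |
16 | # NOTE: --strip 1 drops the leading "hadoop-3.2.2/" path component so the
17 | # archive contents land directly in $HADOOP_HOME; --exclude skips the bundled
18 | # documentation to keep the image small.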
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop3/docker/install-scripts/install-kafka.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | source ./common/bash-defaults.sh
3 |
4 | # Download and unpack Kafka 2.8.1
5 | KAFKA_VERSION=2.8.1
6 | KAFKA_PKG_NAME=kafka_2.12-$KAFKA_VERSION
7 | wget --progress=bar:force:noscroll \
8 | --no-check-certificate https://downloads.apache.org/kafka/$KAFKA_VERSION/$KAFKA_PKG_NAME.tgz
9 | tar -xzvf $KAFKA_PKG_NAME.tgz
10 | rm $KAFKA_PKG_NAME.tgz
11 | mv $KAFKA_PKG_NAME $KAFKA_HOME
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop3/docker/install-scripts/install-minio.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | source ./common/bash-defaults.sh
4 |
5 | wget https://dl.min.io/server/minio/release/linux-amd64/minio -O /minio
6 | chmod +x /minio
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop3/docker/install-scripts/install-mongo.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | source ./common/bash-defaults.sh
3 | export DEBIAN_FRONTEND=noninteractive
4 | rm -f /etc/mongod.conf  # -f: do not fail under errexit if the file is absent
5 | apt-get install -y gnupg
6 | wget -qO - https://www.mongodb.org/static/pgp/server-5.0.asc | sudo apt-key add -
7 | echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/5.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-5.0.list
8 | apt-get update
9 | apt-get install -y mongodb-org
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop3/docker/install-scripts/install-prerequisites.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | source ./common/bash-defaults.sh
4 |
5 | apt-get update
6 | apt-get install -y apt-transport-https
7 | apt-get install -y ca-certificates wget sudo
8 | apt-get install -y openjdk-8-jdk-headless software-properties-common # hadoop prerequisites
9 | apt-get install -y gettext-base
10 | apt-get install -y multitail
11 | apt-get install -y supervisor
12 | apt-get install -y awscli
13 |
14 | apt-get update
15 | apt-get install -y python3
16 | apt-get install -y python3-flask python3-boto3
17 |
18 | exec bash ./common/apt-clean-caches.sh
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop3/docker/install-scripts/install-solr.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | source ./common/bash-defaults.sh
3 |
4 | # Download and unpack Solr 8
5 | SOLR_VERSION=8.5.1
6 | SOLR_PKG_NAME=solr-$SOLR_VERSION
7 |
8 | wget --progress=bar:force:noscroll --no-check-certificate https://archive.apache.org/dist/lucene/solr/$SOLR_VERSION/$SOLR_PKG_NAME.tgz
9 | tar -xzvf $SOLR_PKG_NAME.tgz
10 | rm $SOLR_PKG_NAME.tgz
11 | mv $SOLR_PKG_NAME $SOLR_HOME
12 |
13 | cp $SOLR_HOME/server/solr/solr.xml $SOLR_HOME/
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop3/docker/install-scripts/install-spark.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | source ./common/bash-defaults.sh
3 |
4 | # Download and unpack Spark 2.4.2 (the only 2.4.x release whose default binaries were built with Scala 2.12)
5 | SPARK_VERSION=2.4.2
6 | SPARK_PKG_NAME=spark-$SPARK_VERSION-bin-without-hadoop
7 | wget --progress=bar:force:noscroll \
8 | --no-check-certificate https://archive.apache.org/dist/spark/spark-$SPARK_VERSION/$SPARK_PKG_NAME.tgz
9 |
10 | tar -xzvf $SPARK_PKG_NAME.tgz
11 |
12 | rm $SPARK_PKG_NAME.tgz
13 | mv $SPARK_PKG_NAME $SPARK_HOME
14 |
15 |
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop3/docker/templates/hadoop/core-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <configuration>
3 |   <property>
4 |     <name>fs.defaultFS</name>
5 |     <value>hdfs://$HOSTNAME:9000</value>
6 |     <description>The name of the default file system. A URI whose
7 |     scheme and authority determine the FileSystem implementation. The
8 |     uri's scheme determines the config property (fs.SCHEME.impl) naming
9 |     the FileSystem implementation class. The uri's authority is used to
10 |     determine the host, port, etc. for a filesystem.
11 |     </description>
12 |   </property>
13 | </configuration>
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop3/docker/templates/hbase/regionservers:
--------------------------------------------------------------------------------
1 | $HOSTNAME
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop3/get-docker-cmd.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | echo "Finding whether docker can be run without sudo"
4 |
5 | # attempt to run an innocuous docker command
6 | set +e
7 | docker ps &> /dev/null
8 | EXIT_CODE=$?
9 | set -e
10 |
11 | # check the exit code; if non-zero, assume a permission error
12 | if [ ${EXIT_CODE} -eq 0 ]; then
13 | echo "Command \"docker ps\" succeeded, using \"docker\" as command"
14 | DOCKER_CMD="docker"
15 | else
16 | echo "Command \"docker ps\" failed, using \"sudo docker\" as command"
17 | DOCKER_CMD="sudo docker"
18 | fi
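19 |
20 | # NOTE: DOCKER_CMD is a plain (unexported) shell variable, so this script is
21 | # presumably meant to be sourced rather than executed, e.g.:
22 | #   . ./get-docker-cmd.sh && ${DOCKER_CMD} ps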
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop3/log4j-consumer.properties:
--------------------------------------------------------------------------------
1 | log4j.rootLogger=WARN,stdout
2 | log4j.logger.it.agilelab=INFO
3 | log4j.logger.akka.cluster=DEBUG
4 |
5 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
6 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
7 | log4j.appender.stdout.layout.ConversionPattern=[consumer] %-5p %d %c: %m%n
8 |
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop3/log4j-master.properties:
--------------------------------------------------------------------------------
1 | log4j.rootLogger=WARN,stdout
2 | log4j.logger.it.agilelab=ERROR
3 | log4j.logger.akka.cluster=DEBUG
4 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
5 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.stdout.layout.ConversionPattern=[master] %-5p %d %c: %m%n
7 |
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla-hadoop3/log4j-producer.properties:
--------------------------------------------------------------------------------
1 | log4j.rootLogger=WARN,stdout
2 | log4j.logger.it.agilelab=ERROR
3 | log4j.logger.akka.cluster=DEBUG
4 |
5 |
6 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
7 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
8 | log4j.appender.stdout.layout.ConversionPattern=[producer] %-5p %d %c: %m%n
9 |
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla2.12-hadoop2/build-and-tag.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -ax  # -a auto-export assigned variables, -x trace commands
4 |
5 | cd docker
6 | docker build . -t registry.gitlab.com/agilefactory/agile.wasp2/wasp-hadoop-vanilla-2.12:2
7 |
8 | #docker login registry.gitlab.com/agilefactory/agile.wasp2
9 | #docker push registry.gitlab.com/agilefactory/agile.wasp2/cdh7:base
10 | #docker push registry.gitlab.com/agilefactory/agile.wasp2/cdh7:worker
11 |
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla2.12-hadoop2/docker/install-scripts/common/apt-clean-caches.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | echo "Cleaning apt caches"
4 |
5 | rm -rf /var/lib/apt/lists/*
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla2.12-hadoop2/docker/install-scripts/common/bash-defaults.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -o errexit # exit when a command fails
4 | set -o nounset # exit when a script tries to use undeclared variables
5 | set -o xtrace # trace what gets executed
6 | #set -o pipefail # fail pipeline if a command part of a pipeline fails
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla2.12-hadoop2/docker/install-scripts/configure-spark.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | source ./common/bash-defaults.sh
4 |
5 | echo "Configuring spark classpath"
6 | hadoop classpath --glob | sed 's/:/\n/g' | grep 'jar' | grep -v netty | grep -v jackson | grep -v capacity-scheduler | sed 's|^|cp |g' | sed 's|$| $SPARK_HOME/jars|g' | bash
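7 |
8 | # The pipeline above splits the hadoop classpath on ":", keeps the jar entries
9 | # (excluding netty, jackson and capacity-scheduler), rewrites each one into a
10 | # "cp <jar> $SPARK_HOME/jars" command, and pipes those commands to bash, which
11 | # expands $SPARK_HOME and performs the copies.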
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla2.12-hadoop2/docker/install-scripts/install-hadoop.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | source ./common/bash-defaults.sh
4 |
5 | wget --progress=bar:force:noscroll -O hadoop.tar.gz https://downloads.apache.org/hadoop/common/stable2/hadoop-2.10.1.tar.gz
6 |
7 | mkdir -p $HADOOP_HOME
8 |
9 | tar -xvf hadoop.tar.gz \
10 | --directory=$HADOOP_HOME \
11 | --strip 1 \
12 | --exclude=hadoop-2.10.1/share/doc
13 |
14 | rm hadoop.tar.gz
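15 |
16 | # NOTE: --strip 1 drops the leading "hadoop-2.10.1/" path component so the
17 | # archive contents land directly in $HADOOP_HOME; --exclude skips the bundled
18 | # documentation to keep the image small.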
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla2.12-hadoop2/docker/install-scripts/install-kafka.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | source ./common/bash-defaults.sh
3 |
4 | # Download and unpack Kafka 2.6.2
5 | KAFKA_VERSION=2.6.2
6 | KAFKA_PKG_NAME=kafka_2.12-$KAFKA_VERSION
7 | wget --progress=bar:force:noscroll \
8 | --no-check-certificate https://downloads.apache.org/kafka/$KAFKA_VERSION/$KAFKA_PKG_NAME.tgz
9 | tar -xzvf $KAFKA_PKG_NAME.tgz
10 | rm $KAFKA_PKG_NAME.tgz
11 | mv $KAFKA_PKG_NAME $KAFKA_HOME
12 |
13 |
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla2.12-hadoop2/docker/install-scripts/install-prerequisites.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | source ./common/bash-defaults.sh
4 |
5 | apt-get update
6 | apt-get install -y apt-transport-https
7 | apt-get install -y ca-certificates wget sudo
8 | apt-get install -y openjdk-8-jdk-headless software-properties-common python-software-properties # hadoop prerequisites
9 | apt-get install -y gettext-base
10 | apt-get install -y multitail
11 | apt-get install -y supervisor
12 |
13 | exec bash ./common/apt-clean-caches.sh
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla2.12-hadoop2/docker/install-scripts/install-solr.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | source ./common/bash-defaults.sh
3 |
4 | # Download and unpack Solr 8
5 | SOLR_VERSION=8.5.1
6 | SOLR_PKG_NAME=solr-$SOLR_VERSION
7 |
8 | wget --progress=bar:force:noscroll --no-check-certificate https://archive.apache.org/dist/lucene/solr/$SOLR_VERSION/$SOLR_PKG_NAME.tgz
9 | tar -xzvf $SOLR_PKG_NAME.tgz
10 | rm $SOLR_PKG_NAME.tgz
11 | mv $SOLR_PKG_NAME $SOLR_HOME
12 |
13 | cp $SOLR_HOME/server/solr/solr.xml $SOLR_HOME/
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla2.12-hadoop2/docker/install-scripts/install-spark.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | source ./common/bash-defaults.sh
3 |
4 | # Download and unpack Spark 2.4.7
5 | SPARK_VERSION=2.4.7
6 | SPARK_PKG_NAME=spark-$SPARK_VERSION-bin-without-hadoop-scala-2.12
7 | wget --progress=bar:force:noscroll \
8 | --no-check-certificate https://archive.apache.org/dist/spark/spark-$SPARK_VERSION/$SPARK_PKG_NAME.tgz
9 |
10 | tar -xzvf $SPARK_PKG_NAME.tgz
11 |
12 | rm $SPARK_PKG_NAME.tgz
13 | mv $SPARK_PKG_NAME $SPARK_HOME
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla2.12-hadoop2/docker/templates/hadoop/core-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <configuration>
3 |   <property>
4 |     <name>fs.defaultFS</name>
5 |     <value>hdfs://$HOSTNAME:9000</value>
6 |     <description>The name of the default file system. A URI whose
7 |     scheme and authority determine the FileSystem implementation. The
8 |     uri's scheme determines the config property (fs.SCHEME.impl) naming
9 |     the FileSystem implementation class. The uri's authority is used to
10 |     determine the host, port, etc. for a filesystem.
11 |     </description>
12 |   </property>
13 | </configuration>
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla2.12-hadoop2/docker/templates/hbase/regionservers:
--------------------------------------------------------------------------------
1 | $HOSTNAME
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla2.12-hadoop2/get-docker-cmd.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | echo "Finding whether docker can be run without sudo"
4 |
5 | # attempt to run an innocuous docker command
6 | set +e
7 | docker ps &> /dev/null
8 | EXIT_CODE=$?
9 | set -e
10 |
11 | # check the exit code; if non-zero, assume a permission error
12 | if [ ${EXIT_CODE} -eq 0 ]; then
13 | echo "Command \"docker ps\" succeeded, using \"docker\" as command"
14 | DOCKER_CMD="docker"
15 | else
16 | echo "Command \"docker ps\" failed, using \"sudo docker\" as command"
17 | DOCKER_CMD="sudo docker"
18 | fi
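19 |
20 | # NOTE: DOCKER_CMD is a plain (unexported) shell variable, so this script is
21 | # presumably meant to be sourced rather than executed, e.g.:
22 | #   . ./get-docker-cmd.sh && ${DOCKER_CMD} ps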
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla2.12-hadoop2/log4j-consumer.properties:
--------------------------------------------------------------------------------
1 | log4j.rootLogger=WARN,stdout
2 | log4j.logger.it.agilelab=ERROR
3 | log4j.logger.akka.cluster=DEBUG
4 |
5 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
6 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
7 | log4j.appender.stdout.layout.ConversionPattern=[consumer] %-5p %d %c: %m%n
8 |
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla2.12-hadoop2/log4j-master.properties:
--------------------------------------------------------------------------------
1 | log4j.rootLogger=WARN,stdout
2 | log4j.logger.it.agilelab=ERROR
3 | log4j.logger.akka.cluster=DEBUG
4 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
5 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.stdout.layout.ConversionPattern=[master] %-5p %d %c: %m%n
7 |
--------------------------------------------------------------------------------
/whitelabel/docker/vanilla2.12-hadoop2/log4j-producer.properties:
--------------------------------------------------------------------------------
1 | log4j.rootLogger=WARN,stdout
2 | log4j.logger.it.agilelab=ERROR
3 | log4j.logger.akka.cluster=DEBUG
4 |
5 |
6 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
7 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
8 | log4j.appender.stdout.layout.ConversionPattern=[producer] %-5p %d %c: %m%n
9 |
--------------------------------------------------------------------------------
/whitelabel/master/build.sbt:
--------------------------------------------------------------------------------
1 | /* !!! Custom Node Launcher usage: Add this in standalone applications !!! */
2 | Compile / mainClass := Some("it.agilelab.bigdata.wasp.whitelabel.master.launcher.MasterNodeLauncher")
--------------------------------------------------------------------------------
/whitelabel/models/src/main/scala/it/agilelab/bigdata/wasp/whitelabel/models/test/TestDocumentEncoder.scala:
--------------------------------------------------------------------------------
1 | package it.agilelab.bigdata.wasp.whitelabel.models.test
2 |
3 | case class TestDocumentEncoder(id: String,
4 | number: Int,
5 | nested: TestNestedDocument)
6 |
--------------------------------------------------------------------------------
/whitelabel/models/src/main/scala/it/agilelab/bigdata/wasp/whitelabel/models/test/TestEnrichmentModel.scala:
--------------------------------------------------------------------------------
1 | package it.agilelab.bigdata.wasp.whitelabel.models.test
2 |
3 | case class TestEnrichmentModel(id: String, exampleAuthor: String, timestamp: Long)
4 |
5 | case class TestEnrichmentResponseModel(id: String, text: String)
6 |
--------------------------------------------------------------------------------
/whitelabel/models/src/main/scala/it/agilelab/bigdata/wasp/whitelabel/models/test/TestFreeCodeModels.scala:
--------------------------------------------------------------------------------
1 | package it.agilelab.bigdata.wasp.whitelabel.models.test
2 |
3 | import it.agilelab.bigdata.wasp.models.FreeCodeModel
4 |
5 | object TestFreeCodeModels {
6 |
7 | val testFreeCode: FreeCodeModel = FreeCodeModel("test-freecode",
8 | """
9 | | dataFrames.head._2
10 | |""".stripMargin)
11 | }
12 |
--------------------------------------------------------------------------------
/whitelabel/models/src/main/scala/it/agilelab/bigdata/wasp/whitelabel/models/test/TestSqlSourceModel.scala:
--------------------------------------------------------------------------------
1 | package it.agilelab.bigdata.wasp.whitelabel.models.test
2 |
3 | import it.agilelab.bigdata.wasp.models.SqlSourceModel
4 |
5 | private[wasp] object TestSqlSouceModel {
6 |
7 | lazy val mySql = SqlSourceModel(
8 | name = "TestMySqlModel",
9 | connectionName = "mysql", // must be present in the jdbc-subConfig
10 | dbtable = "test_table",
11 | partitioningInfo = None,
12 | numPartitions = None,
13 | fetchSize = None
14 | )
15 | }
--------------------------------------------------------------------------------
/whitelabel/producers/build.sbt:
--------------------------------------------------------------------------------
1 | /* !!! Custom Node Launcher usage: Add this in standalone applications !!! */
2 | Compile / mainClass := Some("it.agilelab.bigdata.wasp.whitelabel.producers.launcher.ProducersNodeLauncher")
--------------------------------------------------------------------------------