├── .gitignore ├── LICENSE ├── README.md ├── docker ├── jupyter-all-spark │ ├── Dockerfile │ └── docker-compose.yml ├── jupyter-base │ ├── Dockerfile │ ├── docker-compose.yml │ └── scripts │ │ └── jupyter-cmd.sh ├── jupyter-bloodhound │ ├── Dockerfile │ ├── docker-compose.yml │ └── notebooks │ │ ├── bloodhound_alternative_visualizations.ipynb │ │ └── playbook_kerberoastable_users.ipynb ├── jupyter-graphframes │ └── Dockerfile ├── jupyter-hunt │ ├── Dockerfile │ ├── docker-compose.yml │ └── spark │ │ └── spark-defaults.conf ├── jupyter-pwsh │ ├── Dockerfile │ └── docker-compose.yml ├── jupyter-pyspark │ ├── Dockerfile │ ├── docker-compose.yml │ ├── kernels │ │ └── pyspark_kernel.json │ └── spark │ │ ├── log4j.properties │ │ └── spark-defaults.conf └── jupyter-rto │ ├── Dockerfile │ └── docker-compose.yml ├── docs ├── Makefile ├── build │ ├── doctrees │ │ ├── docker.doctree │ │ ├── environment.pickle │ │ ├── index.doctree │ │ ├── jupyter.doctree │ │ ├── jupyter_hunt.doctree │ │ ├── jupyter_rto.doctree │ │ ├── jupyter_spark.doctree │ │ ├── license.doctree │ │ └── zeppelin.doctree │ └── html │ │ ├── .buildinfo │ │ ├── .nojekyll │ │ ├── _images │ │ ├── docker-containers.png │ │ ├── jupyter-design.png │ │ ├── jupyter-evolution.png │ │ ├── jupyter-login.png │ │ ├── jupyter-main.png │ │ └── jupyter-samples.png │ │ ├── _sources │ │ ├── docker.rst.txt │ │ ├── index.rst.txt │ │ ├── jupyter.rst.txt │ │ ├── jupyter_hunt.rst.txt │ │ ├── jupyter_rto.rst.txt │ │ ├── jupyter_spark.rst.txt │ │ ├── license.rst.txt │ │ └── zeppelin.rst.txt │ │ ├── _static │ │ ├── ajax-loader.gif │ │ ├── basic.css │ │ ├── comment-bright.png │ │ ├── comment-close.png │ │ ├── comment.png │ │ ├── css │ │ │ ├── badge_only.css │ │ │ └── theme.css │ │ ├── docker-containers.png │ │ ├── doctools.js │ │ ├── documentation_options.js │ │ ├── down-pressed.png │ │ ├── down.png │ │ ├── file.png │ │ ├── fonts │ │ │ ├── Inconsolata-Bold.ttf │ │ │ ├── Inconsolata-Regular.ttf │ │ │ ├── Inconsolata.ttf │ │ │ ├── 
Lato-Bold.ttf │ │ │ ├── Lato-Regular.ttf │ │ │ ├── Lato │ │ │ │ ├── lato-bold.eot │ │ │ │ ├── lato-bold.ttf │ │ │ │ ├── lato-bold.woff │ │ │ │ ├── lato-bold.woff2 │ │ │ │ ├── lato-bolditalic.eot │ │ │ │ ├── lato-bolditalic.ttf │ │ │ │ ├── lato-bolditalic.woff │ │ │ │ ├── lato-bolditalic.woff2 │ │ │ │ ├── lato-italic.eot │ │ │ │ ├── lato-italic.ttf │ │ │ │ ├── lato-italic.woff │ │ │ │ ├── lato-italic.woff2 │ │ │ │ ├── lato-regular.eot │ │ │ │ ├── lato-regular.ttf │ │ │ │ ├── lato-regular.woff │ │ │ │ └── lato-regular.woff2 │ │ │ ├── RobotoSlab-Bold.ttf │ │ │ ├── RobotoSlab-Regular.ttf │ │ │ ├── RobotoSlab │ │ │ │ ├── roboto-slab-v7-bold.eot │ │ │ │ ├── roboto-slab-v7-bold.ttf │ │ │ │ ├── roboto-slab-v7-bold.woff │ │ │ │ ├── roboto-slab-v7-bold.woff2 │ │ │ │ ├── roboto-slab-v7-regular.eot │ │ │ │ ├── roboto-slab-v7-regular.ttf │ │ │ │ ├── roboto-slab-v7-regular.woff │ │ │ │ └── roboto-slab-v7-regular.woff2 │ │ │ ├── fontawesome-webfont.eot │ │ │ ├── fontawesome-webfont.svg │ │ │ ├── fontawesome-webfont.ttf │ │ │ ├── fontawesome-webfont.woff │ │ │ └── fontawesome-webfont.woff2 │ │ ├── jquery-3.2.1.js │ │ ├── jquery.js │ │ ├── js │ │ │ ├── modernizr.min.js │ │ │ └── theme.js │ │ ├── jupyter-design.png │ │ ├── jupyter-evolution.png │ │ ├── jupyter-installed-token.png │ │ ├── jupyter-login.png │ │ ├── jupyter-main.png │ │ ├── jupyter-samples.png │ │ ├── language_data.js │ │ ├── minus.png │ │ ├── plus.png │ │ ├── pygments.css │ │ ├── searchtools.js │ │ ├── underscore-1.3.1.js │ │ ├── underscore.js │ │ ├── up-pressed.png │ │ ├── up.png │ │ └── websupport.js │ │ ├── docker.html │ │ ├── genindex.html │ │ ├── index.html │ │ ├── jupyter.html │ │ ├── jupyter_hunt.html │ │ ├── jupyter_rto.html │ │ ├── jupyter_spark.html │ │ ├── license.html │ │ ├── objects.inv │ │ ├── search.html │ │ ├── searchindex.js │ │ └── zeppelin.html ├── make.bat └── source │ ├── _static │ ├── docker-containers.png │ ├── jupyter-design.png │ ├── jupyter-evolution.png │ ├── jupyter-installed-token.png │ 
├── jupyter-login.png │ ├── jupyter-main.png │ └── jupyter-samples.png │ ├── conf.py │ ├── docker.rst │ ├── index.rst │ ├── jupyter.rst │ ├── jupyter_hunt.rst │ ├── jupyter_rto.rst │ ├── jupyter_spark.rst │ ├── license.rst │ └── zeppelin.rst └── scripts └── docker_install.sh /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | docker/jupyter-base/.DS_Store 3 | docker/jupyter-hunt/.DS_Store 4 | docker/jupyter-rto/.DS_Store 5 | docs/.DS_Store 6 | docs/source/.DS_Store 7 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Notebooks Forge 2 | 3 | A project dedicated to build and provide ``Notebooks`` servers for ``Defensive`` and ``Offensive`` operators to: 4 | 5 | * Design playbooks 6 | * Demonstrate how techniques can be used 7 | * Showcase when and why an operator would want to use a technique 8 | * Document engagements procedures 9 | * Prototype new ways to analyze data extracted from endpoints in a more dynamic, flexible and language-agnostic way. 10 | 11 | This project supports two notebook server types such as [Jupyter](https://jupyter.org/) and [Zeppelin](https://zeppelin.apache.org/) notebooks. 12 | 13 | ## What is a Notebook? 14 | 15 | Think of a notebook as a document that you can access via a web interface that allows you to save input (i.e live code) and output (i.e code execution results) of interactive sessions as well as important notes needed to explain the methodology and steps taken to perform specific tasks (i.e data analysis). 16 | 17 | # Current Status: Alpha 18 | 19 | The project is currently in an alpha stage, which means that the code and the functionality are still changing. We haven't yet tested the system with large data sources and in many scenarios. We invite you to try it and welcome any feedback. 
20 | 21 | # Getting Started 22 | 23 | * [Wiki](https://notebooks-forge.readthedocs.io/en/latest/index.html) 24 | 25 | # Authors 26 | 27 | * Roberto Rodriguez [@Cyb3rWard0g](https://twitter.com/Cyb3rWard0g) 28 | * Jose Luis Rodriguez [@Cyb3rPandaH](https://twitter.com/Cyb3rPandaH) 29 | 30 | # Contributors 31 | 32 | # Contributing 33 | 34 | There are a few things that we would like to accomplish with this repo as shown in the To-Do list below. If you would like to contribute, please open an issue to track the request and then a PR when you are confident it would not conflict with the current build. 35 | 36 | # License: GPL-3.0 37 | 38 | [ Notebooks Forge GNU General Public License](https://github.com/Cyb3rWard0g/notebooks-forge/blob/master/LICENSE) 39 | 40 | # To-Do 41 | 42 | - [ ] Zeppelin Notebooks support 43 | - [ ] Implementation of Cypher for Apache Spark with Zeppelin 44 | - [ ] Create examples for offensive and defensive use cases 45 | - [X] Logo 46 | 47 | More coming soon... -------------------------------------------------------------------------------- /docker/jupyter-all-spark/Dockerfile: -------------------------------------------------------------------------------- 1 | # Notebooks Forge script: Jupyter All Spark Environment Dockerfile 2 | # Author: Roberto Rodriguez (@Cyb3rWard0g) 3 | # License: GPL-3.0 4 | 5 | FROM cyb3rward0g/jupyter-pyspark:0.0.6 6 | LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g" 7 | LABEL description="Notebooks Forge Jupyter Project." 
8 | 9 | ENV DEBIAN_FRONTEND noninteractive 10 | 11 | USER root 12 | 13 | # *********** Installing Prerequisites *************** 14 | # -qq : No output except for errors 15 | RUN apt-get update -qq \ 16 | # ********** Installing R Notebooks Dependencies ********* 17 | && apt-get install -qqy fonts-dejavu tzdata gfortran gcc \ 18 | && ln -s /bin/tar /bin/gtar \ 19 | && apt-get -qy clean autoremove \ 20 | && rm -rf /var/lib/apt/lists/* 21 | 22 | USER $USER 23 | # **** Current Channels *********** 24 | #- https://repo.anaconda.com/pkgs/main/linux-64 25 | #- https://repo.anaconda.com/pkgs/main/noarch 26 | #- https://repo.anaconda.com/pkgs/free/linux-64 27 | #- https://repo.anaconda.com/pkgs/free/noarch 28 | #- https://repo.anaconda.com/pkgs/r/linux-64 29 | #- https://repo.anaconda.com/pkgs/r/noarch 30 | RUN conda install --quiet --yes \ 31 | # Installing Scala Kernel 32 | 'spylon-kernel=0.4.1' \ 33 | # ********** R Dependencies ************** 34 | 'r-base=3.6.1' \ 35 | 'r-irkernel=1.0.2' \ 36 | 'r-ggplot2=3.2.1' \ 37 | 'r-sparklyr=1.0.5' \ 38 | 'r-rcurl=1.95*' \ 39 | # *********** Clean ***************** 40 | && conda clean -tipy \ 41 | && conda build purge-all \ 42 | && rm -rf /home/$USER/.cache/yarn \ 43 | && python3 -m pip install --upgrade pip \ 44 | # *********** Install Scala Kernel ************* 45 | && python3 -m spylon_kernel install --sys-prefix -------------------------------------------------------------------------------- /docker/jupyter-all-spark/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.5' 2 | 3 | services: 4 | jupyter-all-spark: 5 | image: cyb3rward0g/jupyter-all-spark:0.0.1 6 | container_name: jupyter-all-spark 7 | environment: 8 | JUPYTER_TYPE: lab 9 | JUPYTER_BASE_URL: /jupyter 10 | ports: 11 | - "8888:8888" 12 | restart: always 13 | networks: 14 | hunting: 15 | 16 | networks: 17 | hunting: 18 | driver: bridge 19 | 20 | volumes: 21 | notebooks: 22 | driver: local 
-------------------------------------------------------------------------------- /docker/jupyter-base/Dockerfile: -------------------------------------------------------------------------------- 1 | # Notebooks forge script: Notebooks Forge Docker Image 2 | # Author: Roberto Rodriguez (@Cyb3rWard0g) 3 | # License: GPL-3.0 4 | # References: 5 | # https://github.com/jupyter/docker-stacks/blob/master/all-spark-notebook/Dockerfile 6 | # https://derflounder.wordpress.com/2016/07/11/editing-etcsudoers-to-manage-sudo-rights-for-users-and-groups/ 7 | 8 | FROM phusion/baseimage:0.11 9 | LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g" 10 | LABEL description="Dockerfile Notebooks Forge Base Image.." 11 | 12 | ENV DEBIAN_FRONTEND noninteractive 13 | 14 | USER root 15 | 16 | # **** Set ARG Values **** 17 | ARG JUPYTER_USER=jupyter 18 | ARG JUPYTER_UID=810 19 | ARG JUPYTER_GID=810 20 | 21 | # *********** Setting Environment Variables *************** 22 | ENV LANG=C.UTF-8 LC_ALL=C.UTF-8 23 | ENV JUPYTER_DIR=/opt/jupyter 24 | ENV CONDA_DIR=/opt/conda 25 | ENV PATH /opt/conda/bin:$PATH 26 | # ********** Jupyter User ****** 27 | ENV USER ${JUPYTER_USER} 28 | ENV JUPYTER_UID ${JUPYTER_UID} 29 | ENV HOME /home/${JUPYTER_USER} 30 | ENV JUPYTER_GID=$JUPYTER_GID 31 | 32 | # *********** Installing Prerequisites *************** 33 | # ********** Installing Initial Requirements *************** 34 | RUN apt-get update --fix-missing && apt-get install -y --no-install-recommends \ 35 | wget sudo nano bzip2 ca-certificates libglib2.0-0 libxext6 libsm6 libxrender1 \ 36 | git mercurial subversion unzip zip \ 37 | # ********** Adding Jupyter User ************** 38 | && echo "auth requisite pam_deny.so" >> /etc/pam.d/su \ 39 | # remove the sudo rights for all users with admin privileges or in admin group 40 | && sed -i.bak -e 's/^%admin/#%admin/' /etc/sudoers \ 41 | && sed -i.bak -e 's/^%sudo/#%sudo/' /etc/sudoers \ 42 | && groupadd -g ${JUPYTER_GID} ${JUPYTER_USER} \ 43 | && useradd -m -s 
/bin/bash -u ${JUPYTER_UID} -g ${JUPYTER_GID} ${JUPYTER_USER} \ 44 | && bash -c 'mkdir -pv /opt/jupyter/{notebooks,scripts}' \ 45 | # ********** Clean APT ********** 46 | && apt-get -qy clean autoremove \ 47 | && rm -rf /var/lib/apt/lists/* \ 48 | && chown -R ${USER} /opt ${HOME} 49 | 50 | USER ${USER} 51 | # *********** Install Miniconda3 ******************** 52 | # **** Current Channels *********** 53 | #- https://repo.anaconda.com/pkgs/main/linux-64 54 | #- https://repo.anaconda.com/pkgs/main/noarch 55 | #- https://repo.anaconda.com/pkgs/free/linux-64 56 | #- https://repo.anaconda.com/pkgs/free/noarch 57 | #- https://repo.anaconda.com/pkgs/r/linux-64 58 | #- https://repo.anaconda.com/pkgs/r/noarch 59 | # ** Conda Issue ** 60 | # https://github.com/ContinuumIO/anaconda-issues/issues/11148 61 | RUN mkdir /home/${USER}/.conda \ 62 | && cd /tmp \ 63 | && wget --quiet https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/anaconda.sh \ 64 | && /bin/bash ~/anaconda.sh -b -p /opt/conda \ 65 | && rm ~/anaconda.sh \ 66 | && conda config --system --prepend channels conda-forge \ 67 | && conda install --quiet --yes conda-build \ 68 | && conda install --quiet --yes \ 69 | 'python=3.7.5' \ 70 | 'conda=4.8.5' \ 71 | 'nbconvert=6.0.6' \ 72 | 'notebook=6.1.4' \ 73 | 'jupyterhub=1.1.0' \ 74 | 'jupyterlab=2.2.8' \ 75 | 'pandas=1.1.2' \ 76 | && conda update --all --quiet --yes \ 77 | # *********** Installing Jupyter Extensions ***************** 78 | && jupyter labextension install @jupyterlab/celltags \ 79 | && rm -rf $CONDA_DIR/share/jupyter/lab/staging \ 80 | # *********** Clean ***************** 81 | && npm cache clean --force \ 82 | && conda clean -tipy \ 83 | && conda build purge-all \ 84 | && rm -rf /home/$JUPYTER_USER/.cache/yarn 85 | 86 | # *********** Adding HELK scripts and files to Container *************** 87 | COPY scripts/* ${JUPYTER_DIR}/scripts/ 88 | 89 | EXPOSE 8888 90 | 91 | # *********** RUN HELK *************** 92 | WORKDIR ${HOME} 93 | CMD 
["/opt/jupyter/scripts/jupyter-cmd.sh"] -------------------------------------------------------------------------------- /docker/jupyter-base/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.5' 2 | 3 | services: 4 | jupyter-base: 5 | image: cyb3rward0g/jupyter-base:0.0.7 6 | container_name: jupyter-base 7 | environment: 8 | JUPYTER_TYPE: lab 9 | JUPYTER_BASE_URL: /jupyter 10 | ports: 11 | - "8888:8888" 12 | restart: always 13 | networks: 14 | hunting: 15 | 16 | networks: 17 | hunting: 18 | driver: bridge 19 | 20 | volumes: 21 | notebooks: 22 | driver: local 23 | -------------------------------------------------------------------------------- /docker/jupyter-base/scripts/jupyter-cmd.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Notebooks Forge script: jupyter-cmd.sh 4 | # Notebooks Forge script description: Runs Jupyter type and specific parameters 5 | # Notebooks Forge build Stage: Alpha 6 | # Author: Roberto Rodriguez (@Cyb3rWard0g) 7 | # License: GPL-3.0 8 | 9 | NOTEBOOK_INFO_TAG="[NOTEBOOK-JUPYTER-DOCKER-INSTALLATION-INFO]" 10 | NOTEBOOK_ERROR_TAG="[NOTEBOOK-JUPYTER-DOCKER-INSTALLATION-ERROR]" 11 | # ***** Defining Jupyter params array ********** 12 | params=() 13 | 14 | # **** Setting Jupyter Notebook Type ****** 15 | if [[ -z "$JUPYTER_TYPE" ]]; then 16 | JUPYTER_TYPE="notebook" 17 | fi 18 | 19 | # ***** Setting defaults and param variables *********** 20 | # ***** If a config is passed, it should be enough *********** 21 | if [[ "$JUPYTER_CONFIG" ]]; then 22 | # ***** The config file to load ******** 23 | params+=("--config=$JUPYTER_CONFIG") 24 | else 25 | # ***** The IP address the notebook server will listen on******* 26 | if [[ -z "$JUPYTER_IP" ]]; then 27 | JUPYTER_IP=0.0.0.0 28 | fi 29 | params+=("--ip=$JUPYTER_IP") 30 | 31 | # ***** The port the notebook server will listen on ******* 32 | if [[ -z "$JUPYTER_PORT" ]]; then 
33 | JUPYTER_PORT=8888 34 | fi 35 | params+=("--port=$JUPYTER_PORT") 36 | 37 | # ***** The directory to use for notebooks and kernels ******* 38 | if [[ -z "$JUPYTER_NOTEBOOKS_DIR" ]]; then 39 | JUPYTER_NOTEBOOKS_DIR=/opt/jupyter/notebooks 40 | fi 41 | params+=("--notebook-dir=$JUPYTER_NOTEBOOKS_DIR") 42 | # ***** Default to no browser *********** 43 | # Don't open the notebook in a browser after startup. 44 | params+=("--no-browser") 45 | 46 | # ***** Buffer Manager ******* 47 | if [[ -z "$JUPYTER_MAX_BUFFER_SIZE" ]]; then 48 | # 0.5 GB by default 49 | JUPYTER_MAX_BUFFER_SIZE="536870912" 50 | fi 51 | params+=("--NotebookApp.max_buffer_size=$JUPYTER_MAX_BUFFER_SIZE") 52 | # ***** Running Jupyter Type & Parameters *********** 53 | if [[ "$JUPYTER_TYPE" == "notebook" ]]; then 54 | # ***** Base URL******* 55 | if [[ -z "$JUPYTER_BASE_URL" ]]; then 56 | JUPYTER_BASE_URL="/" 57 | fi 58 | params+=("--NotebookApp.base_url=$JUPYTER_BASE_URL") 59 | elif [[ "$JUPYTER_TYPE" == "lab" ]]; then 60 | # ***** Base URL******* 61 | if [[ -z "$JUPYTER_BASE_URL" ]]; then 62 | JUPYTER_BASE_URL="/" 63 | fi 64 | params+=("--LabApp.base_url=$JUPYTER_BASE_URL") 65 | else 66 | echo "$NOTEBOOK_ERROR_TAG You did not enter a valid Jupyter type: $JUPYTER_TYPE.." 67 | exit 1 68 | fi 69 | fi 70 | # ***** Running Jupyter Type & Parameters *********** 71 | echo "$NOTEBOOK_INFO_TAG Running Jupyter Type: $JUPYTER_TYPE.." 72 | echo "$NOTEBOOK_INFO_TAG Running the following parameters ${params[@]}" 73 | echo "$NOTEBOOK_INFO_TAG Starting Jupyter $JUPYTER_TYPE.." 
74 | if [[ "$JUPYTER_TYPE" == "notebook" ]]; then 75 | jupyter notebook ${params[@]} 76 | elif [[ "$JUPYTER_TYPE" == "lab" ]]; then 77 | jupyter lab ${params[@]} 78 | fi -------------------------------------------------------------------------------- /docker/jupyter-bloodhound/Dockerfile: -------------------------------------------------------------------------------- 1 | # Notebooks Forge script: Jupyter Bloodhound Dockerfile 2 | # Notebooks Forge Stage: Alpha 3 | # Author: Roberto Rodriguez (@Cyb3rWard0g) 4 | # License: GPL-3.0 5 | 6 | FROM cyb3rward0g/jupyter-base:0.0.7 7 | LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g" 8 | LABEL description="Notebooks Forge Jupyter Project." 9 | 10 | ENV DEBIAN_FRONTEND noninteractive 11 | 12 | USER ${USER} 13 | 14 | RUN python3 -m pip install --upgrade pip \ 15 | # *********** Install Libraries ************ 16 | && python3 -m pip install py2neo==4.3.0 plotly==4.3.0 altair==3.2.0 ipywidgets==7.5.1 \ 17 | # *********** Install Extensions ******************* 18 | # Jupyter widgets extension 19 | && jupyter labextension install @jupyter-widgets/jupyterlab-manager@1.1 \ 20 | # jupyterlab renderer support 21 | && jupyter labextension install jupyterlab-plotly@1.3.0 --no-build \ 22 | # FigureWidget support 23 | && jupyter labextension install plotlywidget@1.3.0 --no-build \ 24 | # Build extensions (must be done to activate extensions since --no-build is used above) 25 | && jupyter lab build --minimize=False --dev-build=False -------------------------------------------------------------------------------- /docker/jupyter-bloodhound/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.5' 2 | 3 | services: 4 | jupyter-bloodhound: 5 | image: cyb3rward0g/jupyter-bloodhound:0.0.1 6 | container_name: jupyter-bloodhound 7 | volumes: 8 | - notebooks:/opt/helk/jupyter/notebooks 9 | environment: 10 | JUPYTER_TYPE: lab 11 | JUPYTER_BASE_URL: /jupyter 12 | ports: 13 | - 
"8888:8888" 14 | restart: always 15 | networks: 16 | hunting: 17 | 18 | networks: 19 | hunting: 20 | driver: bridge 21 | 22 | volumes: 23 | notebooks: 24 | driver: local -------------------------------------------------------------------------------- /docker/jupyter-bloodhound/notebooks/playbook_kerberoastable_users.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# BloodHound Playbook: Explore Kerberoastable users\n", 8 | "---------------------------------------------------\n", 9 | "* **Content Author:** Andy Robbins and Rohan Vazarkar\n", 10 | "* **Notebook Author:** Roberto Rodriguez @Cyb3rWard0g\n", 11 | "* **Referece:** https://youtu.be/fqYoOoghqdE?t=1218" 12 | ] 13 | }, 14 | { 15 | "cell_type": "markdown", 16 | "metadata": {}, 17 | "source": [ 18 | "## Count Users with Service Principal Name Set " 19 | ] 20 | }, 21 | { 22 | "cell_type": "markdown", 23 | "metadata": {}, 24 | "source": [ 25 | "When sharphound finds a user with a Service Principal Name set, it property named `hasspn` in the User node to `True`. Therefore, if we want to count the number users with that property set, we just need to query for users with `hasspn = True`." 
26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 1, 31 | "metadata": {}, 32 | "outputs": [], 33 | "source": [ 34 | "from py2neo import Graph" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": 2, 40 | "metadata": {}, 41 | "outputs": [], 42 | "source": [ 43 | "g = Graph(\"bolt://206.189.85.93:7687\", auth=(\"neo4j\", \"BloodHound\"))" 44 | ] 45 | }, 46 | { 47 | "cell_type": "code", 48 | "execution_count": 3, 49 | "metadata": {}, 50 | "outputs": [], 51 | "source": [ 52 | "users_hasspn_count = g.run(\"\"\"\n", 53 | "MATCH (u:User {hasspn:true})\n", 54 | "RETURN COUNT(u)\n", 55 | "\"\"\").to_data_frame()" 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | "execution_count": 4, 61 | "metadata": {}, 62 | "outputs": [ 63 | { 64 | "data": { 65 | "text/html": [ 66 | "
\n", 67 | "\n", 80 | "\n", 81 | " \n", 82 | " \n", 83 | " \n", 84 | " \n", 85 | " \n", 86 | " \n", 87 | " \n", 88 | " \n", 89 | " \n", 90 | " \n", 91 | " \n", 92 | " \n", 93 | "
COUNT(u)
06
\n", 94 | "
" 95 | ], 96 | "text/plain": [ 97 | " COUNT(u)\n", 98 | "0 6" 99 | ] 100 | }, 101 | "execution_count": 4, 102 | "metadata": {}, 103 | "output_type": "execute_result" 104 | } 105 | ], 106 | "source": [ 107 | "users_hasspn_count" 108 | ] 109 | }, 110 | { 111 | "cell_type": "code", 112 | "execution_count": 5, 113 | "metadata": {}, 114 | "outputs": [ 115 | { 116 | "data": { 117 | "text/html": [ 118 | "
\n", 119 | "\n", 132 | "\n", 133 | " \n", 134 | " \n", 135 | " \n", 136 | " \n", 137 | " \n", 138 | " \n", 139 | " \n", 140 | " \n", 141 | " \n", 142 | " \n", 143 | " \n", 144 | " \n", 145 | " \n", 146 | " \n", 147 | " \n", 148 | " \n", 149 | " \n", 150 | " \n", 151 | " \n", 152 | " \n", 153 | " \n", 154 | " \n", 155 | " \n", 156 | " \n", 157 | " \n", 158 | " \n", 159 | " \n", 160 | " \n", 161 | " \n", 162 | " \n", 163 | " \n", 164 | " \n", 165 | "
u.name
0SQLSVC@TOKYO.JAPAN.LOCAL
1SCANSERVICE@TOKYO.JAPAN.LOCAL
2KRBTGT@JAPAN.LOCAL
3BACKUPLDAP@TOKYO.JAPAN.LOCAL
4KRBTGT@TOKYO.JAPAN.LOCAL
5KRBTGT@SINGAPORE.LOCAL
\n", 166 | "
" 167 | ], 168 | "text/plain": [ 169 | " u.name\n", 170 | "0 SQLSVC@TOKYO.JAPAN.LOCAL\n", 171 | "1 SCANSERVICE@TOKYO.JAPAN.LOCAL\n", 172 | "2 KRBTGT@JAPAN.LOCAL\n", 173 | "3 BACKUPLDAP@TOKYO.JAPAN.LOCAL\n", 174 | "4 KRBTGT@TOKYO.JAPAN.LOCAL\n", 175 | "5 KRBTGT@SINGAPORE.LOCAL" 176 | ] 177 | }, 178 | "execution_count": 5, 179 | "metadata": {}, 180 | "output_type": "execute_result" 181 | } 182 | ], 183 | "source": [ 184 | "g.run(\"\"\"\n", 185 | "MATCH (u:User {hasspn:true})\n", 186 | "RETURN u.name\n", 187 | "\"\"\").to_data_frame()" 188 | ] 189 | }, 190 | { 191 | "cell_type": "markdown", 192 | "metadata": {}, 193 | "source": [ 194 | "## Retrieve Kerberoastable Users with Path to DA " 195 | ] 196 | }, 197 | { 198 | "cell_type": "markdown", 199 | "metadata": {}, 200 | "source": [ 201 | "We can limit our results and return only Kereberoastable users with paths to DA. We can find Kerberoastable users with a path to DA and also see the length of the path to see which one is the closest." 202 | ] 203 | }, 204 | { 205 | "cell_type": "code", 206 | "execution_count": 6, 207 | "metadata": {}, 208 | "outputs": [], 209 | "source": [ 210 | "krb_users_path_to_DA = g.run(\"\"\"\n", 211 | "MATCH (u:User {hasspn:true})\n", 212 | "MATCH (g:Group {name:'DOMAIN ADMINS@JAPAN.LOCAL'})\n", 213 | "MATCH p = shortestPath(\n", 214 | " (u)-[*1..]->(g)\n", 215 | ")\n", 216 | "RETURN u.name,LENGTH(p)\n", 217 | "ORDER BY LENGTH(p) ASC\n", 218 | "\"\"\").to_data_frame()" 219 | ] 220 | }, 221 | { 222 | "cell_type": "code", 223 | "execution_count": 7, 224 | "metadata": {}, 225 | "outputs": [ 226 | { 227 | "data": { 228 | "text/html": [ 229 | "
\n", 230 | "\n", 243 | "\n", 244 | " \n", 245 | " \n", 246 | " \n", 247 | " \n", 248 | " \n", 249 | " \n", 250 | " \n", 251 | " \n", 252 | " \n", 253 | " \n", 254 | " \n", 255 | " \n", 256 | " \n", 257 | " \n", 258 | " \n", 259 | " \n", 260 | " \n", 261 | " \n", 262 | " \n", 263 | "
u.nameLENGTH(p)
0SQLSVC@TOKYO.JAPAN.LOCAL3
1BACKUPLDAP@TOKYO.JAPAN.LOCAL5
\n", 264 | "
" 265 | ], 266 | "text/plain": [ 267 | " u.name LENGTH(p)\n", 268 | "0 SQLSVC@TOKYO.JAPAN.LOCAL 3\n", 269 | "1 BACKUPLDAP@TOKYO.JAPAN.LOCAL 5" 270 | ] 271 | }, 272 | "execution_count": 7, 273 | "metadata": {}, 274 | "output_type": "execute_result" 275 | } 276 | ], 277 | "source": [ 278 | "krb_users_path_to_DA" 279 | ] 280 | }, 281 | { 282 | "cell_type": "markdown", 283 | "metadata": {}, 284 | "source": [ 285 | "## Return Most Privileged Kerberoastable users" 286 | ] 287 | }, 288 | { 289 | "cell_type": "markdown", 290 | "metadata": {}, 291 | "source": [ 292 | "What if we do not have kerberoastable users with a path to DA? We can still look for most privileged Kerberoastable users based on how many computers they have local admins rights on. " 293 | ] 294 | }, 295 | { 296 | "cell_type": "code", 297 | "execution_count": 8, 298 | "metadata": {}, 299 | "outputs": [], 300 | "source": [ 301 | "privileged_kerberoastable_users = g.run(\"\"\"\n", 302 | "MATCH (u:User {hasspn:true})\n", 303 | "OPTIONAL MATCH (u)-[:AdminTo]->(c1:Computer)\n", 304 | "OPTIONAL MATCH (u)-[:MemberOf*1..]->(:Group)-[:AdminTo]->(c2:Computer)\n", 305 | "WITH u,COLLECT(c1) + COLLECT(c2) AS tempVar\n", 306 | "UNWIND tempVar AS comps\n", 307 | "RETURN u.name,COUNT(DISTINCT(comps))\n", 308 | "ORDER BY COUNT(DISTINCT(comps)) DESC\n", 309 | "\"\"\").to_data_frame()" 310 | ] 311 | }, 312 | { 313 | "cell_type": "code", 314 | "execution_count": 9, 315 | "metadata": {}, 316 | "outputs": [ 317 | { 318 | "data": { 319 | "text/html": [ 320 | "
\n", 321 | "\n", 334 | "\n", 335 | " \n", 336 | " \n", 337 | " \n", 338 | " \n", 339 | " \n", 340 | " \n", 341 | " \n", 342 | " \n", 343 | " \n", 344 | " \n", 345 | " \n", 346 | " \n", 347 | " \n", 348 | " \n", 349 | "
u.nameCOUNT(DISTINCT(comps))
0SQLSVC@TOKYO.JAPAN.LOCAL1
\n", 350 | "
" 351 | ], 352 | "text/plain": [ 353 | " u.name COUNT(DISTINCT(comps))\n", 354 | "0 SQLSVC@TOKYO.JAPAN.LOCAL 1" 355 | ] 356 | }, 357 | "execution_count": 9, 358 | "metadata": {}, 359 | "output_type": "execute_result" 360 | } 361 | ], 362 | "source": [ 363 | "privileged_kerberoastable_users" 364 | ] 365 | }, 366 | { 367 | "cell_type": "code", 368 | "execution_count": null, 369 | "metadata": {}, 370 | "outputs": [], 371 | "source": [] 372 | } 373 | ], 374 | "metadata": { 375 | "kernelspec": { 376 | "display_name": "Python 3", 377 | "language": "python", 378 | "name": "python3" 379 | }, 380 | "language_info": { 381 | "codemirror_mode": { 382 | "name": "ipython", 383 | "version": 3 384 | }, 385 | "file_extension": ".py", 386 | "mimetype": "text/x-python", 387 | "name": "python", 388 | "nbconvert_exporter": "python", 389 | "pygments_lexer": "ipython3", 390 | "version": "3.7.3" 391 | } 392 | }, 393 | "nbformat": 4, 394 | "nbformat_minor": 4 395 | } 396 | -------------------------------------------------------------------------------- /docker/jupyter-graphframes/Dockerfile: -------------------------------------------------------------------------------- 1 | # Author: Roberto Rodriguez (@Cyb3rWard0g) 2 | # License: GPL-3.0 3 | 4 | FROM cyb3rward0g/jupyter-pyspark:0.0.6 5 | LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g" 6 | LABEL description="Notebooks Forge Jupyter Project" 7 | 8 | ENV DEBIAN_FRONTEND noninteractive 9 | 10 | # *********** Setting Environment Variables *************** 11 | ENV GRAPHFRAMES_VERSION=0.8.0 12 | ENV SPARK_GF_VERSION=3.0 13 | ENV SCALA_GF_VERSION=2.12 14 | 15 | USER $USER 16 | # *********** Download Graphframes Jar *************** 17 | RUN wget http://dl.bintray.com/spark-packages/maven/graphframes/graphframes/${GRAPHFRAMES_VERSION}-spark${SPARK_GF_VERSION}-s_${SCALA_GF_VERSION}/graphframes-${GRAPHFRAMES_VERSION}-spark${SPARK_GF_VERSION}-s_${SCALA_GF_VERSION}.jar -P ${SPARK_HOME}/jars/ \ 18 | && cp ${SPARK_HOME}/jars/graphframes* 
${SPARK_HOME}/graphframes.zip -------------------------------------------------------------------------------- /docker/jupyter-hunt/Dockerfile: -------------------------------------------------------------------------------- 1 | # Notebooks Forge script: Jupyter Hunt Environment Dockerfile 2 | # Author: Roberto Rodriguez (@Cyb3rWard0g) 3 | # License: GPL-3.0 4 | 5 | FROM cyb3rward0g/jupyter-pyspark:0.0.6 6 | LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g" 7 | LABEL description="Notebooks Forge Jupyter Project." 8 | 9 | ENV DEBIAN_FRONTEND noninteractive 10 | 11 | # *********** Setting Environment Variables *************** 12 | ENV GRAPHFRAMES_VERSION=0.7.0 13 | ENV KAFKA_VERSION=2.4.0 14 | ENV SCALA_VERSION=2.11 15 | ENV SLF4J_API_VERSION=1.7.29 16 | ENV LZ4_JAVA=1.6.0 17 | ENV SNAPPY_JAVA=1.1.7.3 18 | ENV ESHADOOP_VERSION=7.5.2 19 | ENV ESHADOOP_DIR=${JUPYTER_DIR}/es-hadoop 20 | 21 | USER $USER 22 | # **** Current Channels *********** 23 | #- https://repo.anaconda.com/pkgs/main/linux-64 24 | #- https://repo.anaconda.com/pkgs/main/noarch 25 | #- https://repo.anaconda.com/pkgs/free/linux-64 26 | #- https://repo.anaconda.com/pkgs/free/noarch 27 | #- https://repo.anaconda.com/pkgs/r/linux-64 28 | #- https://repo.anaconda.com/pkgs/r/noarch 29 | RUN mkdir -v ${ESHADOOP_DIR} \ 30 | # *********** Install Jupyter Notebook & Extra Packages with Conda ************* 31 | && conda install --quiet --yes \ 32 | 'altair=4.1.0' \ 33 | 's3fs=0.4.2' \ 34 | 'elasticsearch-dsl=7.0.0' \ 35 | 'matplotlib=3.2.1' \ 36 | 'networkx=2.4' \ 37 | 'nxviz=0.6.2' \ 38 | && conda update --all --quiet --yes \ 39 | # *********** Clean ***************** 40 | && conda clean -tipy \ 41 | && conda build purge-all \ 42 | && rm -rf /home/$USER/.cache/yarn \ 43 | # *********** Install Pip packages not availabe via conda ************ 44 | && python3 -m pip install ksql==0.5.1.1 confluent-kafka==1.4.1 splunk-sdk==1.6.12 Kqlmagic==0.1.111.post15 neo4j==1.7.6 openhunt==1.6.5 pyarrow==0.17.0 msticpy==0.4.0 \ 
45 | # *********** Download ES-Hadoop *************** 46 | && wget https://artifacts.elastic.co/downloads/elasticsearch-hadoop/elasticsearch-hadoop-${ESHADOOP_VERSION}.zip -P ${ESHADOOP_DIR}/ \ 47 | && unzip -j ${ESHADOOP_DIR}/*.zip -d ${ESHADOOP_DIR}/ \ 48 | && rm ${ESHADOOP_DIR}/*.zip \ 49 | # *********** Download Graphframes Jar *************** 50 | && wget http://dl.bintray.com/spark-packages/maven/graphframes/graphframes/${GRAPHFRAMES_VERSION}-spark2.4-s_2.11/graphframes-${GRAPHFRAMES_VERSION}-spark2.4-s_2.11.jar -P ${SPARK_HOME}/jars/ \ 51 | && cp ${SPARK_HOME}/jars/graphframes* ${SPARK_HOME}/graphframes.zip \ 52 | # *********** Download Extra Jars *************** 53 | && wget https://repo1.maven.org/maven2/org/apache/spark/spark-sql-kafka-0-10_${SCALA_VERSION}/${SPARK_VERSION}/spark-sql-kafka-0-10_${SCALA_VERSION}-${SPARK_VERSION}.jar -P ${SPARK_HOME}/jars/ \ 54 | && wget https://repo1.maven.org/maven2/org/apache/kafka/kafka-clients/${KAFKA_VERSION}/kafka-clients-${KAFKA_VERSION}.jar -P ${SPARK_HOME}/jars/ \ 55 | && wget https://repo1.maven.org/maven2/org/slf4j/slf4j-api/${SLF4J_API_VERSION}/slf4j-api-${SLF4J_API_VERSION}.jar -P ${SPARK_HOME}/jars/ \ 56 | && wget https://repo1.maven.org/maven2/org/spark-project/spark/unused/1.0.0/unused-1.0.0.jar -P ${SPARK_HOME}/jars/ \ 57 | && wget https://repo1.maven.org/maven2/org/lz4/lz4-java/${LZ4_JAVA}/lz4-java-${LZ4_JAVA}.jar -P ${SPARK_HOME}/jars \ 58 | && wget https://repo1.maven.org/maven2/org/xerial/snappy/snappy-java/${SNAPPY_JAVA}/snappy-java-${SNAPPY_JAVA}.jar -P ${SPARK_HOME}/jars/ 59 | 60 | # *********** Adding HELK scripts and files to Container *************** 61 | COPY spark/* ${SPARK_HOME}/conf/ 62 | 63 | USER root 64 | 65 | RUN chown -R ${USER} ${JUPYTER_DIR} ${HOME} ${SPARK_HOME} 66 | 67 | USER ${USER} -------------------------------------------------------------------------------- /docker/jupyter-hunt/docker-compose.yml: -------------------------------------------------------------------------------- 
1 | version: '3.5' 2 | 3 | services: 4 | jupyter-hunt: 5 | #image: cyb3rwardog/jupyter-hunt:0.0.7 6 | build: ./ 7 | container_name: jupyter-hunt 8 | volumes: 9 | - notebooks:/opt/jupyter/notebooks 10 | environment: 11 | JUPYTER_TYPE: lab 12 | JUPYTER_BASE_URL: /jupyter 13 | ports: 14 | - "8888:8888" 15 | restart: always 16 | networks: 17 | hunting: 18 | 19 | networks: 20 | hunting: 21 | driver: bridge 22 | 23 | volumes: 24 | notebooks: 25 | driver: local 26 | -------------------------------------------------------------------------------- /docker/jupyter-hunt/spark/spark-defaults.conf: -------------------------------------------------------------------------------- 1 | # HELK build Stage: Alpha 2 | # Author: Roberto Rodriguez (@Cyb3rWard0g) 3 | # License: GPL-3.0 4 | 5 | # HELK References: 6 | # https://spark.apache.org/docs/latest/configuration.html 7 | # https://graphframes.github.io/quick-start.html 8 | # https://spark-packages.org/package/graphframes/graphframes 9 | # https://spark.apache.org/docs/latest/sql-programming-guide.html#pyspark-usage-guide-for-pandas-with-apache-arrow 10 | 11 | # ************ Application Properties **************** 12 | # Logs the effective SparkConf as INFO when a SparkContext is started. Default: false 13 | spark.logConf true 14 | # The cluster manager to connect to. 15 | # spark.master spark://helk-spark-master:7077 16 | # Restarts the driver automatically if it fails with a non-zero exit status 17 | spark.driver.supervise true 18 | 19 | # ************ Runtime Environment **************** 20 | # Sets the number of latest rolling log files that are going to be retained by the system. Older log files will be deleted. 21 | spark.executor.logs.rolling.maxRetainedFiles 20 22 | # Set the strategy of rolling of executor logs. 23 | spark.executor.logs.rolling.strategy spark.executor.logs.rolling.time.interval 24 | # Comma-separated list of jars to include on the driver and executor classpaths. Globs are allowed. 
25 | spark.jars /opt/jupyter/es-hadoop/elasticsearch-hadoop-7.5.2.jar 26 | # Comma-separated list of Maven coordinates of jars to include on the driver and executor classpaths. 27 | # The coordinates should be groupId:artifactId:version. 28 | #spark.jars.packages graphframes:graphframes:0.7.0-spark2.4-s_2.11,org.apache.spark:spark-sql-kafka-0-10_2.11:2.4.0 29 | #spark.jars.packages org.apache.spark:spark-sql-kafka-0-10_2.11:2.3.1,databricks:spark-sklearn:0.2.3 30 | 31 | # ************ Spark UI **************** 32 | # Base directory in which Spark events are logged 33 | spark.eventLog.dir /opt/jupyter/spark/logs 34 | # Whether to log Spark events, useful for reconstructing the Web UI after the application has finished. 35 | spark.eventLog.enabled true 36 | # Enable running Spark Master as reverse proxy for worker and application UIs. 37 | # In this mode, Spark master will reverse proxy the worker and application UIs to enable access without requiring direct access to their hosts. 38 | spark.ui.reverseProxy true 39 | 40 | spark.sql.execution.arrow.enabled true 41 | 42 | # Enables the external shuffle service. This service preserves the shuffle files written by executors so the executors can be safely removed 43 | spark.shuffle.service.enabled true 44 | 45 | # ************ Dynamic Allocation ************** 46 | # Whether to use dynamic resource allocation, which scales the number of executors registered with this application up and down based on the workload 47 | spark.dynamicAllocation.enabled true 48 | # If dynamic allocation is enabled and an executor has been idle for more than this duration, the executor will be removed 49 | spark.dynamicAllocation.executorIdleTimeout 15s 50 | 51 | # Amount of memory to use per executor process, in MiB unless otherwise specified. (e.g. 2g, 8g). 
52 | spark.executor.memory 1g -------------------------------------------------------------------------------- /docker/jupyter-pwsh/Dockerfile: -------------------------------------------------------------------------------- 1 | # Notebooks Forge script: Jupyter Powershell Dockerfile 2 | # Notebooks Forge Stage: Alpha 3 | # Author: Roberto Rodriguez (@Cyb3rWard0g) 4 | # License: GPL-3.0 5 | 6 | FROM cyb3rward0g/jupyter-base:0.0.5 7 | LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g" 8 | LABEL description="Notebooks Forge Jupyter Project." 9 | 10 | ENV DEBIAN_FRONTEND noninteractive 11 | 12 | USER root 13 | 14 | RUN curl -sLO https://packages.microsoft.com/config/ubuntu/18.04/packages-microsoft-prod.deb \ 15 | && dpkg -i packages-microsoft-prod.deb \ 16 | && apt-get update --fix-missing \ 17 | && apt-get install -y --no-install-recommends powershell \ 18 | # ********** Clean APT ********** 19 | && apt-get -qy clean autoremove \ 20 | && rm -rf /var/lib/apt/lists/* 21 | 22 | USER ${USER} 23 | 24 | RUN python3 -m pip install --upgrade pip \ 25 | # *********** Install powershell kernel ************ 26 | && python3 -m pip install powershell-kernel==0.1.2 \ 27 | && python3 -m powershell_kernel.install --powershell-command pwsh -------------------------------------------------------------------------------- /docker/jupyter-pwsh/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.5' 2 | 3 | services: 4 | jupyter-pwsh: 5 | build: ./ 6 | container_name: jupyter-pwsh 7 | volumes: 8 | - notebooks:/opt/helk/jupyter/notebooks 9 | environment: 10 | JUPYTER_TYPE: lab 11 | JUPYTER_BASE_URL: /jupyter 12 | ports: 13 | - "8888:8888" 14 | restart: always 15 | networks: 16 | hunting: 17 | 18 | networks: 19 | hunting: 20 | driver: bridge 21 | 22 | volumes: 23 | notebooks: 24 | driver: local -------------------------------------------------------------------------------- /docker/jupyter-pyspark/Dockerfile: 
-------------------------------------------------------------------------------- 1 | # Notebooks Forge script: Jupyter PySpark Environment Dockerfile 2 | # Author: Roberto Rodriguez (@Cyb3rWard0g) 3 | # License: GPL-3.0 4 | 5 | FROM cyb3rward0g/jupyter-base:0.0.7 6 | LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g" 7 | LABEL description="Notebooks Forge Jupyter Project." 8 | 9 | ENV DEBIAN_FRONTEND noninteractive 10 | 11 | USER root 12 | 13 | # *********** Spark Env Variables *************** 14 | ENV SPARK_VERSION=3.0.1 15 | ENV APACHE_HADOOP_VERSION=2.7 16 | ENV SPARK_HOME=/opt/jupyter/spark 17 | 18 | # *********** Installing Prerequisites *************** 19 | # -qq : No output except for errors 20 | RUN apt-get update -qq \ 21 | && apt-get install -qqy openjdk-8-jre-headless ca-certificates-java \ 22 | && apt-get -qy clean autoremove \ 23 | && rm -rf /var/lib/apt/lists/* \ 24 | # *********** Installing Spark *************** 25 | && bash -c 'mkdir -pv /opt/jupyter/spark/logs' \ 26 | && wget -c http://apache.claz.org/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${APACHE_HADOOP_VERSION}.tgz -O - | tar xvz -C ${SPARK_HOME} --strip-components=1 \ 27 | && chown -R ${USER} ${JUPYTER_DIR} ${HOME} 28 | 29 | # *********** Adding scripts and files to Container *************** 30 | COPY spark/* ${SPARK_HOME}/conf/ 31 | COPY kernels/pyspark_kernel.json /usr/local/share/jupyter/kernels/pyspark3/kernel.json 32 | 33 | RUN chown -R ${USER} ${JUPYTER_DIR} ${HOME} ${SPARK_HOME} \ 34 | && chown ${USER} /usr/local/share/jupyter/kernels/pyspark3/kernel.json 35 | 36 | EXPOSE 8000 37 | 38 | USER ${USER} -------------------------------------------------------------------------------- /docker/jupyter-pyspark/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.5' 2 | 3 | services: 4 | jupyter-pyspark: 5 | image: cyb3rward0g/jupyter-pyspark:0.0.4 6 | container_name: jupyter-pyspark 7 | environment: 8 | 
JUPYTER_TYPE: notebook 9 | JUPYTER_BASE_URL: /jupyter 10 | ports: 11 | - "8888:8888" 12 | restart: always 13 | networks: 14 | hunting: 15 | 16 | networks: 17 | hunting: 18 | driver: bridge 19 | 20 | volumes: 21 | notebooks: 22 | driver: local -------------------------------------------------------------------------------- /docker/jupyter-pyspark/kernels/pyspark_kernel.json: -------------------------------------------------------------------------------- 1 | { 2 | "display_name": "PySpark_Python3", 3 | "language": "python", 4 | "argv": [ 5 | "/opt/conda/bin/python3", 6 | "-m", 7 | "ipykernel_launcher", 8 | "-f", 9 | "{connection_file}" 10 | ], 11 | "env": { 12 | "SPARK_HOME": "/opt/jupyter/spark/", 13 | "PYTHONPATH": "/opt/jupyter/spark/python/:/opt/jupyter/spark/python/lib/py4j-0.10.9-src.zip:/opt/jupyter/spark/graphframes.zip", 14 | "PYSPARK_PYTHON": "/opt/conda/bin/python3" 15 | } 16 | } -------------------------------------------------------------------------------- /docker/jupyter-pyspark/spark/log4j.properties: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | # 17 | 18 | # Set everything to be logged to the console 19 | log4j.rootCategory=WARN, console 20 | log4j.appender.console=org.apache.log4j.ConsoleAppender 21 | log4j.appender.console.target=System.err 22 | log4j.appender.console.layout=org.apache.log4j.PatternLayout 23 | log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n 24 | 25 | # Set the default spark-shell log level to WARN. When running the spark-shell, the 26 | # log level for this class is used to overwrite the root logger's log level, so that 27 | # the user can have different defaults for the shell and regular Spark apps. 28 | log4j.logger.org.apache.spark.repl.Main=WARN 29 | 30 | # Settings to quiet third party logs that are too verbose 31 | log4j.logger.org.spark_project.jetty=WARN 32 | log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR 33 | log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO 34 | log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO 35 | log4j.logger.org.apache.parquet=ERROR 36 | log4j.logger.parquet=ERROR 37 | 38 | # SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support 39 | log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL 40 | log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR 41 | -------------------------------------------------------------------------------- /docker/jupyter-pyspark/spark/spark-defaults.conf: -------------------------------------------------------------------------------- 1 | # HELK build Stage: Alpha 2 | # Author: Roberto Rodriguez (@Cyb3rWard0g) 3 | # License: GPL-3.0 4 | 5 | # HELK References: 6 | # https://spark.apache.org/docs/latest/configuration.html 7 | # https://graphframes.github.io/quick-start.html 8 | # https://spark-packages.org/package/graphframes/graphframes 9 | # 
https://spark.apache.org/docs/latest/sql-programming-guide.html#pyspark-usage-guide-for-pandas-with-apache-arrow 10 | 11 | # ************ Application Properties **************** 12 | # Logs the effective SparkConf as INFO when a SparkContext is started. Default: false 13 | spark.logConf true 14 | # The cluster manager to connect to. 15 | # spark.master spark://helk-spark-master:7077 16 | # Restarts the driver automatically if it fails with a non-zero exit status 17 | spark.driver.supervise true 18 | 19 | # ************ Runtime Environment **************** 20 | # Sets the number of latest rolling log files that are going to be retained by the system. Older log files will be deleted. 21 | spark.executor.logs.rolling.maxRetainedFiles 20 22 | # Set the strategy of rolling of executor logs. 23 | spark.executor.logs.rolling.strategy spark.executor.logs.rolling.time.interval 24 | # Comma-separated list of jars to include on the driver and executor classpaths. Globs are allowed. 25 | 26 | # Comma-separated list of Maven coordinates of jars to include on the driver and executor classpaths. 27 | # The coordinates should be groupId:artifactId:version. 28 | #spark.jars.packages graphframes:graphframes:0.7.0-spark2.4-s_2.11,org.apache.spark:spark-sql-kafka-0-10_2.11:2.4.0 29 | #spark.jars.packages org.apache.spark:spark-sql-kafka-0-10_2.11:2.3.1,databricks:spark-sklearn:0.2.3 30 | 31 | # ************ Spark UI **************** 32 | # Base directory in which Spark events are logged 33 | spark.eventLog.dir /opt/jupyter/spark/logs 34 | # Whether to log Spark events, useful for reconstructing the Web UI after the application has finished. 35 | spark.eventLog.enabled true 36 | # Enable running Spark Master as reverse proxy for worker and application UIs. 37 | # In this mode, Spark master will reverse proxy the worker and application UIs to enable access without requiring direct access to their hosts. 
38 | #spark.ui.reverseProxy true 39 | 40 | #spark.sql.execution.arrow.enabled true 41 | 42 | # Enables the external shuffle service. This service preserves the shuffle files written by executors so the executors can be safely removed 43 | #spark.shuffle.service.enabled true 44 | 45 | # ************ Dynamic Allocation ************** 46 | # Whether to use dynamic resource allocation, which scales the number of executors registered with this application up and down based on the workload 47 | #spark.dynamicAllocation.enabled true 48 | # If dynamic allocation is enabled and an executor has been idle for more than this duration, the executor will be removed 49 | #spark.dynamicAllocation.executorIdleTimeout 15s 50 | 51 | # Amount of memory to use per executor process, in MiB unless otherwise specified. (e.g. 2g, 8g). 52 | #spark.executor.memory 1g -------------------------------------------------------------------------------- /docker/jupyter-rto/Dockerfile: -------------------------------------------------------------------------------- 1 | # Notebooks Forge script: Jupyter RTO Dockerfile 2 | # Notebooks Forge Stage: Alpha 3 | # Author: Roberto Rodriguez (@Cyb3rWard0g) 4 | # License: GPL-3.0 5 | 6 | FROM cyb3rward0g/jupyter-base:0.0.5 7 | LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g" 8 | LABEL description="Notebooks Forge Jupyter Project." 
9 | 10 | ENV DEBIAN_FRONTEND noninteractive 11 | 12 | USER ${USER} 13 | 14 | RUN python3 -m pip install --upgrade pip \ 15 | # *********** Install neo4j ************ 16 | && python3 -m pip install neo4j==1.7.6 \ 17 | # *********** Download pycobalt *************** 18 | && mkdir -p /opt/pycobalt \ 19 | && git clone https://github.com/dcsync/pycobalt.git /opt/pycobalt \ 20 | && cd /opt/pycobalt \ 21 | && python3 setup.py install \ 22 | # *********** Download Faction C2 Client *************** 23 | && mkdir -pv /opt/faction/cli \ 24 | && git clone --single-branch --branch=master https://github.com/FactionC2/CLI /opt/faction/cli \ 25 | && cd /opt/faction/cli \ 26 | && python3 -m pip install pipenv \ 27 | && python3 -m pipenv install --system \ 28 | && python3 setup.py install -------------------------------------------------------------------------------- /docker/jupyter-rto/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.5' 2 | 3 | services: 4 | jupyter-rto: 5 | image: cyb3rward0g/jupyter-rto:0.0.2 6 | container_name: jupyter-rto 7 | volumes: 8 | - notebooks:/opt/helk/jupyter/notebooks 9 | environment: 10 | JUPYTER_TYPE: lab 11 | JUPYTER_BASE_URL: /jupyter 12 | ports: 13 | - "8888:8888" 14 | restart: always 15 | networks: 16 | hunting: 17 | 18 | networks: 19 | hunting: 20 | driver: bridge 21 | 22 | volumes: 23 | notebooks: 24 | driver: local -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SOURCEDIR = source 8 | BUILDDIR = build 9 | 10 | # Put it first so that "make" without argument is like "make help". 
11 | help: 12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 13 | 14 | .PHONY: help Makefile 15 | 16 | # Catch-all target: route all unknown targets to Sphinx using the new 17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 18 | %: Makefile 19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /docs/build/doctrees/docker.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/doctrees/docker.doctree -------------------------------------------------------------------------------- /docs/build/doctrees/environment.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/doctrees/environment.pickle -------------------------------------------------------------------------------- /docs/build/doctrees/index.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/doctrees/index.doctree -------------------------------------------------------------------------------- /docs/build/doctrees/jupyter.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/doctrees/jupyter.doctree -------------------------------------------------------------------------------- /docs/build/doctrees/jupyter_hunt.doctree: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/doctrees/jupyter_hunt.doctree -------------------------------------------------------------------------------- /docs/build/doctrees/jupyter_rto.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/doctrees/jupyter_rto.doctree -------------------------------------------------------------------------------- /docs/build/doctrees/jupyter_spark.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/doctrees/jupyter_spark.doctree -------------------------------------------------------------------------------- /docs/build/doctrees/license.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/doctrees/license.doctree -------------------------------------------------------------------------------- /docs/build/doctrees/zeppelin.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/doctrees/zeppelin.doctree -------------------------------------------------------------------------------- /docs/build/html/.buildinfo: -------------------------------------------------------------------------------- 1 | # Sphinx build info version 1 2 | # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. 
3 | config: 1d004c778a20e39a161d51665d26d32c 4 | tags: 645f666f9bcd5a90fca523b33c5a78b7 5 | -------------------------------------------------------------------------------- /docs/build/html/.nojekyll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/.nojekyll -------------------------------------------------------------------------------- /docs/build/html/_images/docker-containers.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_images/docker-containers.png -------------------------------------------------------------------------------- /docs/build/html/_images/jupyter-design.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_images/jupyter-design.png -------------------------------------------------------------------------------- /docs/build/html/_images/jupyter-evolution.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_images/jupyter-evolution.png -------------------------------------------------------------------------------- /docs/build/html/_images/jupyter-login.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_images/jupyter-login.png -------------------------------------------------------------------------------- /docs/build/html/_images/jupyter-main.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_images/jupyter-main.png -------------------------------------------------------------------------------- /docs/build/html/_images/jupyter-samples.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_images/jupyter-samples.png -------------------------------------------------------------------------------- /docs/build/html/_sources/docker.rst.txt: -------------------------------------------------------------------------------- 1 | Docker Notebook Deployments 2 | =========================== 3 | 4 | Docker technology allows the project to package notebook applications with all its libraries and dependencies in "containers" and make them portable among any operating system. 5 | This allows security analysts to deploy the notebook servers on any system they use daily for hunting research. 6 | 7 | What are Docker Containers? 8 | ########################### 9 | 10 | According to `Docker docs `_, a container is a standard unit of software that packages up code and all its dependencies so the application runs quickly and reliably from one computing environment to another. 11 | A Docker container image is a lightweight, standalone, executable package of software that includes everything needed to run an application: code, runtime, system tools, system libraries and settings. 12 | 13 | .. image:: _static/docker-containers.png 14 | :alt: Docker Containers 15 | :scale: 50% 16 | 17 | There are two notebook environments being supported by the project. 
18 | 19 | Jupyter Notebooks Install 20 | ######################### 21 | 22 | Requirements 23 | ************ 24 | 25 | * `Git `_ : Git is a free and open source distributed version control system designed to handle everything from small to very large projects with speed and efficiency. 26 | * `Docker CE `_ : Docker Community Edition (CE) is ideal for developers and small teams looking to get started with Docker and experimenting with container-based apps. 27 | * `Docker Compose `_ : a tool for defining and running multi-container Docker applications. 28 | 29 | Steps 30 | ***** 31 | 32 | Git clone the `Notebooks Forge project `_ and change your current directory to the project's directory. 33 | 34 | .. code-block:: console 35 | 36 | $ git clone https://github.com/Cyb3rWard0g/notebooks-forge.git 37 | $ cd notebooks-forge/ 38 | 39 | Change your current directory to the specific notebook you want to work with (``jupyter-hunt`` or ``jupyter-rto``) 40 | 41 | .. code-block:: console 42 | 43 | $ cd jupyter-hunt/ 44 | 45 | Run docker-compose pointing to the default compose file available in the folder. 46 | 47 | .. code-block:: console 48 | 49 | $ sudo docker-compose -f docker-compose.yml up --build -d 50 | 51 | Once your container gets downloaded/run, you can check if it is running or not with the following commands: 52 | 53 | .. code-block:: console 54 | 55 | $ sudo docker ps 56 | 57 | Before accessing the Jupyter notebook server via your favorite web browser, you will have to get the access token the application initialized with. 58 | You can get it with the following command: 59 | 60 | .. code-block:: console 61 | 62 | $ sudo docker exec -ti jupyter-hunt jupyter notebook list | grep "token" | sed 's/.*token=\([^ ]*\).*/\1/' 63 | 64 | Open your favorite browser at ``http://:8888``. You will then be prompted with a login box to enter the token. 65 | 66 | .. image:: _static/jupyter-login.png 67 | :alt: Jupyter Login 68 | :scale: 50% 69 | 70 | That's it! 
You are now ready to use your Jupyter Notebook server. 71 | 72 | .. image:: _static/jupyter-main.png 73 | :alt: Jupyter Main 74 | :scale: 40% 75 | 76 | Zeppelin Notebooks Install 77 | ########################## 78 | 79 | Requirements 80 | ************ 81 | 82 | * `Git `_ : Git is a free and open source distributed version control system designed to handle everything from small to very large projects with speed and efficiency. 83 | * `Docker CE `_ : Docker Community Edition (CE) is ideal for developers and small teams looking to get started with Docker and experimenting with container-based apps. 84 | * `Docker Compose `_ : a tool for defining and running multi-container Docker applications. 85 | 86 | Steps 87 | ***** 88 | 89 | Coming soon.. -------------------------------------------------------------------------------- /docs/build/html/_sources/index.rst.txt: -------------------------------------------------------------------------------- 1 | .. Notebooks Forge documentation master file, created by 2 | sphinx-quickstart on Wed Apr 17 11:44:45 2019. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Notebooks Forge 7 | =============== 8 | 9 | A project dedicated to build and provide ``Notebooks`` servers for ``Defensive`` and ``Offensive`` operators to: 10 | 11 | * Design playbooks 12 | * Demonstrate how techniques can be used 13 | * Showcase when and why an operator would want to use a technique 14 | * Document engagements procedures 15 | * Prototype new ways to analyze data extracted from endpoints in a more dynamic, flexible and language-agnostic way. 16 | 17 | This project supports two notebook server types such as `Jupyter `_ and `Zeppelin `_. 18 | 19 | 20 | What is a Notebook? 
21 | ******************* 22 | 23 | Think of a notebook as a document that you can access via a web interface that allows you to save input (i.e live code) and output (i.e code execution results / evaluated code output) of interactive sessions as well as important notes needed to explain the methodology and steps taken to perform specific tasks (i.e data analysis). 24 | 25 | .. toctree:: 26 | :maxdepth: 2 27 | :caption: Notebook Environments: 28 | 29 | Jupyter Notebook 30 | Zeppelin Notebook 31 | 32 | .. toctree:: 33 | :maxdepth: 2 34 | :caption: Notebook Deployments: 35 | 36 | Docker 37 | 38 | .. toctree:: 39 | :maxdepth: 2 40 | :caption: Licenses: 41 | 42 | GNU General Public License V3 -------------------------------------------------------------------------------- /docs/build/html/_sources/jupyter.rst.txt: -------------------------------------------------------------------------------- 1 | Jupyter Notebook 2 | ================ 3 | 4 | The Jupyter Notebook is an open-source web application that allows you to create and share documents that contain live code, equations, visualizations and narrative text. 5 | Uses include: data cleaning and transformation, numerical simulation, statistical modeling, data visualization, machine learning, and much more. 6 | 7 | The Jupyter Notebook project is the evolution of the IPython Notebook library which was developed primarily to enhance the default python interactive console by enabling scientific operations and advanced data analytics capabilities via sharable web documents. 8 | 9 | .. image:: _static/jupyter-evolution.png 10 | :alt: Jupyter Evolution 11 | :scale: 50% 12 | 13 | Nowadays, the Jupyter Notebook project not only supports Python but also over 40 programming languages such as R, Julia, Scala and PySpark. 
14 | In fact, its name was originally derived from three programming languages: Julia, Python and R which made it one of the first language-agnostic notebook applications, and now considered one of the most preferred environments for data scientists and engineers in the community to explore and analyze data. 15 | 16 | .. image:: _static/jupyter-samples.png 17 | :alt: Jupyter Sample 18 | :scale: 50% 19 | 20 | How do Jupyter Notebooks Work? 21 | ############################## 22 | 23 | Jupyter Notebooks work with what is called a two-process model based on a kernel-client infrastructure. 24 | This model applies a similar concept to the `Read-Evaluate-Print Loop (REPL) `_ programming environment that takes a single user's inputs, evaluates them, and returns the result to the user. 25 | 26 | .. image:: _static/jupyter-design.png 27 | :alt: Jupyter Design 28 | :scale: 70% 29 | 30 | Based on the two-process model concept, we can explain the main components of Jupyter the following way: 31 | 32 | Jupyter Client 33 | ************** 34 | 35 | * It allows a user to send code to the kernel and it could be in a form of a `Qt Console `_ or a browser via notebook documents. 36 | * From a REPL perspective, the client does the read and print operations. 37 | * Notebooks are hosted by the Jupyter web server which uses Tornado to serve HTTP requests. 38 | 39 | Jupyter Kernel 40 | ************** 41 | 42 | * It receives the code sent by the client, executes it, and returns the results back to the client for display. A kernel process can have multiple clients communicating with it which is why this model is also referred as the decoupled two-process model. 43 | * From a REPL perspective, the kernel does the evaluate operation. 
44 | * kernel and clients communicate via an interactive computing protocol based on an asynchronous messaging library named `ZeroMQ `_ (low-level transport layer) and WebSockets (TCP-based) 45 | 46 | Jupyter Notebook Document 47 | ************************* 48 | 49 | * Notebooks are automatically saved and stored on disk in the open source JavaScript Object Notation (JSON) format and with a .ipynb extension. 50 | 51 | Jupyter Notebooks Servers 52 | ######################### 53 | 54 | .. toctree:: 55 | :maxdepth: 2 56 | 57 | Jupyter Spark 58 | Jupyter Hunt 59 | Jupyter RTO -------------------------------------------------------------------------------- /docs/build/html/_sources/jupyter_hunt.rst.txt: -------------------------------------------------------------------------------- 1 | Jupyter Hunt Server 2 | =================== 3 | 4 | A notebook server built for defensive operators with several tools to connect to known SIEMs and be able to analyze data to find potential adversaries in the network. 5 | This server is built on the top of the `Jupyter Spark` server available in this repo in order to provide advanced analytics capabilities via Apache Spark. 6 | 7 | Jupyter Python Libraries 8 | ######################## 9 | 10 | Pandas 11 | ****** 12 | 13 | `Pandas `_ is an open source, BSD-licensed library providing high-performance, easy-to-use data structures and data analysis tools for the Python programming language. 14 | 15 | Altair 16 | ****** 17 | 18 | `Altair `_ is a declarative statistical visualization library for Python. 19 | With Altair, you can spend more time understanding your data and its meaning. 20 | Altair's API is simple, friendly and consistent and built on top of the powerful `Vega-Lite `_ JSON specification. 21 | 22 | S3Fs 23 | **** 24 | 25 | `S3Fs `_ is a Pythonic file interface to S3. It builds on top of `boto3 `_. 
26 | The top-level class S3FileSystem holds connection information and allows typical file-system style operations like cp, mv, ls, du, glob, etc., as well as put/get of local files to/from S3. 27 | 28 | Elasticsearch-DSL 29 | ***************** 30 | 31 | `Elasticsearch DSL `_ is a high-level library whose aim is to help with writing and running queries against Elasticsearch. 32 | It is built on top of the official low-level client (`elasticsearch-py `_). 33 | It provides a more convenient and idiomatic way to write and manipulate queries. 34 | It stays close to the Elasticsearch JSON DSL, mirroring its terminology and structure. 35 | It exposes the whole range of the DSL from Python either directly using defined classes or queryset-like expressions. 36 | 37 | Matplotlib 38 | ********** 39 | 40 | `Matplotlib `_ is a Python 2D plotting library which produces publication-quality figures in a variety of hardcopy formats and interactive environments across platforms. 41 | Matplotlib can be used in Python scripts, the Python and IPython shell (à la MATLAB or Mathematica), web application servers, and various graphical user interface toolkits. 42 | 43 | Scikit-learn 44 | ************ 45 | 46 | `Scikit-learn `_ is a Python module for machine learning built on top of SciPy and distributed under the 3-Clause BSD license. 47 | 48 | KSQL-Python 49 | *********** 50 | 51 | `KSQL-Python `_ is a python wrapper for the KSQL REST API. Easily interact with the KSQL REST API using this library. 52 | 53 | Confluent-Kafka-Python 54 | ********************** 55 | 56 | `Confluent-kafka-python `_ is Confluent's Python client for `Apache Kafka `_ and the `Confluent Platform `_. 57 | 58 | Splunk-SDK 59 | ********** 60 | 61 | The `Splunk Software Development Kit (SDK) `_ for Python contains library code and examples designed to enable developers to build applications using Splunk.
62 | 63 | Kqlmagic 64 | ******** 65 | 66 | The `Kqlmagic `_ magic extension enables notebook experience, exploring Microsoft Azure Monitor data: Azure Data Explorer (Kusto), ApplicationInsights, and LogAnalytics data, from Jupyter notebook (Python3 kernel), using kql (Kusto Query language). 67 | 68 | Neo4j 69 | ***** 70 | 71 | The official `Neo4j driver for Python `_ supports Neo4j 3.0 and above and Python versions 2.7, 3.4, 3.5, 3.6, and 3.7. 72 | It connects to the database using the binary protocol. It aims to be minimal, while being idiomatic to Python. 73 | 74 | Networkx 75 | ******** 76 | 77 | `NetworkX `_ is a Python package for the creation, manipulation, and study of the structure, dynamics, and functions of complex networks. 78 | 79 | Nxviz 80 | ***** 81 | 82 | `Nxviz `_ is a graph visualization package for NetworkX. With nxviz, you can create beautiful graph visualizations by a declarative API. 83 | 84 | Jupyter Kernels Available 85 | ######################### 86 | 87 | IPython Kernel (Python) 88 | ************************* 89 | 90 | The Jupyter team maintains the `IPython kernel `_ since the Jupyter notebook server depends on the IPython kernel functionality. 91 | Many other languages, in addition to Python, may be used in the notebook. 92 | 93 | PySpark Kernel (Python) 94 | ************************ 95 | 96 | A python Kernel to enable `Apache Spark for python `_. 97 | Writing PySpark Applications is really no different than writing normal Python applications or packages. 98 | It’s quite similar to writing command-line applications in particular. 99 | Spark doesn’t have a build concept, just Python scripts, so to run an application, you simply execute the script against the cluster. 100 | 101 | Spylon Kernel (Scala/Python) 102 | ***************************** 103 | 104 | A Scala kernel for Apache Spark that uses `metakernel `_ in combination with `py4j `_. 105 | 106 | R Kernel (R) 107 | ************ 108 | 109 | An R kernel for `Apache SparkR `_.
110 | SparkR is an R package that provides a light-weight frontend to use Apache Spark from R. 111 | In Spark 2.4.1, SparkR provides a distributed data frame implementation that supports operations like selection, filtering, aggregation etc. (similar to R data frames, dplyr) but on large datasets. 112 | SparkR also supports distributed machine learning using MLlib. -------------------------------------------------------------------------------- /docs/build/html/_sources/jupyter_rto.rst.txt: -------------------------------------------------------------------------------- 1 | Jupyter Red Team Operations (RTO) Server 2 | ======================================== 3 | 4 | A notebook server built for offensive operators with a few libraries to connect to known tools such as Bloodhound and Cobalt Strike. 5 | 6 | Jupyter Python Libraries 7 | ######################## 8 | 9 | Neo4j Python Driver 10 | ******************* 11 | 12 | `Neo4j Bolt driver `_ for Python 13 | 14 | PyCobalt 15 | ******** 16 | 17 | `PyCobalt `_ is a Python API for Cobalt Strike 18 | 19 | Jupyter Kernels Available 20 | ######################### 21 | 22 | IPython Kernel (Python) 23 | ************************* 24 | 25 | The Jupyter team maintains the `IPython kernel `_ since the Jupyter notebook server depends on the IPython kernel functionality. 26 | Many other languages, in addition to Python, may be used in the notebook. -------------------------------------------------------------------------------- /docs/build/html/_sources/jupyter_spark.rst.txt: -------------------------------------------------------------------------------- 1 | Jupyter Spark Server 2 | ==================== 3 | 4 | A notebook server built for any operator looking to leverage advanced analytics provided by Apache Spark.
5 | 6 | Jupyter Python Libraries 7 | ######################## 8 | 9 | Pandas 10 | ****** 11 | 12 | `Pandas `_ is an open source, BSD-licensed library providing high-performance, easy-to-use data structures and data analysis tools for the Python programming language. 13 | 14 | Jupyter Kernels Available 15 | ######################### 16 | 17 | IPython Kernel (Python) 18 | ************************* 19 | 20 | The Jupyter team maintains the `IPython kernel `_ since the Jupyter notebook server depends on the IPython kernel functionality. 21 | Many other languages, in addition to Python, may be used in the notebook. 22 | 23 | PySpark Kernel (Python) 24 | ************************ 25 | 26 | A python Kernel to enable `Apache Spark for python `_. 27 | Writing PySpark Applications is really no different than writing normal Python applications or packages. 28 | It’s quite similar to writing command-line applications in particular. 29 | Spark doesn’t have a build concept, just Python scripts, so to run an application, you simply execute the script against the cluster. 30 | 31 | Spylon Kernel (Scala/Python) 32 | ***************************** 33 | 34 | A Scala kernel for Apache Spark that uses `metakernel `_ in combination with `py4j `_. 35 | 36 | R Kernel (R) 37 | ************ 38 | 39 | An R kernel for `Apache SparkR `_. 40 | SparkR is an R package that provides a light-weight frontend to use Apache Spark from R. 41 | In Spark 2.4.1, SparkR provides a distributed data frame implementation that supports operations like selection, filtering, aggregation etc. (similar to R data frames, dplyr) but on large datasets. 42 | SparkR also supports distributed machine learning using MLlib. -------------------------------------------------------------------------------- /docs/build/html/_sources/zeppelin.rst.txt: -------------------------------------------------------------------------------- 1 | Zeppelin Notebook 2 | ================= 3 | 4 | Coming soon..
-------------------------------------------------------------------------------- /docs/build/html/_static/ajax-loader.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/ajax-loader.gif -------------------------------------------------------------------------------- /docs/build/html/_static/comment-bright.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/comment-bright.png -------------------------------------------------------------------------------- /docs/build/html/_static/comment-close.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/comment-close.png -------------------------------------------------------------------------------- /docs/build/html/_static/comment.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/comment.png -------------------------------------------------------------------------------- /docs/build/html/_static/css/badge_only.css: -------------------------------------------------------------------------------- 1 | .fa:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-weight:normal;font-style:normal;src:url("../fonts/fontawesome-webfont.eot");src:url("../fonts/fontawesome-webfont.eot?#iefix") format("embedded-opentype"),url("../fonts/fontawesome-webfont.woff") 
format("woff"),url("../fonts/fontawesome-webfont.ttf") format("truetype"),url("../fonts/fontawesome-webfont.svg#FontAwesome") format("svg")}.fa:before{display:inline-block;font-family:FontAwesome;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .fa{display:inline-block;text-decoration:inherit}li .fa{display:inline-block}li .fa-large:before,li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before,ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before{content:""}.icon-book:before{content:""}.fa-caret-down:before{content:""}.icon-caret-down:before{content:""}.fa-caret-up:before{content:""}.icon-caret-up:before{content:""}.fa-caret-left:before{content:""}.icon-caret-left:before{content:""}.fa-caret-right:before{content:""}.icon-caret-right:before{content:""}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980B9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27AE60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book{float:left}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#E74C3C;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#F1C40F;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions 
.rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge .fa-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book{float:left}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}} 2 | -------------------------------------------------------------------------------- /docs/build/html/_static/docker-containers.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/docker-containers.png -------------------------------------------------------------------------------- /docs/build/html/_static/doctools.js: -------------------------------------------------------------------------------- 1 | /* 2 | * doctools.js 3 | * ~~~~~~~~~~~ 4 | * 5 | * Sphinx JavaScript utilities for all documentation. 6 | * 7 | * :copyright: Copyright 2007-2019 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 
9 | * 10 | */ 11 | 12 | /** 13 | * select a different prefix for underscore 14 | */ 15 | $u = _.noConflict(); 16 | 17 | /** 18 | * make the code below compatible with browsers without 19 | * an installed firebug like debugger 20 | if (!window.console || !console.firebug) { 21 | var names = ["log", "debug", "info", "warn", "error", "assert", "dir", 22 | "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", 23 | "profile", "profileEnd"]; 24 | window.console = {}; 25 | for (var i = 0; i < names.length; ++i) 26 | window.console[names[i]] = function() {}; 27 | } 28 | */ 29 | 30 | /** 31 | * small helper function to urldecode strings 32 | */ 33 | jQuery.urldecode = function(x) { 34 | return decodeURIComponent(x).replace(/\+/g, ' '); 35 | }; 36 | 37 | /** 38 | * small helper function to urlencode strings 39 | */ 40 | jQuery.urlencode = encodeURIComponent; 41 | 42 | /** 43 | * This function returns the parsed url parameters of the 44 | * current request. Multiple values per key are supported, 45 | * it will always return arrays of strings for the value parts. 46 | */ 47 | jQuery.getQueryParameters = function(s) { 48 | if (typeof s === 'undefined') 49 | s = document.location.search; 50 | var parts = s.substr(s.indexOf('?') + 1).split('&'); 51 | var result = {}; 52 | for (var i = 0; i < parts.length; i++) { 53 | var tmp = parts[i].split('=', 2); 54 | var key = jQuery.urldecode(tmp[0]); 55 | var value = jQuery.urldecode(tmp[1]); 56 | if (key in result) 57 | result[key].push(value); 58 | else 59 | result[key] = [value]; 60 | } 61 | return result; 62 | }; 63 | 64 | /** 65 | * highlight a given string on a jquery object by wrapping it in 66 | * span elements with the given class name. 
67 | */ 68 | jQuery.fn.highlightText = function(text, className) { 69 | function highlight(node, addItems) { 70 | if (node.nodeType === 3) { 71 | var val = node.nodeValue; 72 | var pos = val.toLowerCase().indexOf(text); 73 | if (pos >= 0 && 74 | !jQuery(node.parentNode).hasClass(className) && 75 | !jQuery(node.parentNode).hasClass("nohighlight")) { 76 | var span; 77 | var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); 78 | if (isInSVG) { 79 | span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); 80 | } else { 81 | span = document.createElement("span"); 82 | span.className = className; 83 | } 84 | span.appendChild(document.createTextNode(val.substr(pos, text.length))); 85 | node.parentNode.insertBefore(span, node.parentNode.insertBefore( 86 | document.createTextNode(val.substr(pos + text.length)), 87 | node.nextSibling)); 88 | node.nodeValue = val.substr(0, pos); 89 | if (isInSVG) { 90 | var bbox = span.getBBox(); 91 | var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); 92 | rect.x.baseVal.value = bbox.x; 93 | rect.y.baseVal.value = bbox.y; 94 | rect.width.baseVal.value = bbox.width; 95 | rect.height.baseVal.value = bbox.height; 96 | rect.setAttribute('class', className); 97 | var parentOfText = node.parentNode.parentNode; 98 | addItems.push({ 99 | "parent": node.parentNode, 100 | "target": rect}); 101 | } 102 | } 103 | } 104 | else if (!jQuery(node).is("button, select, textarea")) { 105 | jQuery.each(node.childNodes, function() { 106 | highlight(this, addItems); 107 | }); 108 | } 109 | } 110 | var addItems = []; 111 | var result = this.each(function() { 112 | highlight(this, addItems); 113 | }); 114 | for (var i = 0; i < addItems.length; ++i) { 115 | jQuery(addItems[i].parent).before(addItems[i].target); 116 | } 117 | return result; 118 | }; 119 | 120 | /* 121 | * backward compatibility for jQuery.browser 122 | * This will be supported until firefox bug is fixed. 
123 | */ 124 | if (!jQuery.browser) { 125 | jQuery.uaMatch = function(ua) { 126 | ua = ua.toLowerCase(); 127 | 128 | var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || 129 | /(webkit)[ \/]([\w.]+)/.exec(ua) || 130 | /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || 131 | /(msie) ([\w.]+)/.exec(ua) || 132 | ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || 133 | []; 134 | 135 | return { 136 | browser: match[ 1 ] || "", 137 | version: match[ 2 ] || "0" 138 | }; 139 | }; 140 | jQuery.browser = {}; 141 | jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; 142 | } 143 | 144 | /** 145 | * Small JavaScript module for the documentation. 146 | */ 147 | var Documentation = { 148 | 149 | init : function() { 150 | this.fixFirefoxAnchorBug(); 151 | this.highlightSearchWords(); 152 | this.initIndexTable(); 153 | if (DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) { 154 | this.initOnKeyListeners(); 155 | } 156 | }, 157 | 158 | /** 159 | * i18n support 160 | */ 161 | TRANSLATIONS : {}, 162 | PLURAL_EXPR : function(n) { return n === 1 ? 0 : 1; }, 163 | LOCALE : 'unknown', 164 | 165 | // gettext and ngettext don't access this so that the functions 166 | // can safely bound to a different name (_ = Documentation.gettext) 167 | gettext : function(string) { 168 | var translated = Documentation.TRANSLATIONS[string]; 169 | if (typeof translated === 'undefined') 170 | return string; 171 | return (typeof translated === 'string') ? translated : translated[0]; 172 | }, 173 | 174 | ngettext : function(singular, plural, n) { 175 | var translated = Documentation.TRANSLATIONS[singular]; 176 | if (typeof translated === 'undefined') 177 | return (n == 1) ? 
singular : plural; 178 | return translated[Documentation.PLURALEXPR(n)]; 179 | }, 180 | 181 | addTranslations : function(catalog) { 182 | for (var key in catalog.messages) 183 | this.TRANSLATIONS[key] = catalog.messages[key]; 184 | this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); 185 | this.LOCALE = catalog.locale; 186 | }, 187 | 188 | /** 189 | * add context elements like header anchor links 190 | */ 191 | addContextElements : function() { 192 | $('div[id] > :header:first').each(function() { 193 | $('\u00B6'). 194 | attr('href', '#' + this.id). 195 | attr('title', _('Permalink to this headline')). 196 | appendTo(this); 197 | }); 198 | $('dt[id]').each(function() { 199 | $('\u00B6'). 200 | attr('href', '#' + this.id). 201 | attr('title', _('Permalink to this definition')). 202 | appendTo(this); 203 | }); 204 | }, 205 | 206 | /** 207 | * workaround a firefox stupidity 208 | * see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075 209 | */ 210 | fixFirefoxAnchorBug : function() { 211 | if (document.location.hash && $.browser.mozilla) 212 | window.setTimeout(function() { 213 | document.location.href += ''; 214 | }, 10); 215 | }, 216 | 217 | /** 218 | * highlight the search words provided in the url in the text 219 | */ 220 | highlightSearchWords : function() { 221 | var params = $.getQueryParameters(); 222 | var terms = (params.highlight) ? 
params.highlight[0].split(/\s+/) : []; 223 | if (terms.length) { 224 | var body = $('div.body'); 225 | if (!body.length) { 226 | body = $('body'); 227 | } 228 | window.setTimeout(function() { 229 | $.each(terms, function() { 230 | body.highlightText(this.toLowerCase(), 'highlighted'); 231 | }); 232 | }, 10); 233 | $('') 235 | .appendTo($('#searchbox')); 236 | } 237 | }, 238 | 239 | /** 240 | * init the domain index toggle buttons 241 | */ 242 | initIndexTable : function() { 243 | var togglers = $('img.toggler').click(function() { 244 | var src = $(this).attr('src'); 245 | var idnum = $(this).attr('id').substr(7); 246 | $('tr.cg-' + idnum).toggle(); 247 | if (src.substr(-9) === 'minus.png') 248 | $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); 249 | else 250 | $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); 251 | }).css('display', ''); 252 | if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { 253 | togglers.click(); 254 | } 255 | }, 256 | 257 | /** 258 | * helper function to hide the search marks again 259 | */ 260 | hideSearchWords : function() { 261 | $('#searchbox .highlight-link').fadeOut(300); 262 | $('span.highlighted').removeClass('highlighted'); 263 | }, 264 | 265 | /** 266 | * make the url absolute 267 | */ 268 | makeURL : function(relativeURL) { 269 | return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; 270 | }, 271 | 272 | /** 273 | * get the current relative url 274 | */ 275 | getCurrentURL : function() { 276 | var path = document.location.pathname; 277 | var parts = path.split(/\//); 278 | $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { 279 | if (this === '..') 280 | parts.pop(); 281 | }); 282 | var url = parts.join('/'); 283 | return path.substring(url.lastIndexOf('/') + 1, path.length - 1); 284 | }, 285 | 286 | initOnKeyListeners: function() { 287 | $(document).keyup(function(event) { 288 | var activeElementType = document.activeElement.tagName; 289 | // don't navigate when in search box or textarea 290 | 
if (activeElementType !== 'TEXTAREA' && activeElementType !== 'INPUT' && activeElementType !== 'SELECT') { 291 | switch (event.keyCode) { 292 | case 37: // left 293 | var prevHref = $('link[rel="prev"]').prop('href'); 294 | if (prevHref) { 295 | window.location.href = prevHref; 296 | return false; 297 | } 298 | case 39: // right 299 | var nextHref = $('link[rel="next"]').prop('href'); 300 | if (nextHref) { 301 | window.location.href = nextHref; 302 | return false; 303 | } 304 | } 305 | } 306 | }); 307 | } 308 | }; 309 | 310 | // quick alias for translations 311 | _ = Documentation.gettext; 312 | 313 | $(document).ready(function() { 314 | Documentation.init(); 315 | }); 316 | -------------------------------------------------------------------------------- /docs/build/html/_static/documentation_options.js: -------------------------------------------------------------------------------- 1 | var DOCUMENTATION_OPTIONS = { 2 | URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'), 3 | VERSION: '0.0.1', 4 | LANGUAGE: 'None', 5 | COLLAPSE_INDEX: false, 6 | FILE_SUFFIX: '.html', 7 | HAS_SOURCE: true, 8 | SOURCELINK_SUFFIX: '.txt', 9 | NAVIGATION_WITH_KEYS: false, 10 | }; -------------------------------------------------------------------------------- /docs/build/html/_static/down-pressed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/down-pressed.png -------------------------------------------------------------------------------- /docs/build/html/_static/down.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/down.png -------------------------------------------------------------------------------- /docs/build/html/_static/file.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/file.png -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Inconsolata-Bold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Inconsolata-Bold.ttf -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Inconsolata-Regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Inconsolata-Regular.ttf -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Inconsolata.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Inconsolata.ttf -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato-Bold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Lato-Bold.ttf -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato-Regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Lato-Regular.ttf 
-------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-bold.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Lato/lato-bold.eot -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-bold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Lato/lato-bold.ttf -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-bold.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Lato/lato-bold.woff -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Lato/lato-bold.woff2 -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-bolditalic.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Lato/lato-bolditalic.eot -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-bolditalic.ttf: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Lato/lato-bolditalic.ttf -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-bolditalic.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Lato/lato-bolditalic.woff -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-bolditalic.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Lato/lato-bolditalic.woff2 -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-italic.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Lato/lato-italic.eot -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-italic.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Lato/lato-italic.ttf -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-italic.woff: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Lato/lato-italic.woff -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-italic.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Lato/lato-italic.woff2 -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-regular.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Lato/lato-regular.eot -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Lato/lato-regular.ttf -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-regular.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Lato/lato-regular.woff -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-regular.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/Lato/lato-regular.woff2 
-------------------------------------------------------------------------------- /docs/build/html/_static/fonts/RobotoSlab-Bold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/RobotoSlab-Bold.ttf -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/RobotoSlab-Regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/RobotoSlab-Regular.ttf -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff -------------------------------------------------------------------------------- 
/docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2 -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2 -------------------------------------------------------------------------------- 
/docs/build/html/_static/fonts/fontawesome-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/fontawesome-webfont.eot -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/fontawesome-webfont.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/fontawesome-webfont.ttf -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/fontawesome-webfont.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/fontawesome-webfont.woff -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/fontawesome-webfont.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/fonts/fontawesome-webfont.woff2 -------------------------------------------------------------------------------- /docs/build/html/_static/js/theme.js: -------------------------------------------------------------------------------- 1 | /* sphinx_rtd_theme version 0.4.3 | MIT license */ 2 | /* Built 20190212 16:02 */ 3 | require=function r(s,a,l){function c(e,n){if(!a[e]){if(!s[e]){var i="function"==typeof require&&require;if(!n&&i)return i(e,!0);if(u)return u(e,!0);var t=new Error("Cannot find module '"+e+"'");throw t.code="MODULE_NOT_FOUND",t}var o=a[e]={exports:{}};s[e][0].call(o.exports,function(n){return 
c(s[e][1][n]||n)},o,o.exports,r,s,a,l)}return a[e].exports}for(var u="function"==typeof require&&require,n=0;n"),i("table.docutils.footnote").wrap("
"),i("table.docutils.citation").wrap("
"),i(".wy-menu-vertical ul").not(".simple").siblings("a").each(function(){var e=i(this);expand=i(''),expand.on("click",function(n){return t.toggleCurrent(e),n.stopPropagation(),!1}),e.prepend(expand)})},reset:function(){var n=encodeURI(window.location.hash)||"#";try{var e=$(".wy-menu-vertical"),i=e.find('[href="'+n+'"]');if(0===i.length){var t=$('.document [id="'+n.substring(1)+'"]').closest("div.section");0===(i=e.find('[href="#'+t.attr("id")+'"]')).length&&(i=e.find('[href="#"]'))}0this.docHeight||(this.navBar.scrollTop(i),this.winPosition=n)},onResize:function(){this.winResize=!1,this.winHeight=this.win.height(),this.docHeight=$(document).height()},hashChange:function(){this.linkScroll=!0,this.win.one("hashchange",function(){this.linkScroll=!1})},toggleCurrent:function(n){var e=n.closest("li");e.siblings("li.current").removeClass("current"),e.siblings().find("li.current").removeClass("current"),e.find("> ul li.current").removeClass("current"),e.toggleClass("current")}},"undefined"!=typeof window&&(window.SphinxRtdTheme={Navigation:e.exports.ThemeNav,StickyNav:e.exports.ThemeNav}),function(){for(var r=0,n=["ms","moz","webkit","o"],e=0;e0 62 | var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 63 | var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 64 | var s_v = "^(" + C + ")?" 
+ v; // vowel in stem 65 | 66 | this.stemWord = function (w) { 67 | var stem; 68 | var suffix; 69 | var firstch; 70 | var origword = w; 71 | 72 | if (w.length < 3) 73 | return w; 74 | 75 | var re; 76 | var re2; 77 | var re3; 78 | var re4; 79 | 80 | firstch = w.substr(0,1); 81 | if (firstch == "y") 82 | w = firstch.toUpperCase() + w.substr(1); 83 | 84 | // Step 1a 85 | re = /^(.+?)(ss|i)es$/; 86 | re2 = /^(.+?)([^s])s$/; 87 | 88 | if (re.test(w)) 89 | w = w.replace(re,"$1$2"); 90 | else if (re2.test(w)) 91 | w = w.replace(re2,"$1$2"); 92 | 93 | // Step 1b 94 | re = /^(.+?)eed$/; 95 | re2 = /^(.+?)(ed|ing)$/; 96 | if (re.test(w)) { 97 | var fp = re.exec(w); 98 | re = new RegExp(mgr0); 99 | if (re.test(fp[1])) { 100 | re = /.$/; 101 | w = w.replace(re,""); 102 | } 103 | } 104 | else if (re2.test(w)) { 105 | var fp = re2.exec(w); 106 | stem = fp[1]; 107 | re2 = new RegExp(s_v); 108 | if (re2.test(stem)) { 109 | w = stem; 110 | re2 = /(at|bl|iz)$/; 111 | re3 = new RegExp("([^aeiouylsz])\\1$"); 112 | re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); 113 | if (re2.test(w)) 114 | w = w + "e"; 115 | else if (re3.test(w)) { 116 | re = /.$/; 117 | w = w.replace(re,""); 118 | } 119 | else if (re4.test(w)) 120 | w = w + "e"; 121 | } 122 | } 123 | 124 | // Step 1c 125 | re = /^(.+?)y$/; 126 | if (re.test(w)) { 127 | var fp = re.exec(w); 128 | stem = fp[1]; 129 | re = new RegExp(s_v); 130 | if (re.test(stem)) 131 | w = stem + "i"; 132 | } 133 | 134 | // Step 2 135 | re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; 136 | if (re.test(w)) { 137 | var fp = re.exec(w); 138 | stem = fp[1]; 139 | suffix = fp[2]; 140 | re = new RegExp(mgr0); 141 | if (re.test(stem)) 142 | w = stem + step2list[suffix]; 143 | } 144 | 145 | // Step 3 146 | re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; 147 | if (re.test(w)) { 148 | var fp = re.exec(w); 149 | stem = fp[1]; 150 | suffix = fp[2]; 151 | re = 
new RegExp(mgr0); 152 | if (re.test(stem)) 153 | w = stem + step3list[suffix]; 154 | } 155 | 156 | // Step 4 157 | re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; 158 | re2 = /^(.+?)(s|t)(ion)$/; 159 | if (re.test(w)) { 160 | var fp = re.exec(w); 161 | stem = fp[1]; 162 | re = new RegExp(mgr1); 163 | if (re.test(stem)) 164 | w = stem; 165 | } 166 | else if (re2.test(w)) { 167 | var fp = re2.exec(w); 168 | stem = fp[1] + fp[2]; 169 | re2 = new RegExp(mgr1); 170 | if (re2.test(stem)) 171 | w = stem; 172 | } 173 | 174 | // Step 5 175 | re = /^(.+?)e$/; 176 | if (re.test(w)) { 177 | var fp = re.exec(w); 178 | stem = fp[1]; 179 | re = new RegExp(mgr1); 180 | re2 = new RegExp(meq1); 181 | re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); 182 | if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) 183 | w = stem; 184 | } 185 | re = /ll$/; 186 | re2 = new RegExp(mgr1); 187 | if (re.test(w) && re2.test(w)) { 188 | re = /.$/; 189 | w = w.replace(re,""); 190 | } 191 | 192 | // and turn initial Y back to y 193 | if (firstch == "y") 194 | w = firstch.toLowerCase() + w.substr(1); 195 | return w; 196 | } 197 | } 198 | 199 | 200 | 201 | 202 | 203 | var splitChars = (function() { 204 | var result = {}; 205 | var singles = [96, 180, 187, 191, 215, 247, 749, 885, 903, 907, 909, 930, 1014, 1648, 206 | 1748, 1809, 2416, 2473, 2481, 2526, 2601, 2609, 2612, 2615, 2653, 2702, 207 | 2706, 2729, 2737, 2740, 2857, 2865, 2868, 2910, 2928, 2948, 2961, 2971, 208 | 2973, 3085, 3089, 3113, 3124, 3213, 3217, 3241, 3252, 3295, 3341, 3345, 209 | 3369, 3506, 3516, 3633, 3715, 3721, 3736, 3744, 3748, 3750, 3756, 3761, 210 | 3781, 3912, 4239, 4347, 4681, 4695, 4697, 4745, 4785, 4799, 4801, 4823, 211 | 4881, 5760, 5901, 5997, 6313, 7405, 8024, 8026, 8028, 8030, 8117, 8125, 212 | 8133, 8181, 8468, 8485, 8487, 8489, 8494, 8527, 11311, 11359, 11687, 11695, 213 | 11703, 11711, 11719, 11727, 11735, 12448, 12539, 43010, 43014, 43019, 43587, 214 | 43696, 43713, 
64286, 64297, 64311, 64317, 64319, 64322, 64325, 65141]; 215 | var i, j, start, end; 216 | for (i = 0; i < singles.length; i++) { 217 | result[singles[i]] = true; 218 | } 219 | var ranges = [[0, 47], [58, 64], [91, 94], [123, 169], [171, 177], [182, 184], [706, 709], 220 | [722, 735], [741, 747], [751, 879], [888, 889], [894, 901], [1154, 1161], 221 | [1318, 1328], [1367, 1368], [1370, 1376], [1416, 1487], [1515, 1519], [1523, 1568], 222 | [1611, 1631], [1642, 1645], [1750, 1764], [1767, 1773], [1789, 1790], [1792, 1807], 223 | [1840, 1868], [1958, 1968], [1970, 1983], [2027, 2035], [2038, 2041], [2043, 2047], 224 | [2070, 2073], [2075, 2083], [2085, 2087], [2089, 2307], [2362, 2364], [2366, 2383], 225 | [2385, 2391], [2402, 2405], [2419, 2424], [2432, 2436], [2445, 2446], [2449, 2450], 226 | [2483, 2485], [2490, 2492], [2494, 2509], [2511, 2523], [2530, 2533], [2546, 2547], 227 | [2554, 2564], [2571, 2574], [2577, 2578], [2618, 2648], [2655, 2661], [2672, 2673], 228 | [2677, 2692], [2746, 2748], [2750, 2767], [2769, 2783], [2786, 2789], [2800, 2820], 229 | [2829, 2830], [2833, 2834], [2874, 2876], [2878, 2907], [2914, 2917], [2930, 2946], 230 | [2955, 2957], [2966, 2968], [2976, 2978], [2981, 2983], [2987, 2989], [3002, 3023], 231 | [3025, 3045], [3059, 3076], [3130, 3132], [3134, 3159], [3162, 3167], [3170, 3173], 232 | [3184, 3191], [3199, 3204], [3258, 3260], [3262, 3293], [3298, 3301], [3312, 3332], 233 | [3386, 3388], [3390, 3423], [3426, 3429], [3446, 3449], [3456, 3460], [3479, 3481], 234 | [3518, 3519], [3527, 3584], [3636, 3647], [3655, 3663], [3674, 3712], [3717, 3718], 235 | [3723, 3724], [3726, 3731], [3752, 3753], [3764, 3772], [3774, 3775], [3783, 3791], 236 | [3802, 3803], [3806, 3839], [3841, 3871], [3892, 3903], [3949, 3975], [3980, 4095], 237 | [4139, 4158], [4170, 4175], [4182, 4185], [4190, 4192], [4194, 4196], [4199, 4205], 238 | [4209, 4212], [4226, 4237], [4250, 4255], [4294, 4303], [4349, 4351], [4686, 4687], 239 | [4702, 4703], [4750, 
4751], [4790, 4791], [4806, 4807], [4886, 4887], [4955, 4968], 240 | [4989, 4991], [5008, 5023], [5109, 5120], [5741, 5742], [5787, 5791], [5867, 5869], 241 | [5873, 5887], [5906, 5919], [5938, 5951], [5970, 5983], [6001, 6015], [6068, 6102], 242 | [6104, 6107], [6109, 6111], [6122, 6127], [6138, 6159], [6170, 6175], [6264, 6271], 243 | [6315, 6319], [6390, 6399], [6429, 6469], [6510, 6511], [6517, 6527], [6572, 6592], 244 | [6600, 6607], [6619, 6655], [6679, 6687], [6741, 6783], [6794, 6799], [6810, 6822], 245 | [6824, 6916], [6964, 6980], [6988, 6991], [7002, 7042], [7073, 7085], [7098, 7167], 246 | [7204, 7231], [7242, 7244], [7294, 7400], [7410, 7423], [7616, 7679], [7958, 7959], 247 | [7966, 7967], [8006, 8007], [8014, 8015], [8062, 8063], [8127, 8129], [8141, 8143], 248 | [8148, 8149], [8156, 8159], [8173, 8177], [8189, 8303], [8306, 8307], [8314, 8318], 249 | [8330, 8335], [8341, 8449], [8451, 8454], [8456, 8457], [8470, 8472], [8478, 8483], 250 | [8506, 8507], [8512, 8516], [8522, 8525], [8586, 9311], [9372, 9449], [9472, 10101], 251 | [10132, 11263], [11493, 11498], [11503, 11516], [11518, 11519], [11558, 11567], 252 | [11622, 11630], [11632, 11647], [11671, 11679], [11743, 11822], [11824, 12292], 253 | [12296, 12320], [12330, 12336], [12342, 12343], [12349, 12352], [12439, 12444], 254 | [12544, 12548], [12590, 12592], [12687, 12689], [12694, 12703], [12728, 12783], 255 | [12800, 12831], [12842, 12880], [12896, 12927], [12938, 12976], [12992, 13311], 256 | [19894, 19967], [40908, 40959], [42125, 42191], [42238, 42239], [42509, 42511], 257 | [42540, 42559], [42592, 42593], [42607, 42622], [42648, 42655], [42736, 42774], 258 | [42784, 42785], [42889, 42890], [42893, 43002], [43043, 43055], [43062, 43071], 259 | [43124, 43137], [43188, 43215], [43226, 43249], [43256, 43258], [43260, 43263], 260 | [43302, 43311], [43335, 43359], [43389, 43395], [43443, 43470], [43482, 43519], 261 | [43561, 43583], [43596, 43599], [43610, 43615], [43639, 43641], [43643, 43647], 
262 | [43698, 43700], [43703, 43704], [43710, 43711], [43715, 43738], [43742, 43967], 263 | [44003, 44015], [44026, 44031], [55204, 55215], [55239, 55242], [55292, 55295], 264 | [57344, 63743], [64046, 64047], [64110, 64111], [64218, 64255], [64263, 64274], 265 | [64280, 64284], [64434, 64466], [64830, 64847], [64912, 64913], [64968, 65007], 266 | [65020, 65135], [65277, 65295], [65306, 65312], [65339, 65344], [65371, 65381], 267 | [65471, 65473], [65480, 65481], [65488, 65489], [65496, 65497]]; 268 | for (i = 0; i < ranges.length; i++) { 269 | start = ranges[i][0]; 270 | end = ranges[i][1]; 271 | for (j = start; j <= end; j++) { 272 | result[j] = true; 273 | } 274 | } 275 | return result; 276 | })(); 277 | 278 | function splitQuery(query) { 279 | var result = []; 280 | var start = -1; 281 | for (var i = 0; i < query.length; i++) { 282 | if (splitChars[query.charCodeAt(i)]) { 283 | if (start !== -1) { 284 | result.push(query.slice(start, i)); 285 | start = -1; 286 | } 287 | } else if (start === -1) { 288 | start = i; 289 | } 290 | } 291 | if (start !== -1) { 292 | result.push(query.slice(start)); 293 | } 294 | return result; 295 | } 296 | 297 | 298 | -------------------------------------------------------------------------------- /docs/build/html/_static/minus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/minus.png -------------------------------------------------------------------------------- /docs/build/html/_static/plus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/plus.png -------------------------------------------------------------------------------- /docs/build/html/_static/pygments.css: 
-------------------------------------------------------------------------------- 1 | .highlight .hll { background-color: #ffffcc } 2 | .highlight { background: #f8f8f8; } 3 | .highlight .c { color: #408080; font-style: italic } /* Comment */ 4 | .highlight .err { border: 1px solid #FF0000 } /* Error */ 5 | .highlight .k { color: #008000; font-weight: bold } /* Keyword */ 6 | .highlight .o { color: #666666 } /* Operator */ 7 | .highlight .ch { color: #408080; font-style: italic } /* Comment.Hashbang */ 8 | .highlight .cm { color: #408080; font-style: italic } /* Comment.Multiline */ 9 | .highlight .cp { color: #BC7A00 } /* Comment.Preproc */ 10 | .highlight .cpf { color: #408080; font-style: italic } /* Comment.PreprocFile */ 11 | .highlight .c1 { color: #408080; font-style: italic } /* Comment.Single */ 12 | .highlight .cs { color: #408080; font-style: italic } /* Comment.Special */ 13 | .highlight .gd { color: #A00000 } /* Generic.Deleted */ 14 | .highlight .ge { font-style: italic } /* Generic.Emph */ 15 | .highlight .gr { color: #FF0000 } /* Generic.Error */ 16 | .highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ 17 | .highlight .gi { color: #00A000 } /* Generic.Inserted */ 18 | .highlight .go { color: #888888 } /* Generic.Output */ 19 | .highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */ 20 | .highlight .gs { font-weight: bold } /* Generic.Strong */ 21 | .highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ 22 | .highlight .gt { color: #0044DD } /* Generic.Traceback */ 23 | .highlight .kc { color: #008000; font-weight: bold } /* Keyword.Constant */ 24 | .highlight .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */ 25 | .highlight .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */ 26 | .highlight .kp { color: #008000 } /* Keyword.Pseudo */ 27 | .highlight .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */ 28 | .highlight .kt { color: #B00040 } /* 
Keyword.Type */ 29 | .highlight .m { color: #666666 } /* Literal.Number */ 30 | .highlight .s { color: #BA2121 } /* Literal.String */ 31 | .highlight .na { color: #7D9029 } /* Name.Attribute */ 32 | .highlight .nb { color: #008000 } /* Name.Builtin */ 33 | .highlight .nc { color: #0000FF; font-weight: bold } /* Name.Class */ 34 | .highlight .no { color: #880000 } /* Name.Constant */ 35 | .highlight .nd { color: #AA22FF } /* Name.Decorator */ 36 | .highlight .ni { color: #999999; font-weight: bold } /* Name.Entity */ 37 | .highlight .ne { color: #D2413A; font-weight: bold } /* Name.Exception */ 38 | .highlight .nf { color: #0000FF } /* Name.Function */ 39 | .highlight .nl { color: #A0A000 } /* Name.Label */ 40 | .highlight .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */ 41 | .highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */ 42 | .highlight .nv { color: #19177C } /* Name.Variable */ 43 | .highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */ 44 | .highlight .w { color: #bbbbbb } /* Text.Whitespace */ 45 | .highlight .mb { color: #666666 } /* Literal.Number.Bin */ 46 | .highlight .mf { color: #666666 } /* Literal.Number.Float */ 47 | .highlight .mh { color: #666666 } /* Literal.Number.Hex */ 48 | .highlight .mi { color: #666666 } /* Literal.Number.Integer */ 49 | .highlight .mo { color: #666666 } /* Literal.Number.Oct */ 50 | .highlight .sa { color: #BA2121 } /* Literal.String.Affix */ 51 | .highlight .sb { color: #BA2121 } /* Literal.String.Backtick */ 52 | .highlight .sc { color: #BA2121 } /* Literal.String.Char */ 53 | .highlight .dl { color: #BA2121 } /* Literal.String.Delimiter */ 54 | .highlight .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */ 55 | .highlight .s2 { color: #BA2121 } /* Literal.String.Double */ 56 | .highlight .se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */ 57 | .highlight .sh { color: #BA2121 } /* Literal.String.Heredoc */ 58 | .highlight .si { color: 
#BB6688; font-weight: bold } /* Literal.String.Interpol */ 59 | .highlight .sx { color: #008000 } /* Literal.String.Other */ 60 | .highlight .sr { color: #BB6688 } /* Literal.String.Regex */ 61 | .highlight .s1 { color: #BA2121 } /* Literal.String.Single */ 62 | .highlight .ss { color: #19177C } /* Literal.String.Symbol */ 63 | .highlight .bp { color: #008000 } /* Name.Builtin.Pseudo */ 64 | .highlight .fm { color: #0000FF } /* Name.Function.Magic */ 65 | .highlight .vc { color: #19177C } /* Name.Variable.Class */ 66 | .highlight .vg { color: #19177C } /* Name.Variable.Global */ 67 | .highlight .vi { color: #19177C } /* Name.Variable.Instance */ 68 | .highlight .vm { color: #19177C } /* Name.Variable.Magic */ 69 | .highlight .il { color: #666666 } /* Literal.Number.Integer.Long */ -------------------------------------------------------------------------------- /docs/build/html/_static/underscore.js: -------------------------------------------------------------------------------- 1 | // Underscore.js 1.3.1 2 | // (c) 2009-2012 Jeremy Ashkenas, DocumentCloud Inc. 3 | // Underscore is freely distributable under the MIT license. 4 | // Portions of Underscore are inspired or borrowed from Prototype, 5 | // Oliver Steele's Functional, and John Resig's Micro-Templating. 
6 | // For all details and documentation: 7 | // http://documentcloud.github.com/underscore 8 | (function(){function q(a,c,d){if(a===c)return a!==0||1/a==1/c;if(a==null||c==null)return a===c;if(a._chain)a=a._wrapped;if(c._chain)c=c._wrapped;if(a.isEqual&&b.isFunction(a.isEqual))return a.isEqual(c);if(c.isEqual&&b.isFunction(c.isEqual))return c.isEqual(a);var e=l.call(a);if(e!=l.call(c))return false;switch(e){case "[object String]":return a==String(c);case "[object Number]":return a!=+a?c!=+c:a==0?1/a==1/c:a==+c;case "[object Date]":case "[object Boolean]":return+a==+c;case "[object RegExp]":return a.source== 9 | c.source&&a.global==c.global&&a.multiline==c.multiline&&a.ignoreCase==c.ignoreCase}if(typeof a!="object"||typeof c!="object")return false;for(var f=d.length;f--;)if(d[f]==a)return true;d.push(a);var f=0,g=true;if(e=="[object Array]"){if(f=a.length,g=f==c.length)for(;f--;)if(!(g=f in a==f in c&&q(a[f],c[f],d)))break}else{if("constructor"in a!="constructor"in c||a.constructor!=c.constructor)return false;for(var h in a)if(b.has(a,h)&&(f++,!(g=b.has(c,h)&&q(a[h],c[h],d))))break;if(g){for(h in c)if(b.has(c, 10 | h)&&!f--)break;g=!f}}d.pop();return g}var r=this,G=r._,n={},k=Array.prototype,o=Object.prototype,i=k.slice,H=k.unshift,l=o.toString,I=o.hasOwnProperty,w=k.forEach,x=k.map,y=k.reduce,z=k.reduceRight,A=k.filter,B=k.every,C=k.some,p=k.indexOf,D=k.lastIndexOf,o=Array.isArray,J=Object.keys,s=Function.prototype.bind,b=function(a){return new m(a)};if(typeof exports!=="undefined"){if(typeof module!=="undefined"&&module.exports)exports=module.exports=b;exports._=b}else r._=b;b.VERSION="1.3.1";var j=b.each= 11 | b.forEach=function(a,c,d){if(a!=null)if(w&&a.forEach===w)a.forEach(c,d);else if(a.length===+a.length)for(var e=0,f=a.length;e2;a== 12 | null&&(a=[]);if(y&&a.reduce===y)return e&&(c=b.bind(c,e)),f?a.reduce(c,d):a.reduce(c);j(a,function(a,b,i){f?d=c.call(e,d,a,b,i):(d=a,f=true)});if(!f)throw new TypeError("Reduce of empty array with no initial value");return 
d};b.reduceRight=b.foldr=function(a,c,d,e){var f=arguments.length>2;a==null&&(a=[]);if(z&&a.reduceRight===z)return e&&(c=b.bind(c,e)),f?a.reduceRight(c,d):a.reduceRight(c);var g=b.toArray(a).reverse();e&&!f&&(c=b.bind(c,e));return f?b.reduce(g,c,d,e):b.reduce(g,c)};b.find=b.detect= 13 | function(a,c,b){var e;E(a,function(a,g,h){if(c.call(b,a,g,h))return e=a,true});return e};b.filter=b.select=function(a,c,b){var e=[];if(a==null)return e;if(A&&a.filter===A)return a.filter(c,b);j(a,function(a,g,h){c.call(b,a,g,h)&&(e[e.length]=a)});return e};b.reject=function(a,c,b){var e=[];if(a==null)return e;j(a,function(a,g,h){c.call(b,a,g,h)||(e[e.length]=a)});return e};b.every=b.all=function(a,c,b){var e=true;if(a==null)return e;if(B&&a.every===B)return a.every(c,b);j(a,function(a,g,h){if(!(e= 14 | e&&c.call(b,a,g,h)))return n});return e};var E=b.some=b.any=function(a,c,d){c||(c=b.identity);var e=false;if(a==null)return e;if(C&&a.some===C)return a.some(c,d);j(a,function(a,b,h){if(e||(e=c.call(d,a,b,h)))return n});return!!e};b.include=b.contains=function(a,c){var b=false;if(a==null)return b;return p&&a.indexOf===p?a.indexOf(c)!=-1:b=E(a,function(a){return a===c})};b.invoke=function(a,c){var d=i.call(arguments,2);return b.map(a,function(a){return(b.isFunction(c)?c||a:a[c]).apply(a,d)})};b.pluck= 15 | function(a,c){return b.map(a,function(a){return a[c]})};b.max=function(a,c,d){if(!c&&b.isArray(a))return Math.max.apply(Math,a);if(!c&&b.isEmpty(a))return-Infinity;var e={computed:-Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;b>=e.computed&&(e={value:a,computed:b})});return e.value};b.min=function(a,c,d){if(!c&&b.isArray(a))return Math.min.apply(Math,a);if(!c&&b.isEmpty(a))return Infinity;var e={computed:Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;bd?1:0}),"value")};b.groupBy=function(a,c){var d={},e=b.isFunction(c)?c:function(a){return a[c]};j(a,function(a,b){var c=e(a,b);(d[c]||(d[c]=[])).push(a)});return d};b.sortedIndex=function(a, 17 | 
c,d){d||(d=b.identity);for(var e=0,f=a.length;e>1;d(a[g])=0})})};b.difference=function(a){var c=b.flatten(i.call(arguments,1));return b.filter(a,function(a){return!b.include(c,a)})};b.zip=function(){for(var a=i.call(arguments),c=b.max(b.pluck(a,"length")),d=Array(c),e=0;e=0;d--)b=[a[d].apply(this,b)];return b[0]}}; 24 | b.after=function(a,b){return a<=0?b():function(){if(--a<1)return b.apply(this,arguments)}};b.keys=J||function(a){if(a!==Object(a))throw new TypeError("Invalid object");var c=[],d;for(d in a)b.has(a,d)&&(c[c.length]=d);return c};b.values=function(a){return b.map(a,b.identity)};b.functions=b.methods=function(a){var c=[],d;for(d in a)b.isFunction(a[d])&&c.push(d);return c.sort()};b.extend=function(a){j(i.call(arguments,1),function(b){for(var d in b)a[d]=b[d]});return a};b.defaults=function(a){j(i.call(arguments, 25 | 1),function(b){for(var d in b)a[d]==null&&(a[d]=b[d])});return a};b.clone=function(a){return!b.isObject(a)?a:b.isArray(a)?a.slice():b.extend({},a)};b.tap=function(a,b){b(a);return a};b.isEqual=function(a,b){return q(a,b,[])};b.isEmpty=function(a){if(b.isArray(a)||b.isString(a))return a.length===0;for(var c in a)if(b.has(a,c))return false;return true};b.isElement=function(a){return!!(a&&a.nodeType==1)};b.isArray=o||function(a){return l.call(a)=="[object Array]"};b.isObject=function(a){return a===Object(a)}; 26 | b.isArguments=function(a){return l.call(a)=="[object Arguments]"};if(!b.isArguments(arguments))b.isArguments=function(a){return!(!a||!b.has(a,"callee"))};b.isFunction=function(a){return l.call(a)=="[object Function]"};b.isString=function(a){return l.call(a)=="[object String]"};b.isNumber=function(a){return l.call(a)=="[object Number]"};b.isNaN=function(a){return a!==a};b.isBoolean=function(a){return a===true||a===false||l.call(a)=="[object Boolean]"};b.isDate=function(a){return l.call(a)=="[object Date]"}; 27 | b.isRegExp=function(a){return l.call(a)=="[object RegExp]"};b.isNull=function(a){return 
a===null};b.isUndefined=function(a){return a===void 0};b.has=function(a,b){return I.call(a,b)};b.noConflict=function(){r._=G;return this};b.identity=function(a){return a};b.times=function(a,b,d){for(var e=0;e/g,">").replace(/"/g,""").replace(/'/g,"'").replace(/\//g,"/")};b.mixin=function(a){j(b.functions(a), 28 | function(c){K(c,b[c]=a[c])})};var L=0;b.uniqueId=function(a){var b=L++;return a?a+b:b};b.templateSettings={evaluate:/<%([\s\S]+?)%>/g,interpolate:/<%=([\s\S]+?)%>/g,escape:/<%-([\s\S]+?)%>/g};var t=/.^/,u=function(a){return a.replace(/\\\\/g,"\\").replace(/\\'/g,"'")};b.template=function(a,c){var d=b.templateSettings,d="var __p=[],print=function(){__p.push.apply(__p,arguments);};with(obj||{}){__p.push('"+a.replace(/\\/g,"\\\\").replace(/'/g,"\\'").replace(d.escape||t,function(a,b){return"',_.escape("+ 29 | u(b)+"),'"}).replace(d.interpolate||t,function(a,b){return"',"+u(b)+",'"}).replace(d.evaluate||t,function(a,b){return"');"+u(b).replace(/[\r\n\t]/g," ")+";__p.push('"}).replace(/\r/g,"\\r").replace(/\n/g,"\\n").replace(/\t/g,"\\t")+"');}return __p.join('');",e=new Function("obj","_",d);return c?e(c,b):function(a){return e.call(this,a,b)}};b.chain=function(a){return b(a).chain()};var m=function(a){this._wrapped=a};b.prototype=m.prototype;var v=function(a,c){return c?b(a).chain():a},K=function(a,c){m.prototype[a]= 30 | function(){var a=i.call(arguments);H.call(a,this._wrapped);return v(c.apply(b,a),this._chain)}};b.mixin(b);j("pop,push,reverse,shift,sort,splice,unshift".split(","),function(a){var b=k[a];m.prototype[a]=function(){var d=this._wrapped;b.apply(d,arguments);var e=d.length;(a=="shift"||a=="splice")&&e===0&&delete d[0];return v(d,this._chain)}});j(["concat","join","slice"],function(a){var b=k[a];m.prototype[a]=function(){return v(b.apply(this._wrapped,arguments),this._chain)}});m.prototype.chain=function(){this._chain= 31 | true;return this};m.prototype.value=function(){return this._wrapped}}).call(this); 32 | 
-------------------------------------------------------------------------------- /docs/build/html/_static/up-pressed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/up-pressed.png -------------------------------------------------------------------------------- /docs/build/html/_static/up.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/_static/up.png -------------------------------------------------------------------------------- /docs/build/html/genindex.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | Index — Notebooks Forge 0.0.1 documentation 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 |
45 | 46 | 101 | 102 |
103 | 104 | 105 | 111 | 112 | 113 |
114 | 115 |
116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 |
134 | 135 |
    136 | 137 |
  • Docs »
  • 138 | 139 |
  • Index
  • 140 | 141 | 142 |
  • 143 | 144 | 145 | 146 |
  • 147 | 148 |
149 | 150 | 151 |
152 |
153 |
154 |
155 | 156 | 157 |

Index

158 | 159 |
160 | 161 |
162 | 163 | 164 |
165 | 166 |
167 |
168 | 169 | 170 |
171 | 172 |
173 |

174 | © Copyright 2019, Roberto Rodriguez, Jose Luis Rodriguez 175 | 176 |

177 |
178 | Built with Sphinx using a theme provided by Read the Docs. 179 | 180 |
181 | 182 |
183 |
184 | 185 |
186 | 187 |
188 | 189 | 190 | 191 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | -------------------------------------------------------------------------------- /docs/build/html/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | Notebooks Forge — Notebooks Forge 0.0.1 documentation 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 |
45 | 46 | 101 | 102 |
103 | 104 | 105 | 111 | 112 | 113 |
114 | 115 |
116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 |
134 | 135 |
    136 | 137 |
  • Docs »
  • 138 | 139 |
  • Notebooks Forge
  • 140 | 141 | 142 |
  • 143 | 144 | 145 | View page source 146 | 147 | 148 |
  • 149 | 150 |
151 | 152 | 153 |
154 |
155 |
156 |
157 | 158 |
159 |

Notebooks Forge

160 |

A project dedicated to build and provide Notebooks servers for Defensive and Offensive operators to:

161 |
    162 |
  • Design playbooks
  • 163 |
  • Demonstrate how techniques can be used
  • 164 |
  • Showcase when and why an operator would want to use a technique
  • 165 |
  • Document engagements procedures
  • 166 |
  • Prototype new ways to analyze data extracted from endpoints in a more dynamic, flexible and language-agnostic way.
  • 167 |
168 |

This project supports two notebook server types such as Jupyter and Zeppelin.

169 |
170 |

What is a Notebook?

171 |

Think of a notebook as a document that you can access via a web interface that allows you to save input (i.e live code) and output (i.e code execution results / evaluated code output) of interactive sessions as well as important notes needed to explain the methodology and steps taken to perform specific tasks (i.e data analysis).

172 |
173 |

Notebook Environments:

174 | 182 |
183 |
184 |

Notebook Deployments:

185 | 193 |
194 |
195 |

Licenses:

196 | 199 |
200 |
201 |
202 | 203 | 204 |
205 | 206 |
207 |
208 | 209 | 215 | 216 | 217 |
218 | 219 |
220 |

221 | © Copyright 2019, Roberto Rodriguez, Jose Luis Rodriguez 222 | 223 |

224 |
225 | Built with Sphinx using a theme provided by Read the Docs. 226 | 227 |
228 | 229 |
230 |
231 | 232 |
233 | 234 |
235 | 236 | 237 | 238 | 243 | 244 | 245 | 246 | 247 | 248 | 249 | 250 | -------------------------------------------------------------------------------- /docs/build/html/jupyter_rto.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | Jupyter Red Team Operations (RTO) Server — Notebooks Forge 0.0.1 documentation 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 |
46 | 47 | 115 | 116 |
117 | 118 | 119 | 125 | 126 | 127 |
128 | 129 |
130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 |
148 | 149 |
    150 | 151 |
  • Docs »
  • 152 | 153 |
  • Jupyter Notebook »
  • 154 | 155 |
  • Jupyter Red Team Operations (RTO) Server
  • 156 | 157 | 158 |
  • 159 | 160 | 161 | View page source 162 | 163 | 164 |
  • 165 | 166 |
167 | 168 | 169 |
170 |
171 |
172 |
173 | 174 |
175 |

Jupyter Red Team Operations (RTO) Server

176 |

A notebook server built for offensive operators with a few libraries to connect to known tools such as Bloodhound and Cobalt Strike.

177 |
178 |

Jupyter Python Libraries

179 |
180 |

Neo4j Python Driver

181 |

Neo4j Bolt driver for Python

182 |
183 |
184 |

PyCobalt

185 |

PyCobalt is a Python API for Cobalt Strike

186 |
187 |
188 |
189 |

Jupyter Kernels Available

190 |
191 |

IPython Kernel (Python)

192 |

The Jupyter team maintains the IPython kernel since the Jupyter notebook server depends on the IPython kernel functionality. 193 | Many other languages, in addition to Python, may be used in the notebook.

194 |
195 |
196 |
197 | 198 | 199 |
200 | 201 |
202 |
203 | 204 | 212 | 213 | 214 |
215 | 216 |
217 |

218 | © Copyright 2019, Roberto Rodriguez, Jose Luis Rodriguez 219 | 220 |

221 |
222 | Built with Sphinx using a theme provided by Read the Docs. 223 | 224 |
225 | 226 |
227 |
228 | 229 |
230 | 231 |
232 | 233 | 234 | 235 | 240 | 241 | 242 | 243 | 244 | 245 | 246 | 247 | -------------------------------------------------------------------------------- /docs/build/html/jupyter_spark.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | Jupyter Spark Server — Notebooks Forge 0.0.1 documentation 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 |
46 | 47 | 115 | 116 |
117 | 118 | 119 | 125 | 126 | 127 |
128 | 129 |
130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 |
148 | 149 | 167 | 168 | 169 |
170 |
171 |
172 |
173 | 174 |
175 |

Jupyter Spark Server

176 |

A notebook server built for any operator looking to leverage advanced analytics provided by Apache Spark.

177 |
178 |

Jupyter Python Libraries

179 |
180 |

Pandas

181 |

Pandas is an open source, BSD-licensed library providing high-performance, easy-to-use data structures and data analysis tools for the Python programming language.

182 |
183 |
184 |
185 |

Jupyter Kernels Available

186 |
187 |

IPython Kernel (Python)

188 |

The Jupyter team maintains the IPython kernel since the Jupyter notebook server depends on the IPython kernel functionality. 189 | Many other languages, in addition to Python, may be used in the notebook.

190 |
191 |
192 |

PySpark Kernel (Python)

193 |

A python Kernel to enable Apache Spark for python. 194 | Writing PySpark Applications is really no different than writing normal Python applications or packages. 195 | It’s quite similar to writing command-line applications in particular. 196 | Spark doesn’t have a build concept, just Python scripts, so to run an application, you simply execute the script against the cluster.

197 |
198 |
199 |

Spylon Kernel (Scala/Python)

200 |

A Scala kernel for Apache Spark that uses metakernel in combination with py4j.

201 |
202 |
203 |

R Kernel (R)

204 |

An R kernel for Apache SparkR. 205 | SparkR is an R package that provides a light-weight frontend to use Apache Spark from R. 206 | In Spark 2.4.1, SparkR provides a distributed data frame implementation that supports operations like selection, filtering, aggregation etc. (similar to R data frames, dplyr) but on large datasets. 207 | SparkR also supports distributed machine learning using MLlib.

208 |
209 |
210 |
211 | 212 | 213 |
214 | 215 |
216 |
217 | 218 | 226 | 227 | 228 |
229 | 230 |
231 |

232 | © Copyright 2019, Roberto Rodriguez, Jose Luis Rodriguez 233 | 234 |

235 |
236 | Built with Sphinx using a theme provided by Read the Docs. 237 | 238 |
239 | 240 |
241 |
242 | 243 |
244 | 245 |
246 | 247 | 248 | 249 | 254 | 255 | 256 | 257 | 258 | 259 | 260 | 261 | -------------------------------------------------------------------------------- /docs/build/html/objects.inv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/build/html/objects.inv -------------------------------------------------------------------------------- /docs/build/html/search.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | Search — Notebooks Forge 0.0.1 documentation 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 |
45 | 46 | 101 | 102 |
103 | 104 | 105 | 111 | 112 | 113 |
114 | 115 |
116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 |
134 | 135 |
    136 | 137 |
  • Docs »
  • 138 | 139 |
  • Search
  • 140 | 141 | 142 |
  • 143 | 144 | 145 | 146 |
  • 147 | 148 |
149 | 150 | 151 |
152 |
153 |
154 |
155 | 156 | 164 | 165 | 166 |
167 | 168 |
169 | 170 |
171 | 172 |
173 |
174 | 175 | 176 |
177 | 178 |
179 |

180 | © Copyright 2019, Roberto Rodriguez, Jose Luis Rodriguez 181 | 182 |

183 |
184 | Built with Sphinx using a theme provided by Read the Docs. 185 | 186 |
187 | 188 |
189 |
190 | 191 |
192 | 193 |
194 | 195 | 196 | 197 | 202 | 203 | 204 | 205 | 206 | 207 | 210 | 211 | 212 | 213 | 214 | 215 | 216 | -------------------------------------------------------------------------------- /docs/build/html/searchindex.js: -------------------------------------------------------------------------------- 1 | Search.setIndex({docnames:["docker","index","jupyter","jupyter_hunt","jupyter_rto","jupyter_spark","license","zeppelin"],envversion:{"sphinx.domains.c":1,"sphinx.domains.changeset":1,"sphinx.domains.cpp":1,"sphinx.domains.javascript":1,"sphinx.domains.math":2,"sphinx.domains.python":1,"sphinx.domains.rst":1,"sphinx.domains.std":1,"sphinx.ext.viewcode":1,sphinx:55},filenames:["docker.rst","index.rst","jupyter.rst","jupyter_hunt.rst","jupyter_rto.rst","jupyter_spark.rst","license.rst","zeppelin.rst"],objects:{},objnames:{},objtypes:{},terms:{"case":6,"class":[3,6],"default":[0,2],"final":6,"function":[3,4,5,6],"import":[1,6],"long":6,"new":[1,6],"public":[1,3,6],"return":[2,6],"short":6,"void":6,"while":3,AND:6,And:6,BEING:6,BUT:6,But:6,FOR:6,For:6,HAS:6,NOT:6,Not:6,One:[],SUCH:6,Such:6,THE:6,THERE:6,That:0,The:[2,3,4,5,6],There:0,These:6,USE:6,Use:6,Uses:2,WILL:6,WITH:6,With:3,abil:6,abl:3,about:6,abov:[3,6],absenc:6,absolut:6,abus:6,accept:6,access:[0,1,6],accompani:6,accord:[0,6],achiev:6,acknowledg:6,acquir:6,across:[3,6],action:6,activ:6,actual:6,adapt:6,add:6,added:6,addit:[3,4,5,6],address:[0,6],adopt:6,advanc:[2,3,5],advantag:[],advers:6,adversari:3,advis:6,affect:6,affero:6,affirm:6,afford:[],after:6,against:[3,5,6],aggreg:[3,5,6],agnost:[1,2],agre:6,agreement:6,aim:[3,6],all:[0,6],alleg:6,allow:[0,1,2,3,6],alon:[],along:6,alreadi:6,also:[2,3,5,6],altern:6,although:6,among:[0,6],analysi:[1,3,5],analyst:[],analyt:[2,3,5],analytst:0,analyz:[1,2,3],ancillari:6,ani:[0,5,6],anoth:0,anti:6,anyon:6,anyth:6,apach:[3,5],api:[3,4],app:0,appli:[2,6],applic:[0,2,3,5,6],applicationinsight:3,appropri:6,approxim:6,area:6,aris:6,arrang:6,articl:6,ask:6,assert:6,asset:6,associ:6,a
ssum:6,assumpt:6,assur:6,asynchron:2,attach:6,attempt:6,attribut:6,author:6,automat:[2,6],avail:[0,2,6],avoid:6,awai:6,azur:3,back:2,bar:[],base:[0,2,6],basic:6,beauti:3,becaus:6,been:6,befor:0,behalf:6,being:[0,3,6],believ:6,below:6,benefit:6,best:6,between:6,beyond:6,binari:3,bloodhound:4,bodi:6,bolt:4,both:6,boto3:3,box:[0,6],brief:6,browser:[0,2],bsd:[3,5],buil:0,build:[1,3,5],built:[3,4,5],busi:6,call:[2,6],can:[0,1,2,3,6],cannot:6,capabl:[2,3],carri:6,cascad:[],caus:6,ceas:6,certain:6,cessat:6,chang:[0,6],character:6,charg:6,check:0,choos:6,circumst:6,circumvent:6,civil:6,claim:6,claus:3,clean:2,clear:6,clearli:6,client:3,clone:0,close:[3,6],cluster:[3,5],cobalt:4,code:[0,1,2,3,6],collect:6,com:0,combin:[3,5,6],come:[0,6,7],command:[0,3,5,6],commerci:6,commit:6,common:6,commun:[0,2,6],compil:6,complex:3,compli:6,complianc:6,compon:[2,6],compos:0,comput:[0,2,6],concept:[2,3,5],concern:6,condit:6,connect:[3,4,6],connector:[],consequ:6,consequenti:6,consid:[2,6],consist:[3,6],consol:2,conspicu:6,constantli:6,constitut:6,constru:6,consum:6,consumpt:[],contact:6,contain:[1,2,3,6],content:6,context:6,continu:6,contractu:6,contradict:6,contrast:6,contributor:6,control:[0,6],convei:6,conveni:[3,6],convey:6,copi:6,copyleft:6,copyright:6,correct:6,correspond:6,cost:6,could:[2,6],counterclaim:6,countri:6,cours:6,court:6,coven:6,cover:6,coverag:6,creat:[2,3],creation:3,creativ:[],criterion:6,cross:6,cure:6,current:0,custom:6,customarili:6,cyb3rward0g:0,dai:6,daili:0,damag:6,danger:6,data:[1,2,3,5,6],databas:3,datafram:[],dataset:[3,5],date:6,decemb:6,decid:6,declar:3,declin:6,decoupl:2,dedic:1,deem:6,defect:6,defens:[1,3,6],defin:[0,3,6],definit:6,demonstr:1,deni:6,denomin:6,depend:[0,3,4,5],deploi:0,deploy:1,deplpoy:[],depriv:6,deriv:2,design:[0,1,3,6],detail:6,detect:[],determin:6,develop:[0,2,3,6],devic:6,differ:[3,5,6],direct:6,directionali:[],directli:[3,6],directori:0,disclaim:6,discriminatori:6,disk:2,displai:[2,6],distinguish:6,distribut:[0,3,5,6],divers:[],doc:0,
docker:1,document:[1,6],doe:[2,6],doesn:[3,5],domain:6,doubt:6,download:0,downstream:6,dplyr:[3,5],driver:3,durabl:6,dwell:6,dynam:[1,3,6],each:6,earlier:6,easi:[3,5],easier:[],easili:3,edit:0,effect:6,effici:0,effort:6,either:[3,6],electron:6,embodi:6,employ:6,enabl:[2,3,5,6],end:6,endpoint:1,enforc:6,engag:1,engin:2,enhanc:2,enrich:[],ensur:6,enter:[0,6],entir:6,entiti:6,environ:[0,1,2,3],equat:2,equival:6,erron:6,especi:[],essenti:6,etc:[3,5],evalu:[1,2],even:6,event:6,ever:6,everi:6,everyon:6,everyth:0,evolut:2,exact:6,exampl:[3,6],except:6,exclud:6,exclus:6,excus:6,exec:0,execut:[0,1,2,3,5,6],exercis:6,expect:6,expedit:[],experi:[0,3],expertis:[],explain:[1,2,6],explicitli:6,explor:[2,3],expos:3,express:[3,6],expressli:6,extend:6,extens:[2,3,6],extent:6,extract:1,facil:6,facilit:[],fact:2,fail:6,failur:6,fair:6,famili:6,fashion:6,favor:6,favorit:0,featur:6,fee:6,feel:[],few:4,figur:3,file:[0,3,6],filter:[3,5],fin:[],find:[3,6],first:[2,6],fit:6,fix:6,flexibl:1,flow:6,folder:0,follow:[0,2,6],forbid:6,forc:6,forg:0,form:[2,6],format:[2,3,6],found:6,foundat:6,frame:[3,5],free:[0,6],freedom:6,friendli:3,from:[0,1,2,3,5,6],frontend:[3,5],fsf:6,fulfil:6,full:6,fundament:6,further:6,futur:6,gener:[1,6],get:[0,3,6],git:0,github:0,give:6,given:6,glob:3,gnu:[1,6],govern:6,gpl:6,grant:6,graph:3,graphic:3,graphx:[],grati:6,greatest:6,grep:0,ground:[],guarante:6,gui:6,had:6,handl:0,hardcopi:3,has:6,have:[0,2,3,5,6],head:[],heard:[],help:3,hereaft:6,high:[3,5],highli:[],hive:[],hold:3,holder:6,hope:6,host:[2,6],household:6,how:[1,6],howev:6,html:6,http:[0,2,6],hunt:[0,2],huntingground:[],hypothet:6,idea:6,ideal:0,identifi:6,idiomat:3,imag:0,implement:[3,5,6],impli:6,impos:6,improv:[],inabl:6,inaccur:6,inc:6,incident:6,includ:[0,2,6],inclus:6,incompat:6,incorpor:6,increas:[],indemnif:6,independ:6,indic:6,individu:6,industri:6,inform:[3,6],infrastructur:2,infring:6,initi:[0,6],input:[1,2],insid:6,instal:[1,6],instead:6,intact:6,integr:[],intend:6,intent:6,interact:[1,2,3,6],in
terchang:6,interest:6,interf:6,interfac:[1,3,6],interpret:6,intim:6,introduc:[],intrus:[],intuit:[],invalid:6,ipynb:2,ipython:2,irrevoc:6,item:6,its:[0,2,3,6],itself:6,javascript:2,job:[],json:[2,3],julia:2,june:6,jupyt:1,just:[3,5],keep:6,kei:6,kernel:6,kind:6,kit:3,know:6,knowingli:6,knowledg:6,known:[3,4],kql:3,kusto:3,languag:[1,2,3,4,5,6],larg:[0,3,5],larger:6,later:6,law:6,lawsuit:6,layer:2,learn:[2,5],least:6,legal:6,lesser:6,level:[2,3],leverag:5,lgpl:6,liabil:6,liabl:6,librari:[0,2,6],licens:[1,3,5],license:6,licensor:6,light:[3,5],lightweight:0,like:[3,5,6],likewis:6,limit:6,line:[3,5,6],link:6,list:[0,6],lite:3,litig:6,live:[1,2],local:[3,6],localhost:[],loganalyt:3,login:0,look:[0,5],loop:2,loss:6,low:[2,3],machin:[2,3,5,6],made:[2,6],magic:3,mai:[3,4,5,6],mail:6,main:2,maintain:[3,4,5,6],major:6,make:[0,6],mani:[3,4,5],manipul:3,manner:6,manufactur:6,map:[],march:6,mark:6,mask:6,match:[],materi:6,mathematica:3,matlab:3,mean:[3,6],measur:6,medium:6,meet:6,menu:6,merchant:6,mere:6,merg:6,messag:2,met:6,metakernel:[3,5],method:6,methodolog:1,microsoft:3,might:6,minim:3,mirror:3,misrepresent:6,mllib:[3,5],mode:6,model:[2,6],modif:6,modifi:6,modul:3,monitor:3,mordor:[],more:[1,2,3,6],moreov:6,most:[2,6],motif:[],much:2,multi:0,multipl:2,must:6,name:[2,6],narr:2,nativ:[],natur:6,necessari:6,need:[0,1,6],neither:6,network:[3,6],next:6,nnotebook:[],non:6,noncommerci:6,nor:6,normal:[3,5,6],notat:2,note:1,notebook:[3,4,5],noth:6,notic:6,notifi:6,notwithstand:6,now:[0,2],nowadai:2,number:6,numer:2,object:[2,6],oblig:6,occasion:6,occur:6,offens:[1,4],offer:6,offici:[3,6],onc:0,one:[0,2,6],onli:[2,6],open:[0,2,3,5],oper:[0,1,2,3,5,6],optim:[],option:6,order:[3,6],org:6,organ:6,origin:[2,6],other:[3,4,5,6],otherwis:6,our:6,out:6,output:[1,6],outsid:6,over:2,own:6,packag:[0,3,5,6],pair:[],paper:6,paragraph:6,part:6,parti:6,particular:[3,5,6],pass:6,password:6,patent:6,pattern:6,payment:6,peer:6,perform:[1,3,5,6],perman:6,permiss:6,permit:6,perpetu:6,person:6,perspect:
2,pertin:6,philosophi:6,physic:6,piec:6,pig:[],place:6,platform:3,playbook:1,pleas:6,plenti:[],plot:3,plu:6,point:0,pointer:6,portabl:0,portion:6,possess:6,possibl:6,potenti:3,power:[3,6],practic:6,pre:[],preambl:6,precis:6,predecessor:6,prefer:[2,6],present:6,preserv:6,prevent:6,previou:6,price:6,primarili:[2,6],print:2,prior:6,privat:6,problem:6,procedur:[1,6],process:2,procur:6,produc:[3,6],product:6,program:[2,3,5,6],programm:6,prohibit:6,project:[0,1,2],promin:6,prompt:0,propag:6,properti:6,proprietari:6,protect:6,protocol:[2,3,6],prototyp:1,prove:6,provid:[1,3,5,6],provis:6,provision:6,proxi:6,publicli:6,publish:6,purpos:6,pursuant:6,put:3,py4j:[3,5],pyspark:2,python3:3,python:2,qualifi:6,qualiti:[3,6],queri:3,queryset:3,question:[],quickli:0,quit:[3,5],rang:3,rdd:[],read:[2,6],readabl:6,readi:[0,6],readili:6,realli:[3,5],reason:6,receipt:6,receiv:[2,6],recipi:6,recogn:6,record:[],redistribut:6,reduc:[],refer:[2,6],refrain:6,regard:6,regardless:6,regener:6,reinstat:6,relationship:6,releas:6,relev:6,reli:6,reliabl:0,relicens:6,remain:6,remov:6,render:6,repair:6,repl:2,repo:3,repres:6,request:2,requir:6,research:0,resili:[],resolv:6,respect:6,respons:6,rest:3,restrict:6,result:[1,2,6],retain:6,review:6,revis:6,right:6,risk:6,rom:6,royalti:6,rto:[0,2],rule:6,run:[0,3,5,6],runtim:0,s3filesystem:3,safest:6,sai:6,sake:6,sale:6,same:6,satisfi:6,save:[1,2],scala:2,school:6,scienc:[],scientif:2,scientist:2,scipi:3,scope:6,script:[3,5,6],search:[],secondarili:6,section:6,secur:0,sed:0,see:6,select:[3,5],self:[],sell:6,semiconductor:6,send:2,sent:2,separ:6,serial:[],serv:[2,6],server:[0,1,6],servic:6,session:1,set:0,sever:3,shall:6,sharabl:2,share:[2,6],she:[],shell:3,should:6,show:6,showcas:1,siem:3,sign:6,signific:6,significantli:[],similar:[2,3,5,6],simpl:3,simpli:[3,5],simul:2,simultan:6,sinc:[3,4,5],singl:[2,6],small:0,softwar:[0,3,6],sold:6,sole:6,some:6,someon:[],soon:[0,7],sourc:[0,2,3,5,6],spare:6,spark:[2,3],sparkr:[3,5],speak:6,special:6,specif:[0,1,3,6],speci
fi:6,speed:0,spend:3,spirit:6,stai:3,stand:6,standalon:0,standard:[0,6],start:[0,6],state:6,statement:6,statist:[2,3],statu:6,step:[1,6],storag:6,store:2,strategi:[],stream:[],strike:4,structur:[3,5],studi:3,style:3,subdivid:6,subject:6,sublicens:6,subprogram:6,subroutin:6,subsect:6,substanti:6,sudo:0,sue:6,suffic:6,supplement:6,support:[0,1,2,3,5,6],sure:6,surrend:6,surviv:6,sustain:6,system:[0,3,6],systemat:6,take:[2,6],taken:1,tangibl:6,task:1,tcp:2,team:[0,3,5],technic:[],techniqu:1,technolog:[0,6],tell:6,term:6,termin:6,terminolog:3,test:[],text:2,than:[3,5,6],thei:[0,6],them:[0,2,6],therefor:6,thi:[0,1,2,3,6],thing:6,think:1,third:6,those:6,though:6,thought:[],threat:[],threaten:6,three:[2,6],through:6,thu:6,time:[3,6],token:0,too:6,tool:[0,3,4,5,6],toolkit:3,top:3,tornado:2,trade:6,trademark:6,transact:6,transfer:6,transform:2,transmiss:6,transpar:[],transport:2,treat:6,treati:6,two:[0,1,2,6],type:[1,6],typic:[3,6],unaccept:6,under:[3,6],understand:3,unit:0,unless:6,unlimit:6,unmodifi:6,unnecessari:6,unpack:6,until:6,upcom:[],updat:6,upon:[],use:[0,1,3,5,6],used:[1,3,4,5,6],useful:6,user:[2,3,6],uses:[2,3,5,6],using:[3,5,6],util:[],valid:6,varieti:3,variou:3,vega:3,verbatim:6,veri:0,version:[0,3,6],via:[0,1,2,3],view:6,violat:6,visibl:6,visual:[2,3],volum:6,wai:[1,2,3,6],waiv:6,waiver:6,want:[0,1,6],warranti:6,web:[0,1,2,3],websocket:2,weight:[3,5],welcom:6,well:[1,3,6],were:6,what:[2,6],whatev:6,when:[1,6],where:6,whether:6,which:[2,3,6],who:6,whole:[3,6],whom:6,whose:[3,6],why:[1,2,6],wide:6,window:6,wipo:6,wish:6,within:6,without:6,work:[0,1,6],worldwid:6,would:[1,6],wrapper:3,write:[3,5,6],written:6,www:6,year:6,yml:0,you:[0,1,2,3,5,6],your:[0,3,6],yourself:6,zeppelin:1,zeromq:2},titles:["Docker Notebook Deployments","Notebooks Forge","Jupyter Notebook","Jupyter Hunt Server","Jupyter Red Team Operations (RTO) Server","Jupyter Spark Server","Licenses","Zeppelin 
Notebook"],titleterms:{altair:3,analyt:[],avail:[3,4,5],client:2,confluent:3,contain:0,deploy:0,docker:0,document:2,driver:4,dsl:3,elasticsearch:3,forg:1,goal:[],graphfram:[],ground:[],hadoop:[],how:2,hunt:3,instal:0,ipython:[3,4,5],jupyt:[0,2,3,4,5],kafka:3,kernel:[2,3,4,5],kqlmagic:3,ksql:3,learn:3,librari:[3,4,5],licens:6,matplotlib:3,neo4j:[3,4],networkx:3,notebook:[0,1,2,7],nxviz:3,oper:4,panda:[3,5],pycobalt:4,pyspark:[3,5],pytho:4,python3:[],python:[3,4,5],pythonn3:[],red:4,requir:0,rto:4,s3f:3,scala:[3,5],scikit:3,sdk:3,server:[2,3,4,5],spark:5,splunk:3,sql:[],step:0,syplon:[3,5],team:4,what:[0,1],work:2,zeppelin:[0,7]}}) -------------------------------------------------------------------------------- /docs/build/html/zeppelin.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | Zeppelin Notebook — Notebooks Forge 0.0.1 documentation 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 |
46 | 47 | 102 | 103 |
104 | 105 | 106 | 112 | 113 | 114 |
115 | 116 |
117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 |
135 | 136 |
    137 | 138 |
  • Docs »
  • 139 | 140 |
  • Zeppelin Notebook
  • 141 | 142 | 143 |
  • 144 | 145 | 146 | View page source 147 | 148 | 149 |
  • 150 | 151 |
152 | 153 | 154 |
155 |
156 |
157 |
158 | 159 |
160 |

Zeppelin Notebook

161 |

Coming soon..

162 |
163 | 164 | 165 |
166 | 167 |
168 |
169 | 170 | 178 | 179 | 180 |
181 | 182 |
183 |

184 | © Copyright 2019, Roberto Rodriguez, Jose Luis Rodriguez 185 | 186 |

187 |
188 | Built with Sphinx using a theme provided by Read the Docs. 189 | 190 |
191 | 192 |
193 |
194 | 195 |
196 | 197 |
198 | 199 | 200 | 201 | 206 | 207 | 208 | 209 | 210 | 211 | 212 | 213 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/source/_static/docker-containers.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/source/_static/docker-containers.png -------------------------------------------------------------------------------- /docs/source/_static/jupyter-design.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/source/_static/jupyter-design.png -------------------------------------------------------------------------------- /docs/source/_static/jupyter-evolution.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/source/_static/jupyter-evolution.png -------------------------------------------------------------------------------- /docs/source/_static/jupyter-installed-token.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/source/_static/jupyter-installed-token.png -------------------------------------------------------------------------------- /docs/source/_static/jupyter-login.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/source/_static/jupyter-login.png -------------------------------------------------------------------------------- /docs/source/_static/jupyter-main.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/source/_static/jupyter-main.png -------------------------------------------------------------------------------- /docs/source/_static/jupyter-samples.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OTRF/notebooks-forge/e964a2fc636de2f24bd418e51b80bfe7b04549fe/docs/source/_static/jupyter-samples.png -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration file for the Sphinx documentation builder. 4 | # 5 | # This file does only contain a selection of the most common options. 
For a 6 | # full list see the documentation: 7 | # http://www.sphinx-doc.org/en/master/config 8 | 9 | # -- Path setup -------------------------------------------------------------- 10 | 11 | # If extensions (or modules to document with autodoc) are in another directory, 12 | # add these directories to sys.path here. If the directory is relative to the 13 | # documentation root, use os.path.abspath to make it absolute, like shown here. 14 | # 15 | # import os 16 | # import sys 17 | # sys.path.insert(0, os.path.abspath('.')) 18 | 19 | 20 | # -- Project information ----------------------------------------------------- 21 | 22 | project = 'Notebooks Forge' 23 | copyright = '2019, Roberto Rodriguez, Jose Luis Rodriguez' 24 | author = 'Roberto Rodriguez, Jose Luis Rodriguez' 25 | 26 | # The short X.Y version 27 | version = '' 28 | # The full version, including alpha/beta/rc tags 29 | release = '0.0.1' 30 | 31 | 32 | # -- General configuration --------------------------------------------------- 33 | 34 | # If your documentation needs a minimal Sphinx version, state it here. 35 | # 36 | # needs_sphinx = '1.0' 37 | 38 | # Add any Sphinx extension module names here, as strings. They can be 39 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 40 | # ones. 41 | extensions = [ 42 | 'sphinx.ext.autodoc', 43 | 'sphinx.ext.viewcode', 44 | 'sphinx.ext.githubpages', 45 | ] 46 | 47 | # Add any paths that contain templates here, relative to this directory. 48 | templates_path = ['_templates'] 49 | 50 | # The suffix(es) of source filenames. 51 | # You can specify multiple suffix as a list of string: 52 | # 53 | # source_suffix = ['.rst', '.md'] 54 | source_suffix = '.rst' 55 | 56 | # The master toctree document. 57 | master_doc = 'index' 58 | 59 | # The language for content autogenerated by Sphinx. Refer to documentation 60 | # for a list of supported languages. 61 | # 62 | # This is also used if you do content translation via gettext catalogs. 
63 | # Usually you set "language" from the command line for these cases. 64 | language = None 65 | 66 | # List of patterns, relative to source directory, that match files and 67 | # directories to ignore when looking for source files. 68 | # This pattern also affects html_static_path and html_extra_path. 69 | exclude_patterns = [] 70 | 71 | # The name of the Pygments (syntax highlighting) style to use. 72 | pygments_style = None 73 | 74 | 75 | # -- Options for HTML output ------------------------------------------------- 76 | 77 | # The theme to use for HTML and HTML Help pages. See the documentation for 78 | # a list of builtin themes. 79 | # 80 | html_theme = 'sphinx_rtd_theme' 81 | 82 | # Theme options are theme-specific and customize the look and feel of a theme 83 | # further. For a list of options available for each theme, see the 84 | # documentation. 85 | # 86 | # html_theme_options = {} 87 | 88 | # Add any paths that contain custom static files (such as style sheets) here, 89 | # relative to this directory. They are copied after the builtin static files, 90 | # so a file named "default.css" will overwrite the builtin "default.css". 91 | html_static_path = ['_static'] 92 | 93 | # Custom sidebar templates, must be a dictionary that maps document names 94 | # to template names. 95 | # 96 | # The default sidebars (for documents that don't match any pattern) are 97 | # defined by theme itself. Builtin themes are using these templates by 98 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 99 | # 'searchbox.html']``. 100 | # 101 | # html_sidebars = {} 102 | 103 | 104 | # -- Options for HTMLHelp output --------------------------------------------- 105 | 106 | # Output file base name for HTML help builder. 107 | htmlhelp_basename = 'notebooksforgedoc' 108 | 109 | 110 | # -- Options for LaTeX output ------------------------------------------------ 111 | 112 | latex_elements = { 113 | # The paper size ('letterpaper' or 'a4paper'). 
114 | # 115 | # 'papersize': 'letterpaper', 116 | 117 | # The font size ('10pt', '11pt' or '12pt'). 118 | # 119 | # 'pointsize': '10pt', 120 | 121 | # Additional stuff for the LaTeX preamble. 122 | # 123 | # 'preamble': '', 124 | 125 | # Latex figure (float) alignment 126 | # 127 | # 'figure_align': 'htbp', 128 | } 129 | 130 | # Grouping the document tree into LaTeX files. List of tuples 131 | # (source start file, target name, title, 132 | # author, documentclass [howto, manual, or own class]). 133 | latex_documents = [ 134 | (master_doc, 'NotebooksForge.tex', 'Notebooks Forge Documentation', 135 | 'Roberto Rodriguez, Jose Luis Rodriguez', 'manual'), 136 | ] 137 | 138 | 139 | # -- Options for manual page output ------------------------------------------ 140 | 141 | # One entry per manual page. List of tuples 142 | # (source start file, name, description, authors, manual section). 143 | man_pages = [ 144 | (master_doc, 'NotebooksForge', 'Notebooks Forge Documentation', 145 | [author], 1) 146 | ] 147 | 148 | 149 | # -- Options for Texinfo output ---------------------------------------------- 150 | 151 | # Grouping the document tree into Texinfo files. List of tuples 152 | # (source start file, target name, title, author, 153 | # dir menu entry, description, category) 154 | texinfo_documents = [ 155 | (master_doc, 'NotebooksForge', 'Notebooks Forge Documentation', 156 | author, 'NotebooksForge', 'One line description of project.', 157 | 'Miscellaneous'), 158 | ] 159 | 160 | 161 | # -- Options for Epub output ------------------------------------------------- 162 | 163 | # Bibliographic Dublin Core info. 164 | epub_title = project 165 | 166 | # The unique identifier of the text. This can be a ISBN number 167 | # or the project homepage. 168 | # 169 | # epub_identifier = '' 170 | 171 | # A unique identification for the text. 172 | # 173 | # epub_uid = '' 174 | 175 | # A list of files that should not be packed into the epub file. 
176 | epub_exclude_files = ['search.html'] 177 | 178 | 179 | # -- Extension configuration ------------------------------------------------- 180 | -------------------------------------------------------------------------------- /docs/source/docker.rst: -------------------------------------------------------------------------------- 1 | Docker Notebook Deployments 2 | =========================== 3 | 4 | Docker technology allows the project to package notebook applications with all its libraries and dependencies in "containers" and make them portable among any operating system. 5 | This allows security analytst to deploy the notebook servers on any system they use daily for hunting research. 6 | 7 | What are Docker Containers? 8 | ########################### 9 | 10 | According to `Docker docs `_, a container is a standard unit of software that packages up code and all its dependencies so the application runs quickly and reliably from one computing environment to another. 11 | A Docker container image is a lightweight, standalone, executable package of software that includes everything needed to run an application: code, runtime, system tools, system libraries and settings. 12 | 13 | .. image:: _static/docker-containers.png 14 | :alt: Docker Containers 15 | :scale: 50% 16 | 17 | There are two notebook environments being supported by the project. 18 | 19 | Jupyter Notebooks Install 20 | ######################### 21 | 22 | Requirements 23 | ************ 24 | 25 | * `Git `_ : Git is a free and open source distributed version control system designed to handle everything from small to very large projects with speed and efficiency. 26 | * `Docker CE `_ : Docker Community Edition (CE) is ideal for developers and small teams looking to get started with Docker and experimenting with container-based apps. 27 | * `Docker Compose `_ : a tool for defining and running multi-container Docker applications. 
28 | 29 | Steps 30 | ***** 31 | 32 | Git clone the `Notebooks Forge project `_ and change your current directory to the project's directory. 33 | 34 | .. code-block:: console 35 | 36 | $ git clone https://github.com/Cyb3rWard0g/notebooks-forge.git 37 | $ cd notebooks-forge/ 38 | 39 | Change your current directory to the specific notebook you want to work with (``jupyter-hunt`` or ``jupyter-rto``) 40 | 41 | .. code-block:: console 42 | 43 | $ cd jupyter-hunt/ 44 | 45 | Run docker-compose pointing to the default compose file available in the folder. 46 | 47 | .. code-block:: console 48 | 49 | $ sudo docker-compose -f docker-compose.yml up --build -d 50 | 51 | Once your container gets downloaded/run, you can check whether it is running or not with the following commands: 52 | 53 | .. code-block:: console 54 | 55 | $ sudo docker ps 56 | 57 | Before accessing the Jupyter notebook server via your favorite web browser, you will have to get the access token the application initialized with. 58 | You can get it with the following command: 59 | 60 | .. code-block:: console 61 | 62 | $ sudo docker exec -ti jupyter-hunt jupyter notebook list | grep "token" | sed 's/.*token=\([^ ]*\).*/\1/' 63 | 64 | Open your favorite browser at ``http://<server-ip>:8888``. You will then be prompted with a login box to enter the token. 65 | 66 | .. image:: _static/jupyter-login.png 67 | :alt: Jupyter Login 68 | :scale: 50% 69 | 70 | That's it! You are now ready to use your Jupyter Notebook server. 71 | 72 | .. image:: _static/jupyter-main.png 73 | :alt: Jupyter Main 74 | :scale: 40% 75 | 76 | Zeppelin Notebooks Install 77 | ########################## 78 | 79 | Requirements 80 | ************ 81 | 82 | * `Git `_ : Git is a free and open source distributed version control system designed to handle everything from small to very large projects with speed and efficiency. 
83 | * `Docker CE `_ : Docker Community Edition (CE) is ideal for developers and small teams looking to get started with Docker and experimenting with container-based apps. 84 | * `Docker Compose `_ : a tool for defining and running multi-container Docker applications. 85 | 86 | Steps 87 | ***** 88 | 89 | Coming soon.. -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. Notebooks Forge documentation master file, created by 2 | sphinx-quickstart on Wed Apr 17 11:44:45 2019. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Notebooks Forge 7 | =============== 8 | 9 | A project dedicated to build and provide ``Notebooks`` servers for ``Defensive`` and ``Offensive`` operators to: 10 | 11 | * Design playbooks 12 | * Demonstrate how techniques can be used 13 | * Showcase when and why an operator would want to use a technique 14 | * Document engagements procedures 15 | * Prototype new ways to analyze data extracted from endpoints in a more dynamic, flexible and language-agnostic way. 16 | 17 | This project supports two notebook server types such as `Jupyter `_ and `Zeppelin `_. 18 | 19 | 20 | What is a Notebook? 21 | ******************* 22 | 23 | Think of a notebook as a document that you can access via a web interface that allows you to save input (i.e live code) and output (i.e code execution results / evaluated code output) of interactive sessions as well as important notes needed to explain the methodology and steps taken to perform specific tasks (i.e data analysis). 24 | 25 | .. toctree:: 26 | :maxdepth: 2 27 | :caption: Notebook Environments: 28 | 29 | Jupyter Notebook 30 | Zeppelin Notebook 31 | 32 | .. toctree:: 33 | :maxdepth: 2 34 | :caption: Notebook Deployments: 35 | 36 | Docker 37 | 38 | .. 
toctree:: 39 | :maxdepth: 2 40 | :caption: Licenses: 41 | 42 | GNU General Public License V3 -------------------------------------------------------------------------------- /docs/source/jupyter.rst: -------------------------------------------------------------------------------- 1 | Jupyter Notebook 2 | ================ 3 | 4 | The Jupyter Notebook is an open-source web application that allows you to create and share documents that contain live code, equations, visualizations and narrative text. 5 | Uses include: data cleaning and transformation, numerical simulation, statistical modeling, data visualization, machine learning, and much more. 6 | 7 | The Jupyter Notebook project is the evolution of the IPython Notebook library which was developed primarily to enhance the default python interactive console by enabling scientific operations and advanced data analytics capabilities via sharable web documents. 8 | 9 | .. image:: _static/jupyter-evolution.png 10 | :alt: Jupyter Evolution 11 | :scale: 50% 12 | 13 | Nowadays, the Jupyter Notebook project not only supports Python but also over 40 programming languages such as R, Julia, Scala and PySpark. 14 | In fact, its name was originally derived from three programming languages: Julia, Python and R which made it one of the first language-agnostic notebook applications, and now considered one of the most preferred environments for data scientists and engineers in the community to explore and analyze data. 15 | 16 | .. image:: _static/jupyter-samples.png 17 | :alt: Jupyter Sample 18 | :scale: 50% 19 | 20 | How do Jupyter Notebooks Work? 21 | ############################## 22 | 23 | Jupyter Notebooks work with what is called a two-process model based on a kernel-client infrastructure. 24 | This model applies a similar concept to the `Read-Evaluate-Print Loop (REPL) `_ programming environment that takes a single user's inputs, evaluates them, and returns the result to the user. 25 | 26 | .. 
image:: _static/jupyter-design.png 27 | :alt: Jupyter Design 28 | :scale: 70% 29 | 30 | Based on the two-process model concept, we can explain the main components of Jupyter the following way: 31 | 32 | Jupyter Client 33 | ************** 34 | 35 | * It allows a user to send code to the kernel and it could be in a form of a `Qt Console `_ or a browser via notebook documents. 36 | * From a REPL perspective, the client does the read and print operations. 37 | * Notebooks are hosted by the Jupyter web server which uses Tornado to serve HTTP requests. 38 | 39 | Jupyter Kernel 40 | ************** 41 | 42 | * It receives the code sent by the client, executes it, and returns the results back to the client for display. A kernel process can have multiple clients communicating with it which is why this model is also referred as the decoupled two-process model. 43 | * From a REPL perspective, the kernel does the evaluate operation. 44 | * kernel and clients communicate via an interactive computing protocol based on an asynchronous messaging library named `ZeroMQ `_ (low-level transport layer) and WebSockets (TCP-based) 45 | 46 | Jupyter Notebook Document 47 | ************************* 48 | 49 | * Notebooks are automatically saved and stored on disk in the open source JavaScript Object Notation (JSON) format and with a .ipynb extension. 50 | 51 | Jupyter Notebooks Servers 52 | ######################### 53 | 54 | .. toctree:: 55 | :maxdepth: 2 56 | 57 | Jupyter Spark 58 | Jupyter Hunt 59 | Jupyter RTO -------------------------------------------------------------------------------- /docs/source/jupyter_hunt.rst: -------------------------------------------------------------------------------- 1 | Jupyter Hunt Server 2 | =================== 3 | 4 | A notebook server built for defensive operators with several tools to connect to known SIEMs and be able to analyze data to find potential adversaries in the network. 
5 | This server is built on the top of the `Jupyter Spark` server available in this repo in order to provide advanced analytics capabilities via Apache Spark. 6 | 7 | Jupyter Python Libraries 8 | ######################## 9 | 10 | Pandas 11 | ****** 12 | 13 | `Pandas `_ is an open source, BSD-licensed library providing high-performance, easy-to-use data structures and data analysis tools for the Python programming language. 14 | 15 | Altair 16 | ****** 17 | 18 | `Altair `_ is a declarative statistical visualization library for Python. 19 | With Altair, you can spend more time understanding your data and its meaning. 20 | Altair's API is simple, friendly and consistent and built on top of the powerful `Vega-Lite `_ JSON specification. 21 | 22 | S3Fs 23 | **** 24 | 25 | `S3Fs `_ is a Pythonic file interface to S3. It builds on top of `boto3 `_. 26 | The top-level class S3FileSystem holds connection information and allows typical file-system style operations like cp, mv, ls, du, glob, etc., as well as put/get of local files to/from S3. 27 | 28 | Elasticsearch-DSL 29 | ***************** 30 | 31 | `Elasticsearch DSL `_ is a high-level library whose aim is to help with writing and running queries against Elasticsearch. 32 | It is built on top of the official low-level client (`elasticsearch-py `_). 33 | It provides a more convenient and idiomatic way to write and manipulate queries. 34 | It stays close to the Elasticsearch JSON DSL, mirroring its terminology and structure. 35 | It exposes the whole range of the DSL from Python either directly using defined classes or a queryset-like expressions. 36 | 37 | Matplotlib 38 | ********** 39 | 40 | `Matplotlib `_ is a Python 2D plotting library which produces publication-quality figures in a variety of hardcopy formats and interactive environments across platforms. 
41 | Matplotlib can be used in Python scripts, the Python and IPython shell (à la MATLAB or Mathematica), web application servers, and various graphical user interface toolkits. 42 | 43 | Scikit-learn 44 | ************ 45 | 46 | `Scikit-learn `_ is a Python module for machine learning built on top of SciPy and distributed under the 3-Clause BSD license. 47 | 48 | KSQL-Python 49 | *********** 50 | 51 | `KSQL-Python `_ is a python wrapper for the KSQL REST API. Easily interact with the KSQL REST API using this library. 52 | 53 | Confluent-Kafka-Python 54 | ********************** 55 | 56 | `Confluent-kafka-python `_ is Confluent's Python client for `Apache Kafka `_ and the `Confluent Platform `_. 57 | 58 | Splunk-SDK 59 | ********** 60 | 61 | The `Splunk Software Development Kit (SDK) `_ for Python contains library code and examples designed to enable developers to build applications using Splunk. 62 | 63 | Kqlmagic 64 | ******** 65 | 66 | The `Kqlmagic `_ magic extension enables notebook experience, exploring Microsoft Azure Monitor data: Azure Data Explorer (Kusto), ApplicationInsights, and LogAnalytics data, from Jupyter notebook (Python3 kernel), using kql (Kusto Query language). 67 | 68 | Neo4j 69 | ***** 70 | 71 | The official `Neo4j driver for Python `_ supports Neo4j 3.0 and above and Python versions 2.7, 3.4, 3.5, 3.6, and 3.7. 72 | It connects to the database using the binary protocol. It aims to be minimal, while being idiomatic to Python. 73 | 74 | Networkx 75 | ******** 76 | 77 | `NetworkX `_ is a Python package for the creation, manipulation, and study of the structure, dynamics, and functions of complex networks. 78 | 79 | Nxviz 80 | ***** 81 | 82 | `Nxviz `_ is a graph visualization package for NetworkX. With nxviz, you can create beautiful graph visualizations by a declarative API. 
83 | 84 | Jupyter Kernels Available 85 | ######################### 86 | 87 | IPython Kernel (Python) 88 | ************************* 89 | 90 | The Jupyter team maintains the `IPython kernel `_ since the Jupyter notebook server depends on the IPython kernel functionality. 91 | Many other languages, in addition to Python, may be used in the notebook. 92 | 93 | PySpark Kernel (Python) 94 | ************************ 95 | 96 | A python Kernel to enable `Apache Spark for python `_. 97 | Writing PySpark Applications is really no different than writing normal Python applications or packages. 98 | It’s quite similar to writing command-line applications in particular. 99 | Spark doesn’t have a build concept, just Python scripts, so to run an application, you simply execute the script against the cluster. 100 | 101 | Syplon Kernel (Scala/Python) 102 | ***************************** 103 | 104 | A Scala kernel for Apache Spark that uses `metakernel `_ in combination with `py4j `_. 105 | 106 | R Kernel (R) 107 | ************ 108 | 109 | An R kernel for `Apache SparkR `_. 110 | SparkR is an R package that provides a light-weight frontend to use Apache Spark from R. 111 | In Spark 2.4.1, SparkR provides a distributed data frame implementation that supports operations like selection, filtering, aggregation etc. (similar to R data frames, dplyr) but on large datasets. 112 | SparkR also supports distributed machine learning using MLlib. -------------------------------------------------------------------------------- /docs/source/jupyter_rto.rst: -------------------------------------------------------------------------------- 1 | Jupyter Red Team Operations (RTO) Server 2 | ======================================== 3 | 4 | A notebook server built for offensive operators with a few libraries to connect to known tools such as Bloodhound and Cobalt Strike. 
5 | 6 | Jupyter Python Libraries 7 | ######################## 8 | 9 | Neo4j Pytho Driver 10 | ****************** 11 | 12 | `Neo4j Bolt driver `_ for Python 13 | 14 | PyCobalt 15 | ******** 16 | 17 | `PyCobalt `_ is a Python API for Cobalt Strike 18 | 19 | Jupyter Kernels Available 20 | ######################### 21 | 22 | IPython Kernel (Python) 23 | ************************* 24 | 25 | The Jupyter team maintains the `IPython kernel `_ since the Jupyter notebook server depends on the IPython kernel functionality. 26 | Many other languages, in addition to Python, may be used in the notebook. -------------------------------------------------------------------------------- /docs/source/jupyter_spark.rst: -------------------------------------------------------------------------------- 1 | Jupyter Spark Server 2 | ==================== 3 | 4 | A notebook server built for any operator looking to leverage advanced analytics provided by Apache Spark. 5 | 6 | Jupyter Python Libraries 7 | ######################## 8 | 9 | Pandas 10 | ****** 11 | 12 | `Pandas `_ is an open source, BSD-licensed library providing high-performance, easy-to-use data structures and data analysis tools for the Python programming language. 13 | 14 | Jupyter Kernels Available 15 | ######################### 16 | 17 | IPython Kernel (Python) 18 | ************************* 19 | 20 | The Jupyter team maintains the `IPython kernel `_ since the Jupyter notebook server depends on the IPython kernel functionality. 21 | Many other languages, in addition to Python, may be used in the notebook. 22 | 23 | PySpark Kernel (Python) 24 | ************************ 25 | 26 | A python Kernel to enable `Apache Spark for python `_. 27 | Writing PySpark Applications is really no different than writing normal Python applications or packages. 28 | It’s quite similar to writing command-line applications in particular. 
29 | Spark doesn’t have a build concept, just Python scripts, so to run an application, you simply execute the script against the cluster. 30 | 31 | Syplon Kernel (Scala/Python) 32 | ***************************** 33 | 34 | A Scala kernel for Apache Spark that uses `metakernel `_ in combination with `py4j `_. 35 | 36 | R Kernel (R) 37 | ************ 38 | 39 | An R kernel for `Apache SparkR `_. 40 | SparkR is an R package that provides a light-weight frontend to use Apache Spark from R. 41 | In Spark 2.4.1, SparkR provides a distributed data frame implementation that supports operations like selection, filtering, aggregation etc. (similar to R data frames, dplyr) but on large datasets. 42 | SparkR also supports distributed machine learning using MLlib. -------------------------------------------------------------------------------- /docs/source/zeppelin.rst: -------------------------------------------------------------------------------- 1 | Zeppelin Notebook 2 | ================= 3 | 4 | Coming soon.. -------------------------------------------------------------------------------- /scripts/docker_install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # HuntingGrounds script: docker_install.sh 4 | # HuntingGrounnds description: Install docker and docker-compose 5 | # Author: Roberto Rodriguez (@Cyb3rWard0g) 6 | # License: GPL-3.0 7 | 8 | DOCKER_INFO_TAG="[DOCKER-INSTALLATION-INFO]" 9 | DOCKER_ERROR_TAG="[DOCKER-INSTALLATION-ERROR]" 10 | 11 | # *********** Check if user is root *************** 12 | if [[ $EUID -ne 0 ]]; then 13 | echo "$DOCKER_INFO_TAG YOU MUST BE ROOT TO RUN THIS SCRIPT!!!" 
14 | exit 1 15 | fi 16 | 17 | # *********** Set Log File *************** 18 | LOGFILE="/var/log/helk-install.log" 19 | echoerror() { 20 | printf "$DOCKER_ERROR_TAG ${RC} * ERROR${EC}: $@\n" 1>&2; 21 | } 22 | 23 | # ********* Globals ********************** 24 | SYSTEM_KERNEL="$(uname -s)" 25 | 26 | echo "$DOCKER_INFO_TAG Checking distribution list and product version" 27 | if [ "$SYSTEM_KERNEL" == "Linux" ]; then 28 | # *********** Check distribution list *************** 29 | LSB_DIST="$(. /etc/os-release && echo "$ID")" 30 | LSB_DIST="$(echo "$LSB_DIST" | tr '[:upper:]' '[:lower:]')" 31 | # *********** Check distribution version *************** 32 | case "$LSB_DIST" in 33 | ubuntu) 34 | if [ -x "$(command -v lsb_release)" ]; then 35 | DIST_VERSION="$(lsb_release --codename | cut -f2)" 36 | fi 37 | if [ -z "$DIST_VERSION" ] && [ -r /etc/lsb-release ]; then 38 | DIST_VERSION="$(. /etc/lsb-release && echo "$DISTRIB_CODENAME")" 39 | fi 40 | # ********* Commenting Out CDROM ********************** 41 | sed -i "s/\(^deb cdrom.*$\)/\#/g" /etc/apt/sources.list 42 | ;; 43 | debian|raspbian) 44 | DIST_VERSION="$(sed 's/\/.*//' /etc/debian_version | sed 's/\..*//')" 45 | case "$DIST_VERSION" in 46 | 9) DIST_VERSION="stretch";; 47 | 8) DIST_VERSION="jessie";; 48 | 7) DIST_VERSION="wheezy";; 49 | esac 50 | # ********* Commenting Out CDROM ********************** 51 | sed -i "s/\(^deb cdrom.*$\)/\#/g" /etc/apt/sources.list 52 | ;; 53 | centos) 54 | if [ -z "$DIST_VERSION" ] && [ -r /etc/os-release ]; then 55 | DIST_VERSION="$(. /etc/os-release && echo "$VERSION_ID")" 56 | fi 57 | ;; 58 | rhel|ol|sles) 59 | ee_notice "$LSB_DIST" 60 | exit 1 61 | ;; 62 | *) 63 | if [ -x "$(command -v lsb_release)" ]; then 64 | DIST_VERSION="$(lsb_release --release | cut -f2)" 65 | fi 66 | if [ -z "$DIST_VERSION" ] && [ -r /etc/os-release ]; then 67 | DIST_VERSION="$(. /etc/os-release && echo "$VERSION_ID")" 68 | fi 69 | ;; 70 | esac 71 | ERROR=$? 
72 | if [ $ERROR -ne 0 ]; then 73 | echoerror "Could not verify distribution or version of the OS (Error Code: $ERROR)." 74 | fi 75 | echo "$DOCKER_INFO_TAG You're using $LSB_DIST version $DIST_VERSION" 76 | elif [ "$SYSTEM_KERNEL" == "Darwin" ]; then 77 | PRODUCT_NAME="$(sw_vers -productName)" 78 | PRODUCT_VERSION="$(sw_vers -productVersion)" 79 | BUILD_VERSION="$(sw_vers -buildVersion)" 80 | echo "$DOCKER_INFO_TAG You're using $PRODUCT_NAME version $PRODUCT_VERSION" 81 | else 82 | echo "$DOCKER_INFO_TAG We cannot figure out the SYSTEM_KERNEL, distribution or version of the OS" 83 | fi 84 | 85 | 86 | # ********** Install Curl ******************** 87 | install_curl(){ 88 | echo "$DOCKER_INFO_TAG Installing curl before installing docker.." 89 | case "$LSB_DIST" in 90 | ubuntu|debian|raspbian) 91 | apt-get install -y curl >> $LOGFILE 2>&1 92 | ;; 93 | centos|rhel) 94 | yum install curl >> $LOGFILE 2>&1 95 | ;; 96 | *) 97 | echo "$DOCKER_INFO_TAG Please install curl for $LSB_DIST $DIST_VERSION .." 98 | exit 1 99 | ;; 100 | esac 101 | ERROR=$? 102 | if [ $ERROR -ne 0 ]; then 103 | echoerror "Could not install curl for $lsb_dist $dist_version (Error Code: $ERROR)." 104 | exit 1 105 | fi 106 | } 107 | 108 | # ****** Installing docker via convenience script *********** 109 | install_docker(){ 110 | echo "$DOCKER_INFO_TAG Installing docker via convenience script.." 111 | curl -fsSL get.docker.com -o get-docker.sh >> $LOGFILE 2>&1 112 | chmod +x get-docker.sh >> $LOGFILE 2>&1 113 | ./get-docker.sh >> $LOGFILE 2>&1 114 | ERROR=$? 115 | if [ $ERROR -ne 0 ]; then 116 | echoerror "Could not install docker via convenience script (Error Code: $ERROR)." 117 | if [ -x "$(command -v snap)" ]; then 118 | SNAP_VERSION=$(snap version | grep -w 'snap' | awk '{print $2}') 119 | echo "DOCKER_INFO_TAG Snap v$SNAP_VERSION is available. Trying to install docker via snap.." 120 | snap install docker >> $LOGFILE 2>&1 121 | ERROR=$? 
122 | if [ $ERROR -ne 0 ]; then 123 | echoerror "Could not install docker via snap (Error Code: $ERROR)." 124 | exit 1 125 | fi 126 | echo "$DOCKER_INFO_TAG Docker successfully installed via snap." 127 | else 128 | echo "$DOCKER_INFO_TAG Docker could not be installed. Check /var/log/helk-install.log for details." 129 | exit 1 130 | fi 131 | fi 132 | } 133 | 134 | # ****** Installing docker compose from github.com/docker/compose *********** 135 | install_docker_compose(){ 136 | echo "$DOCKER_INFO_TAG Installing docker-compose.." 137 | curl -L https://github.com/docker/compose/releases/download/1.23.2/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose >> $LOGFILE 2>&1 138 | chmod +x /usr/local/bin/docker-compose >> $LOGFILE 2>&1 139 | ERROR=$? 140 | if [ $ERROR -ne 0 ]; then 141 | echoerror "Could not install docker-compose (Error Code: $ERROR)." 142 | exit 1 143 | fi 144 | } 145 | 146 | # *********** Main steps ********************* 147 | if [ "$SYSTEM_KERNEL" == "Linux" ]; then 148 | # *********** Check if curl is installed *************** 149 | if [ -x "$(command -v curl)" ]; then 150 | echo "$DOCKER_INFO_TAG curl is already installed" 151 | else 152 | echo "$DOCKER_INFO_TAG curl is not installed" 153 | install_curl 154 | fi 155 | 156 | # *********** Check if docker is installed *************** 157 | if [ -x "$(command -v docker)" ]; then 158 | echo "$DOCKER_INFO_TAG Docker already installed" 159 | else 160 | echo "$DOCKER_INFO_TAG Docker is not installed" 161 | install_docker 162 | fi 163 | # ********** Check if docker-compose is installed ******* 164 | if [ -x "$(command -v docker-compose)" ]; then 165 | echo "$DOCKER_INFO_TAG Docker-compose already installed" 166 | else 167 | echo "$DOCKER_INFO_TAG Docker-compose is not installed" 168 | install_docker_compose 169 | fi 170 | else 171 | # *********** Check if docker is installed *************** 172 | if [ -x "$(command -v docker)" ] && [ -x "$(command -v docker-compose)" ]; then 173 | echo 
"$DOCKER_INFO_TAG Docker & Docker-compose already installed" 174 | else 175 | echo "$DOCKER_INFO_TAG Please innstall Docker & Docker-compose for $SYSTEM_KERNEL" 176 | exit 1 177 | fi 178 | fi --------------------------------------------------------------------------------