├── .env ├── LICENSE_HEADER ├── afm ├── __init__.py ├── filesystems │ ├── httpfs.py │ └── s3.py ├── pep │ ├── __init__.py │ ├── test_base.py │ ├── test_actions.py │ └── actions.py ├── command.py ├── worker.py ├── flight │ ├── auth_handlers.py │ └── flight.py ├── auth.py ├── ticket.py ├── config.py ├── auth_handlers │ └── auth_servers.py ├── environment │ └── environment.py ├── asset.py └── server.py ├── helm └── afm │ ├── Chart.yaml │ ├── templates │ ├── serviceaccount.yaml │ ├── configmap.yaml │ ├── service.yaml │ ├── NOTES.txt │ ├── _helpers.tpl │ └── deployment.yaml │ ├── .helmignore │ ├── values.sample.yaml │ ├── values.yaml │ └── files │ └── conf.yaml ├── .github ├── dco.yml ├── ISSUE_TEMPLATE │ └── config.yml └── workflows │ ├── test-module.yaml │ ├── pull-request.yml │ ├── push.yml │ └── codeql-analysis.yml ├── Makefile.env ├── hack ├── tag_value.sh ├── tools │ ├── requirements.sh │ ├── install_yq.sh │ ├── install_kubectl.sh │ ├── install_helm.sh │ └── common.sh ├── test-script │ ├── expected.txt │ ├── sample-policy.rego │ ├── fybrikapplication.yaml │ ├── Asset.yaml │ ├── test.py │ └── PS_20174392719_1491204439457_log.csv ├── make-rules │ ├── tools.mk │ └── helm.mk └── test_module.sh ├── sample ├── flight.yaml ├── README.md ├── sample.yaml ├── sample.py └── sample_put.py ├── RELEASE.md ├── Pipfile ├── Makefile ├── main.py ├── module.yaml ├── .gitignore ├── scripts └── gendata.py ├── README.md ├── LICENSE └── Pipfile.lock /.env: -------------------------------------------------------------------------------- 1 | DATA_DIR=/tmp 2 | -------------------------------------------------------------------------------- /LICENSE_HEADER: -------------------------------------------------------------------------------- 1 | Copyright ${years} ${owner}. 2 | SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /afm/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020 IBM Corp. 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | -------------------------------------------------------------------------------- /helm/afm/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | description: A Helm chart for installing AFM in Kubernetes 3 | name: arrow-flight-module-chart 4 | version: "0.0.0" 5 | -------------------------------------------------------------------------------- /.github/dco.yml: -------------------------------------------------------------------------------- 1 | # This enables DCO bot for you, please take a look https://github.com/probot/dco 2 | # for more details. 3 | require: 4 | members: false 5 | -------------------------------------------------------------------------------- /Makefile.env: -------------------------------------------------------------------------------- 1 | export ROOT_DIR ?= . 2 | export TOOLS_DIR := $(ROOT_DIR)/hack/tools 3 | export TOOLBIN := $(TOOLS_DIR)/bin 4 | export ABSTOOLBIN := $(shell pwd)/$(TOOLS_DIR)/bin 5 | 6 | -------------------------------------------------------------------------------- /hack/tag_value.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright 2020 IBM Corp. 
3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | : ${TOOLBIN:=./hack/tools/bin} 6 | 7 | ${TOOLBIN}/yq eval --inplace ".image.tag = \"$DOCKER_TAG\"" helm/afm/values.yaml 8 | -------------------------------------------------------------------------------- /hack/tools/requirements.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright 2020 The Kubernetes Authors. 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | set -o nounset 6 | 7 | HELM_VERSION=v3.10.3 8 | KUBECTL_VERSION=v1.26.1 9 | YQ_VERSION=4.6.0 10 | -------------------------------------------------------------------------------- /helm/afm/templates/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | name: {{ include "arrow-flight-module.fullname" . }} 5 | {{- if .Values.image.pullSecret }} 6 | imagePullSecrets: 7 | - name: {{ .Values.image.pullSecret }} 8 | {{- end }} 9 | -------------------------------------------------------------------------------- /afm/filesystems/httpfs.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020 IBM Corp. 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | from fsspec.implementations.http import HTTPFileSystem 6 | from pyarrow.fs import PyFileSystem, FSSpecHandler 7 | 8 | def httpfs_from_config(): 9 | return PyFileSystem(FSSpecHandler(HTTPFileSystem())) 10 | -------------------------------------------------------------------------------- /helm/afm/templates/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ include "arrow-flight-module.fullname" . }} 5 | data: 6 | conf.yaml: |- 7 | {{- if .Values.config_override }} 8 | {{ .Values.config_override | indent 4}} 9 | {{- else }} 10 | {{ tpl ( .Files.Get "files/conf.yaml" ) . | indent 4 }} 11 | {{- end -}} 12 | -------------------------------------------------------------------------------- /afm/pep/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020 IBM Corp. 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | from fybrik_python_transformation import Action, consolidate_actions, transform, transform_schema, transform_batches 6 | from .actions import Redact, RemoveColumns 7 | 8 | # registry is a map from action name to Action class 9 | registry = Action.registry 10 | -------------------------------------------------------------------------------- /afm/command.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020 IBM Corp. 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import json 6 | 7 | 8 | class AFMCommand: 9 | def __init__(self, cmd): 10 | self.raw = json.loads(cmd) 11 | 12 | @property 13 | def asset_name(self) -> str: 14 | return self.raw['asset'] 15 | 16 | @property 17 | def columns(self) -> list: 18 | return self.raw.get('columns', None) 19 | -------------------------------------------------------------------------------- /helm/afm/.helmignore: -------------------------------------------------------------------------------- 1 | # Patterns to ignore when building packages. 2 | # This supports shell glob matching, relative path matching, and 3 | # negation (prefixed with !). Only one pattern per line. 
4 | .DS_Store 5 | # Common VCS dirs 6 | .git/ 7 | .gitignore 8 | .bzr/ 9 | .bzrignore 10 | .hg/ 11 | .hgignore 12 | .svn/ 13 | # Common backup files 14 | *.swp 15 | *.bak 16 | *.tmp 17 | *~ 18 | # Various IDEs 19 | .project 20 | .idea/ 21 | *.tmproj 22 | .vscode/ 23 | -------------------------------------------------------------------------------- /hack/test-script/expected.txt: -------------------------------------------------------------------------------- 1 | amount oldbalanceOrg 2 | 0 9839.64 XXXXX 3 | 1 1864.28 XXXXX 4 | 2 181.00 XXXXX 5 | 3 181.00 XXXXX 6 | 4 11668.14 XXXXX 7 | .. ... ... 8 | 95 710544.77 XXXXX 9 | 96 581294.26 XXXXX 10 | 97 11996.58 XXXXX 11 | 98 2875.10 XXXXX 12 | 99 8586.98 XXXXX 13 | 14 | [100 rows x 2 columns] 15 | -------------------------------------------------------------------------------- /hack/test-script/sample-policy.rego: -------------------------------------------------------------------------------- 1 | package dataapi.authz 2 | 3 | rule[{"action": {"name":"RedactAction", "columns": column_names}, "policy": description}] { 4 | description := "Redact columns tagged as PII in datasets tagged with finance = true" 5 | input.action.actionType == "read" 6 | input.resource.metadata.tags.finance 7 | column_names := [input.resource.metadata.columns[i].name | input.resource.metadata.columns[i].tags.PII] 8 | count(column_names) > 0 9 | } 10 | -------------------------------------------------------------------------------- /hack/test-script/fybrikapplication.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: app.fybrik.io/v1beta1 2 | kind: FybrikApplication 3 | metadata: 4 | name: my-notebook 5 | labels: 6 | app: my-notebook 7 | spec: 8 | selector: 9 | workloadSelector: 10 | matchLabels: 11 | app: my-notebook 12 | appInfo: 13 | intent: Fraud Detection 14 | data: 15 | - dataSetID: "fybrik-notebook-sample/paysim-csv" 16 | requirements: 17 | interface: 18 | protocol: fybrik-arrow-flight 19 | -------------------------------------------------------------------------------- /hack/tools/install_yq.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright 2020 The Kubernetes Authors. 
3 | # SPDX-License-Identifier: Apache-2.0
4 | 
5 | source ./common.sh
6 | 
7 | 
8 | header_text "Checking for bin/yq ${YQ_VERSION}"
9 | [[ -f bin/yq && `bin/yq --version | awk '{print $3}'` == ${YQ_VERSION} ]] && exit 0
10 | 
11 | header_text "Installing bin/yq ${YQ_VERSION}"
12 | mkdir -p ./bin
13 | curl -L https://github.com/mikefarah/yq/releases/download/v${YQ_VERSION}/yq_${os}_${arch} -o bin/yq
14 | chmod +x bin/yq
15 | 
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | blank_issues_enabled: true
2 | contact_links:
3 |   - name: Ask a Question
4 |     url: https://github.com/fybrik/arrow-flight-module/discussions/new?category=q-a
5 |     about: Ask the community for help
6 |   - name: Feature Request
7 |     url: https://github.com/fybrik/arrow-flight-module/discussions/new?category=ideas
8 |     about: Share ideas for new features
9 |   - name: Bug Report
10 |     url: https://github.com/fybrik/arrow-flight-module/issues/new
11 |     about: Report a reproducible bug
--------------------------------------------------------------------------------
/hack/tools/install_kubectl.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Copyright 2020 The Kubernetes Authors.
3 | # SPDX-License-Identifier: Apache-2.0
4 | 
5 | source ./common.sh
6 | 
7 | header_text "Checking for bin/kubectl ${KUBECTL_VERSION}"
8 | [[ -f bin/kubectl && `bin/kubectl version -o=yaml 2> /dev/null | bin/yq e '.clientVersion.gitVersion' -` == "v${KUBECTL_VERSION}" ]] && exit 0
9 | 
10 | header_text "Installing bin/kubectl ${KUBECTL_VERSION}"
11 | 
12 | mkdir -p ./bin
13 | curl -L https://dl.k8s.io/release/${KUBECTL_VERSION}/bin/linux/amd64/kubectl -o bin/kubectl
14 | 
15 | chmod +x bin/kubectl
16 | 
--------------------------------------------------------------------------------
/sample/flight.yaml:
--------------------------------------------------------------------------------
1 | data:
2 |   - name: "data.csv"
3 |     format: csv
4 |     path: "/home/cdoron/pilot_arrow_client/docker/data.csv"
5 |     connection:
6 |       type: localfs
7 |   - name: "nyc-taxi.parquet"
8 |     connection:
9 |       type: flight
10 |       flight:
11 |         flight_command: '{"asset": "data.csv"}'
12 |         endpoint_url: "localhost"
13 |         port: "9000"
14 |     transformations:
15 |       - action: "Redact"
16 |         description: "redact stuff"
17 |         columns:
18 |           - dropoff_at
19 |         options:
20 |           redactValue: "XXXXX"
--------------------------------------------------------------------------------
/hack/make-rules/tools.mk:
--------------------------------------------------------------------------------
1 | INSTALL_TOOLS += $(TOOLBIN)/yq
2 | $(TOOLBIN)/yq:
3 | 	cd $(TOOLS_DIR); ./install_yq.sh
4 | 
5 | INSTALL_TOOLS += $(TOOLBIN)/helm
6 | $(TOOLBIN)/helm:
7 | 	cd $(TOOLS_DIR); ./install_helm.sh
8 | 
9 | INSTALL_TOOLS += $(TOOLBIN)/kind
10 | $(TOOLBIN)/kind:
11 | 	GOBIN=$(ABSTOOLBIN) go install sigs.k8s.io/kind@v0.17.0
12 | 
13 | INSTALL_TOOLS += $(TOOLBIN)/kubectl
14 | $(TOOLBIN)/kubectl:
15 | 	cd $(TOOLS_DIR); ./install_kubectl.sh
16 | 
17 | .PHONY: install-tools
18 | install-tools: $(INSTALL_TOOLS)
19 | 
20 | .PHONY: uninstall-tools
21 | uninstall-tools:
22 | 	rm -rf $(INSTALL_TOOLS)
23 | 
24 | 
25 | 
26 | 
--------------------------------------------------------------------------------
/RELEASE.md:
--------------------------------------------------------------------------------
1 | # Release Process
2 | 
3 | The process of creating a
release is described in this document. Replace `X.Y.Z` with the version to be released. 4 | 5 | ## 1. Create a [new release](https://github.com/fybrik/arrow-flight-module/releases/new) 6 | 7 | - Use `vX.Y.Z` tag and set `master` as the target. 8 | - Update the tags `spec.chart.values.image.tag` and `spec.chart.name` in module.yaml file to be `X.Y.Z` and attach the file to the release. 9 | - Update `Version compatibility matrix` section in README.md if needed. 10 | 11 | Ensure that the release notes explicitly mention upgrade instructions and any breaking change. 12 | -------------------------------------------------------------------------------- /hack/tools/install_helm.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright 2020 The Kubernetes Authors. 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | 6 | source ./common.sh 7 | 8 | header_text "Checking for bin/helm ${HELM_VERSION}" 9 | [[ -f bin/helm && `bin/helm version --template='{{.Version}}'` == ${HELM_VERSION} ]] && exit 0 10 | 11 | header_text "Installing bin/helm ${HELM_VERSION}" 12 | 13 | mkdir -p ./bin 14 | curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3 15 | chmod 700 get_helm.sh 16 | HELM_INSTALL_DIR=bin ./get_helm.sh -v ${HELM_VERSION} --no-sudo 17 | rm -rf get_helm.sh 18 | 19 | -------------------------------------------------------------------------------- /helm/afm/templates/service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: {{ .Release.Name }} 5 | labels: 6 | app.kubernetes.io/name: {{ include "arrow-flight-module.name" . }} 7 | helm.sh/chart: {{ include "arrow-flight-module.chart" . }} 8 | app.kubernetes.io/instance: {{ .Release.Name }} 9 | app.kubernetes.io/managed-by: {{ .Release.Service }} 10 | spec: 11 | type: {{ .Values.service.type }} 12 | ports: 13 | - port: {{ .Values.service.port }} 14 | targetPort: 8080 15 | protocol: TCP 16 | name: grpc 17 | selector: 18 | app.kubernetes.io/name: {{ include "arrow-flight-module.name" . }} 19 | app.kubernetes.io/instance: {{ .Release.Name }} 20 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | name = "pypi" 3 | url = "https://pypi.org/simple" 4 | verify_ssl = true 5 | 6 | [scripts] 7 | server = "python main.py" 8 | addlicense = "licenseheaders -y 2020 -o \"IBM Corp\" -d . 
--ext .py -t ./LICENSE_HEADER"
9 | 
10 | [dev-packages]
11 | pytest = "*"
12 | licenseheaders = "*"
13 | pylint = "*"
14 | 
15 | [packages]
16 | pandas = "==1.4.2"
17 | pyarrow = "==8.0.0"
18 | requests = "*"
19 | fsspec = "==0.8.4"
20 | aiohttp = "==3.10.2"
21 | pyyaml = "==6.0.1"
22 | faker = "==13.3.0"
23 | fybrik_python_logging = "==0.1.0"
24 | fybrik_python_tls = "==0.1.0"
25 | fybrik_python_vault = "==0.2.0"
26 | fybrik_python_transformation = "==0.2.1"
27 | 
28 | [requires]
29 | python_version = "3.8"
30 | 
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | include Makefile.env
2 | 
3 | DOCKER_TAG ?= 0.0.0
4 | 
5 | DOCKER_NAME ?= arrow-flight-module
6 | 
7 | IMG := ${DOCKER_HOSTNAME}/${DOCKER_NAMESPACE}/${DOCKER_NAME}:${DOCKER_TAG}
8 | 
9 | export HELM_EXPERIMENTAL_OCI=1
10 | 
11 | all: test build
12 | 
13 | .PHONY: test
14 | test:
15 | 	pipenv run python -m unittest discover
16 | 
17 | .PHONY: build
18 | build:
19 | 	pipenv requirements > requirements.txt
20 | 	docker build -f build/Dockerfile . -t ${IMG}
21 | 	rm requirements.txt
22 | 
23 | .PHONY: docker-push
24 | docker-push:
25 | 	docker push ${IMG}
26 | 
27 | .PHONY: push-to-kind
28 | push-to-kind:
29 | 	kind load docker-image ${IMG}
30 | 
31 | include hack/make-rules/helm.mk
32 | include hack/make-rules/tools.mk
33 | 
34 | 
--------------------------------------------------------------------------------
/sample/README.md:
--------------------------------------------------------------------------------
1 | # Sample
2 | 
3 | This sample is for trying AFM locally without Kubernetes.
4 | It shows a server configured to serve [The New York City taxi trip record data](https://www1.nyc.gov/site/tlc/about/tlc-trip-record-data.page).
5 | 
6 | ## Steps
7 | 
8 | 1. Run the server with
9 |    ```bash
10 |    pipenv run server --config sample/sample.yaml
11 |    ```
12 | 1. Run a sample client with
13 |    ```bash
14 |    pipenv run python sample/sample.py --username qqq --password moo
15 |    ```
16 | 1. Write a parquet dataset to /tmp/new-dataset. The dataset consists of a single column with the numbers 0 to 10239
17 |    ```bash
18 |    pipenv run python sample/sample_put.py --username qqq --password moo
19 |    ```
20 | 
--------------------------------------------------------------------------------
/afm/worker.py:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright 2020 IBM Corp.
3 | # SPDX-License-Identifier: Apache-2.0
4 | #
5 | 
6 | class Worker:
7 |     def __init__(self, config: dict):
8 |         self._name = config.get("name")
9 |         self._address = config.get("address")
10 |         self._port = config.get("port")
11 |         # Optional path for this worker; read it from the config so that
12 |         # the `path` property below always has a value to return.
13 |         self._path = config.get("path")
14 | 
15 |     @property
16 |     def name(self):
17 |         return self._name
18 | 
19 |     @property
20 |     def address(self):
21 |         return self._address
22 | 
23 |     @property
24 |     def port(self):
25 |         return self._port
26 | 
27 |     @property
28 |     def path(self):
29 |         return self._path
30 | 
31 | def workers_from_config(workers_list: list):
32 |     workers = []
33 |     for w in workers_list:
34 |         workers.append(Worker(w))
35 |     return workers
36 | 
--------------------------------------------------------------------------------
/afm/flight/auth_handlers.py:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright 2020 IBM Corp.
3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | 6 | from pyarrow import flight 7 | 8 | # taken from https://github.com/apache/arrow/blob/master/python/pyarrow/tests/test_flight.py#L450 9 | class HttpBasicClientAuthHandler(flight.ClientAuthHandler): 10 | """An example implementation of HTTP basic authentication.""" 11 | 12 | def __init__(self, username, password): 13 | super().__init__() 14 | self.basic_auth = flight.BasicAuth(username, password) 15 | self.token = None 16 | 17 | def authenticate(self, outgoing, incoming): 18 | auth = self.basic_auth.serialize() 19 | outgoing.write(auth) 20 | self.token = incoming.read() 21 | 22 | def get_token(self): 23 | return self.token 24 | -------------------------------------------------------------------------------- /hack/test-script/Asset.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: katalog.fybrik.io/v1alpha1 2 | kind: Asset 3 | metadata: 4 | name: paysim-csv 5 | spec: 6 | secretRef: 7 | name: paysim-csv 8 | details: 9 | dataFormat: csv 10 | connection: 11 | name: s3 12 | s3: 13 | endpoint: "http://localstack.fybrik-notebook-sample.svc.cluster.local:4566" 14 | bucket: "demo" 15 | object_key: "PS_20174392719_1491204439457_log.csv" 16 | metadata: 17 | name: Synthetic Financial Datasets For Fraud Detection 18 | geography: theshire 19 | tags: 20 | finance: true 21 | columns: 22 | - name: nameOrig 23 | tags: 24 | PII: true 25 | - name: oldbalanceOrg 26 | tags: 27 | PII: true 28 | - name: newbalanceOrig 29 | tags: 30 | PII: true 31 | -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020 IBM Corp. 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import argparse 6 | from afm.server import AFMFlightServer 7 | from fybrik_python_logging import logger 8 | 9 | if __name__ == '__main__': 10 | parser = argparse.ArgumentParser(description='AFM Flight Server') 11 | parser.add_argument( 12 | '-p', '--port', type=int, default=8080, help='Listening port') 13 | parser.add_argument( 14 | '-c', '--config', type=str, default='/etc/conf/conf.yaml', help='Path to config file') 15 | parser.add_argument( 16 | '-l', '--loglevel', type=str, default='warning', help='logging level', 17 | choices=['trace', 'info', 'debug', 'warning', 'error', 'critical']) 18 | args = parser.parse_args() 19 | 20 | server = AFMFlightServer(args.config, args.port, args.loglevel.upper()) 21 | logger.info('AFMFlightServer started') 22 | server.serve() 23 | -------------------------------------------------------------------------------- /hack/tools/common.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright 2020 The Kubernetes Authors. 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | 6 | set -o errexit 7 | set -o nounset 8 | set -o pipefail 9 | 10 | ROOT_DIR=../.. 11 | 12 | source ${ROOT_DIR}/hack/tools/requirements.sh 13 | 14 | arch=amd64 15 | os="unknown" 16 | 17 | if [[ "$OSTYPE" == "linux-gnu" ]]; then 18 | os="linux" 19 | elif [[ "$OSTYPE" == "darwin"* ]]; then 20 | os="darwin" 21 | fi 22 | 23 | if [[ "$os" == "unknown" ]]; then 24 | echo "OS '$OSTYPE' not supported. Aborting." 
>&2 25 | exit 1 26 | fi 27 | 28 | # Turn colors in this script off by setting the NO_COLOR variable in your 29 | # environment to any value: 30 | # 31 | # $ NO_COLOR=1 test.sh 32 | NO_COLOR=${NO_COLOR:-""} 33 | if [ -z "$NO_COLOR" ]; then 34 | header=$'\e[1;33m' 35 | reset=$'\e[0m' 36 | else 37 | header='' 38 | reset='' 39 | fi 40 | 41 | function header_text { 42 | echo "$header$*$reset" 43 | } 44 | 45 | export PATH=$(pwd)/bin:${PATH} 46 | -------------------------------------------------------------------------------- /hack/test-script/test.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020 IBM Corp. 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | 6 | 7 | 8 | import json 9 | import pyarrow.flight as fl 10 | import pandas as pd 11 | import argparse 12 | 13 | parser = argparse.ArgumentParser() 14 | parser.add_argument("-e", "--endpoint", required=True) 15 | parser.add_argument("-p", "--port", required=True) 16 | args = parser.parse_args() 17 | # Create a Flight client 18 | client = fl.connect("grpc://{}:{}".format(args.endpoint, args.port)) 19 | 20 | # Prepare the request 21 | request = { 22 | "asset": "fybrik-notebook-sample/paysim-csv", 23 | # To request specific columns add to the request a "columns" key with a list of column names 24 | "columns": ["amount", "oldbalanceOrg"] 25 | } 26 | 27 | # Send request and fetch result as a pandas DataFrame 28 | info = client.get_flight_info(fl.FlightDescriptor.for_command(json.dumps(request))) 29 | reader: fl.FlightStreamReader = client.do_get(info.endpoints[0].ticket) 30 | df: pd.DataFrame = reader.read_pandas() 31 | print(df) 32 | 33 | -------------------------------------------------------------------------------- /helm/afm/templates/NOTES.txt: -------------------------------------------------------------------------------- 1 | {{- if .Values.assets -}} 2 | # Example usage in Python 3 | 4 | ```python 5 | import json 6 | import pyarrow.flight as fl 7 | import pandas as pd 8 | 9 | # Create a Flight client 10 | client = fl.connect("grpc://{{ .Release.Name }}.{{ .Release.Namespace }}.svc.cluster.local:80") 11 | 12 | # Prepare the request 13 | request = { 14 | {{- if gt (len .Values.assets) 1 }} 15 | # You can request any of the following assets: 16 | {{- range .Values.assets }} 17 | # - {{ .assetID | quote }} 18 | {{- end }} 19 | {{- end }} 20 | "asset": {{ (index .Values.assets 0).assetID | quote }}, 21 | # To request specific columns add to the request a "columns" key with a list of column names 22 | # "columns": [...] 23 | } 24 | 25 | # Send request and fetch result as a pandas DataFrame 26 | info = client.get_flight_info(fl.FlightDescriptor.for_command(json.dumps(request))) 27 | reader: fl.FlightStreamReader = client.do_get(info.endpoints[0].ticket) 28 | df: pd.DataFrame = reader.read_pandas() 29 | ``` 30 | {{- end -}} 31 | -------------------------------------------------------------------------------- /afm/auth.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020 IBM Corp. 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | from fybrik_python_logging import logger 6 | from pyarrow import flight 7 | from pyarrow.flight import ServerAuthHandler 8 | from .auth_handlers.auth_servers import NoopAuthHandler, HttpBasicServerAuthHandler 9 | 10 | class AFMAuthHandler(ServerAuthHandler): 11 | def __init__(self, auth_config): 12 | super().__init__() 13 | if not auth_config: 14 | logger.info("no authentication configuration. 
Using NoopAuthHandler")
15 |             self.auth_handler = NoopAuthHandler()
16 |         elif 'basic' in auth_config:
17 |             logger.info("basic authentication configuration. Using HttpBasicServerAuthHandler")
18 |             self.auth_handler = HttpBasicServerAuthHandler(auth_config['basic'].get('credentials', None))
19 |         else:
20 |             raise NotImplementedError("Unknown authentication type")
21 | 
22 |     def authenticate(self, outgoing, incoming):
23 |         self.auth_handler.authenticate(outgoing, incoming)
24 | 
25 |     def is_valid(self, token):
26 |         return self.auth_handler.is_valid(token)
--------------------------------------------------------------------------------
/afm/ticket.py:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright 2020 IBM Corp.
3 | # SPDX-License-Identifier: Apache-2.0
4 | #
5 | import json
6 | 
7 | 
8 | # TODO: replace with a real ticket and not just a copy of command
9 | class AFMTicket:
10 |     def __init__(self, asset_name, columns, flight_ticket=None, partition_path=None):
11 |         self._asset_name = asset_name
12 |         self._columns = columns
13 |         self._flight_ticket = flight_ticket
14 |         self._partition_path = partition_path
15 | 
16 |     @staticmethod
17 |     def fromJSON(raw):
18 |         return AFMTicket(**json.loads(raw))
19 | 
20 |     def toJSON(self):
21 |         return json.dumps({
22 |             "asset_name": self.asset_name,
23 |             "columns": self.columns,
24 |             "flight_ticket": self.flight_ticket,
25 |             "partition_path": self.partition_path,
26 |         })
27 | 
28 |     @property
29 |     def asset_name(self) -> str:
30 |         return self._asset_name
31 | 
32 |     @property
33 |     def columns(self) -> list:
34 |         return self._columns
35 | 
36 |     @property
37 |     def flight_ticket(self) -> str:
38 |         return self._flight_ticket
39 | 
40 |     @property
41 |     def partition_path(self) -> str:
42 |         return self._partition_path
43 | 
--------------------------------------------------------------------------------
/afm/pep/test_base.py:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright 2020 IBM Corp.
3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import unittest 6 | from fybrik_python_transformation import Action, consolidate_actions, action_key 7 | 8 | class ActionOne(Action): 9 | pass 10 | 11 | class ActionTwo(Action): 12 | pass 13 | 14 | class TestBase(unittest.TestCase): 15 | 16 | def test_consolidate_actions(self): 17 | actions = [ 18 | ActionOne("1.1", columns=["gender"], options={"redactValue": "XXX"}), 19 | ActionTwo("2.1", columns=["gender", "weight"], options={}), 20 | ActionOne("1.2", columns=["age"], options={"redactValue": "XXX"}), 21 | ActionOne("1.3", columns=["weight"], options={"redactValue": "YYYY"}) 22 | ] 23 | actual = consolidate_actions(actions) 24 | expected = [ 25 | ActionOne("1.1", columns=["gender", "age"], options={"redactValue": "XXX"}), 26 | ActionTwo("2.1", columns=["gender", "weight"], options={}), 27 | ActionOne("1.3", columns=["weight"], options={"redactValue": "YYYY"}) 28 | ] 29 | self.assertEqual(str(sorted(actual, key=action_key)), str(sorted(expected, key=action_key))) 30 | 31 | self.assertEqual([], consolidate_actions([])) 32 | 33 | 34 | if __name__ == '__main__': 35 | unittest.main() 36 | -------------------------------------------------------------------------------- /.github/workflows/test-module.yaml: -------------------------------------------------------------------------------- 1 | name: Test 2 | on: 3 | workflow_dispatch: 4 | inputs: 5 | kind-version: 6 | required: true 7 | default: 'kind25' 8 | type: choice 9 | options: 10 | - kind23 11 | - kind24 12 | - kind25 13 | fybrik-version: 14 | required: true 15 | default: '1.2.1' 16 | module-version: 17 | required: true 18 | default: '0.10.0' 19 | cert-manager-version: 20 | required: true 21 | default: '1.6.2' 22 | 23 | env: 24 | GO_VERSION: 1.19 25 | AWS_PAGER: "" 26 | AWS_EC2_METADATA_DISABLED: true 27 | 28 | jobs: 29 | test-module: 30 | runs-on: ubuntu-latest 31 | steps: 32 | - uses: actions/checkout@v3 33 | with: 34 | fetch-depth: 0 35 | - name: Install Go 36 | uses: actions/setup-go@v3 37 | with: 38 | go-version: ${{ env.GO_VERSION }} 39 | - name: Install tools 40 | run: make install-tools 41 | - name: Install AWS cli 42 | uses: unfor19/install-aws-cli-action@v1 43 | with: 44 | version: 2 # default 45 | verbose: false # default 46 | - name: run-test 47 | run: pushd hack && ./test_module.sh ${{ github.event.inputs.kind-version }} ${{ github.event.inputs.fybrik-version }} ${{ github.event.inputs.module-version }} ${{ github.event.inputs.cert-manager-version }} 48 | 49 | -------------------------------------------------------------------------------- /.github/workflows/pull-request.yml: -------------------------------------------------------------------------------- 1 | name: Pull-request 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - master 7 | 8 | env: 9 | DOCKER_HOSTNAME: ghcr.io 10 | DOCKER_NAMESPACE: fybrik 11 | DOCKER_USERNAME: ${{ github.actor }} 12 | AWS_PAGER: "" 13 | AWS_EC2_METADATA_DISABLED: true 14 | GO_VERSION: 1.19 15 | 16 | jobs: 17 | build: 18 | name: Build 19 | runs-on: ubuntu-latest 20 | env: 21 | kubectlVersion: 'kind25' 22 | fybrikVersion: 'master' 23 | moduleVersion: 'master' 24 | certManagerVersion: '1.6.2' 25 | steps: 26 | - uses: actions/checkout@v3 27 | - name: Install Go 28 | uses: actions/setup-go@v3 29 | with: 30 | go-version: ${{ env.GO_VERSION }} 31 | - name: Set up Python 32 | uses: actions/setup-python@v3 33 | with: 34 | python-version: '3.x' 35 | - name: Install dependencies 36 | run: | 37 | python -m pip install --upgrade pip 38 | pip install 
setuptools wheel twine 39 | pip install pipenv 40 | - name: Install tools 41 | run: make install-tools 42 | - name: Install AWS cli 43 | uses: unfor19/install-aws-cli-action@v1 44 | with: 45 | version: 2 # default 46 | verbose: false # default 47 | - name: run-test 48 | run: pushd hack && ./test_module.sh $kubectlVersion $fybrikVersion $moduleVersion $certManagerVersion 49 | - name: Build docker image 50 | run: make build 51 | -------------------------------------------------------------------------------- /sample/sample.yaml: -------------------------------------------------------------------------------- 1 | auth: 2 | basic: 3 | credentials: 4 | qqq: "moo" 5 | user: "password" 6 | workers: 7 | - name: "main" 8 | address: "localhost" 9 | port: 8080 10 | data: 11 | - name: "new-dataset" 12 | capability: "read" 13 | format: parquet 14 | path: "/tmp/new-dataset" 15 | connection: 16 | type: localfs 17 | transformations: 18 | - action: "Filter" 19 | description: "filtering based on country" 20 | options: 21 | query: "Country == 'Israel' or Country == 'United Kingdom'" 22 | - action: "Redact" 23 | description: "redact stuff" 24 | columns: 25 | - Date of Birth 26 | options: 27 | redactValue: "XXXXX" 28 | - name: "new-dataset" 29 | capability: "write" 30 | format: parquet 31 | path: "/tmp/new-dataset" 32 | connection: 33 | type: localfs 34 | transformations: 35 | - action: "AgeFilter" 36 | description: "filtering based on age" 37 | columns: 38 | - Date of Birth 39 | options: 40 | age: 40 41 | - name: "nyc-taxi.parquet" 42 | capability: "read" 43 | format: parquet 44 | path: "ursa-labs-taxi-data/2019" 45 | connection: 46 | type: s3 47 | s3: 48 | endpoint_url: "https://s3.us-east-2.amazonaws.com" 49 | transformations: 50 | - action: "Redact" 51 | description: "redact stuff" 52 | columns: 53 | - payment_type 54 | options: 55 | redactValue: "XXXXX" 56 | -------------------------------------------------------------------------------- /helm/afm/values.sample.yaml: -------------------------------------------------------------------------------- 1 | # Try it with `helm install --generate-name --dry-run -f helm/afm/values.sample.yaml helm/afm` 2 | labels: 3 | app.fybrik.io/app-name: fybrik-flight-read 4 | uuid: 12345678 5 | assets: 6 | - args: 7 | - connection: 8 | name: s3 9 | s3: 10 | bucket: fybrik-test-bucket 11 | endpoint: s3.eu-gb.cloud-object-storage.appdomain.cloud 12 | object_key: test1.parquet 13 | region: theshire 14 | format: parquet 15 | vault: 16 | read: 17 | address: http://vault.fybrik-system:8200 18 | authPath: /v1/auth/kubernetes/login 19 | role: module 20 | secretPath: /v1/kubernetes-secrets/data-creds?namespace=fybrik-notebook-sample 21 | assetID: "test1" 22 | capability: read 23 | transformations: 24 | - name: "RedactAction" 25 | RedactAction: 26 | columns: 27 | - col1 28 | - col2 29 | - name: "RemoveAction" 30 | RemoveAction: 31 | columns: 32 | - col1 33 | - col2 34 | - args: 35 | - connection: 36 | name: s3 37 | s3: 38 | bucket: fybrik-test-bucket 39 | endpoint: s3.eu-gb.cloud-object-storage.appdomain.cloud 40 | object_key: test2.parquet 41 | format: parquet 42 | vault: 43 | write: 44 | address: http://vault.fybrik-system:8200 45 | authPath: /v1/auth/kubernetes/login 46 | role: module 47 | secretPath: /v1/kubernetes-secrets/data-creds?namespace=fybrik-notebook-sample 48 | assetID: "test2" 49 | capability: write 50 | transformations: 51 | - name: "RedactAction" 52 | RedactAction: 53 | columns: 54 | - col1 55 | - col2 56 | 
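As an illustrative sketch (not a file in this repository): once a release is installed with values like the sample above, a client could read the `test1` asset following the same pattern the chart prints in `templates/NOTES.txt` and that `hack/test-script/test.py` uses. The release name `afm-test` and namespace `fybrik-blueprints` below are assumptions for the example only.

```python
import json

import pandas as pd
import pyarrow.flight as fl

# Service address follows the "{release}.{namespace}" naming used in
# templates/NOTES.txt; substitute your actual release name and namespace.
client = fl.connect("grpc://afm-test.fybrik-blueprints.svc.cluster.local:80")

# Ask for the asset declared in values.sample.yaml. The configured
# RedactAction/RemoveAction transformations are applied server-side.
request = {
    "asset": "test1",
    # Optionally restrict the result to specific columns:
    # "columns": ["col3"],
}

info = client.get_flight_info(fl.FlightDescriptor.for_command(json.dumps(request)))
reader: fl.FlightStreamReader = client.do_get(info.endpoints[0].ticket)
df: pd.DataFrame = reader.read_pandas()
print(df.head())
```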
-------------------------------------------------------------------------------- /afm/config.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020 IBM Corp. 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import sys 6 | import yaml 7 | 8 | 9 | class Config: 10 | def __init__(self, config_path): 11 | # TODO: change to schemed yaml using schemed-yaml-config 12 | with open(config_path, 'r') as stream: 13 | self.values = yaml.safe_load(stream) 14 | if self.plugin_dir and self.plugin_dir not in sys.path: 15 | sys.path.append(self.plugin_dir) 16 | 17 | def for_asset(self, asset_name: str, capability="") -> dict: 18 | for asset_info in self.values.get('data', []): 19 | if asset_info['name'] == asset_name and (capability == "" or asset_info['capability'] == capability): 20 | return asset_info 21 | raise ValueError( 22 | "Requested config for undefined asset: {}".format(asset_name)) 23 | 24 | @property 25 | def plugin_dir(self) -> str: 26 | return self.values.get('plugin_dir', None) 27 | 28 | @property 29 | def app_uuid(self) -> str: 30 | return self.values.get('app-uuid', '') 31 | 32 | @property 33 | def workers(self) -> list: 34 | return self.values.get('workers', []) 35 | 36 | @property 37 | def auth(self) -> dict: 38 | return self.values.get('auth', {}) 39 | 40 | def __enter__(self): 41 | return self 42 | 43 | def __exit__(self, exc_type, exc_val, exc_tb): 44 | pass 45 | 46 | def connection_type(self, asset_name: str, capability="") -> str: 47 | asset_info = self.for_asset(asset_name, capability) 48 | if 'connection' in asset_info: 49 | return asset_info['connection'].get('type') 50 | return None 51 | -------------------------------------------------------------------------------- /afm/flight/flight.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020 IBM Corp. 
3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | 6 | import pyarrow.flight as fl 7 | import json 8 | 9 | from pyarrow.flight import FlightEndpoint, Ticket 10 | from .auth_handlers import HttpBasicClientAuthHandler 11 | 12 | class Flight: 13 | def __init__(self, endpoint, port, flight_command, auth_handler): 14 | self.flight_client = fl.connect("grpc://{}:{}".format(endpoint, port)) 15 | if auth_handler: 16 | self.flight_client.authenticate(auth_handler) 17 | self.flight_command = flight_command 18 | 19 | def batches(self, reader): 20 | for chunk in reader: 21 | yield chunk.data 22 | 23 | def get_flight_info(self): 24 | return self.flight_client.get_flight_info(fl.FlightDescriptor.for_command(self.flight_command)) 25 | 26 | def do_get(self, context, ticket): 27 | ticket_dict = json.loads(ticket.ticket.decode()) 28 | flight_stream_reader = self.flight_client.do_get(Ticket(ticket_dict["flight_ticket"])) 29 | return flight_stream_reader.schema, self.batches(flight_stream_reader) 30 | 31 | def get_auth_handler(auth_config): 32 | if 'basic' in auth_config: 33 | return HttpBasicClientAuthHandler( 34 | auth_config['basic'].get('user', None), 35 | auth_config['basic'].get('password', None)) 36 | return None 37 | 38 | def flight_from_config(flight_config): 39 | endpoint = flight_config.get('endpoint_url') 40 | port = flight_config.get('port') 41 | flight_command = flight_config.get('flight_command') 42 | auth_handler = get_auth_handler(flight_config.get('auth', {})) 43 | return Flight(endpoint, port, flight_command, auth_handler) 44 | -------------------------------------------------------------------------------- /module.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: app.fybrik.io/v1beta1 2 | kind: FybrikModule 3 | metadata: 4 | name: arrow-flight-module 5 | labels: 6 | name: arrow-flight-module 7 | version: latest # semantic version 8 | spec: 9 | type: service 10 | chart: 11 | name: ghcr.io/fybrik/arrow-flight-module-chart:0.0.0-master 12 | values: 13 | image.tag: master 14 | capabilities: 15 | - capability: read 16 | scope: workload 17 | api: 18 | connection: 19 | name: fybrik-arrow-flight 20 | fybrik-arrow-flight: 21 | hostname: "{{ .Release.Name }}.{{ .Release.Namespace }}" 22 | port: 80 23 | scheme: grpc 24 | supportedInterfaces: 25 | - source: 26 | protocol: s3 27 | dataformat: parquet 28 | - source: 29 | protocol: s3 30 | dataformat: csv 31 | - source: 32 | protocol: fybrik-arrow-flight 33 | actions: 34 | - name: RedactAction 35 | - name: RemoveAction 36 | - name: FilterAction 37 | - name: AgeFilterAction 38 | - capability: write 39 | scope: workload 40 | api: 41 | connection: 42 | name: fybrik-arrow-flight 43 | fybrik-arrow-flight: 44 | hostname: "{{ .Release.Name }}.{{ .Release.Namespace }}" 45 | port: 80 46 | scheme: grpc 47 | supportedInterfaces: 48 | - sink: 49 | protocol: s3 50 | dataformat: parquet 51 | - sink: 52 | protocol: s3 53 | dataformat: csv 54 | actions: 55 | - name: RedactAction 56 | - name: RemoveAction 57 | - name: FilterAction 58 | - name: AgeFilterAction 59 | -------------------------------------------------------------------------------- /afm/auth_handlers/auth_servers.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020 IBM Corp. 
3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | from fybrik_python_logging import logger 6 | from pyarrow import flight 7 | from pyarrow.flight import ServerAuthHandler 8 | 9 | # taken from https://github.com/apache/arrow/blob/master/python/pyarrow/tests/test_flight.py#L508 10 | class NoopAuthHandler(ServerAuthHandler): 11 | """A no-op auth handler.""" 12 | 13 | def authenticate(self, outgoing, incoming): 14 | outgoing.write("") 15 | """Do nothing.""" 16 | 17 | def is_valid(self, token): 18 | """ 19 | Returning an empty string. 20 | Returning None causes Type error. 21 | """ 22 | return "" 23 | 24 | # taken from https://github.com/apache/arrow/blob/master/python/pyarrow/tests/test_flight.py#L426 25 | class HttpBasicServerAuthHandler(ServerAuthHandler): 26 | """An example implementation of HTTP basic authentication.""" 27 | 28 | def __init__(self, creds): 29 | super().__init__() 30 | self.creds = creds 31 | 32 | def authenticate(self, outgoing, incoming): 33 | buf = incoming.read() 34 | auth = flight.BasicAuth.deserialize(buf) 35 | logger.info('basic authentication', 36 | extra={'username': auth.username.decode()}) 37 | if auth.username.decode() not in self.creds: 38 | raise flight.FlightUnauthenticatedError("unknown user") 39 | if self.creds[auth.username.decode()] != auth.password.decode(): 40 | raise flight.FlightUnauthenticatedError("wrong password") 41 | outgoing.write(auth.username) 42 | 43 | def is_valid(self, token): 44 | if not token: 45 | raise flight.FlightUnauthenticatedError("token not provided") 46 | if token.decode() not in self.creds: 47 | raise flight.FlightUnauthenticatedError("unknown user") 48 | return token 49 | -------------------------------------------------------------------------------- /afm/environment/environment.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020 IBM Corp. 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | 6 | import os 7 | import ssl 8 | from os.path import exists 9 | from fybrik_python_logging import logger 10 | 11 | 12 | # constants 13 | MIN_TLS_VERSION = "MIN_TLS_VERSION" 14 | DATA_DIR = "DATA_DIR" 15 | 16 | CERT_FILE_PATH = os.environ.get(DATA_DIR) + "/tls-cert/tls.crt" 17 | CERT_FILE_KEY = os.environ.get(DATA_DIR) + "/tls-cert/tls.key" 18 | CACERTS_FILE = os.environ.get(DATA_DIR) + "/tls-cacert/ca.crt" 19 | 20 | def get_env_var_value(name): 21 | return os.environ.get(name) 22 | 23 | def print_env_vars(): 24 | data_dir = os.environ.get(DATA_DIR) 25 | if data_dir: 26 | logger.trace("DATA_DIR: " + data_dir) 27 | min_tls_version = os.environ.get(MIN_TLS_VERSION) 28 | if min_tls_version: 29 | logger.trace("MIN_TLS_VERSION: " + min_tls_version) 30 | 31 | def get_min_tls_version(): 32 | min_version = os.environ.get(MIN_TLS_VERSION) 33 | # ref: https://docs.python.org/3/library/ssl.html#ssl.TLSVersion.MINIMUM_SUPPORTED 34 | rv = None 35 | 36 | if min_version == "SSL-3": 37 | rv = ssl.TLSVersion.SSLv3 38 | elif min_version == "TLS-1": 39 | rv = ssl.TLSVersion.TLSv1 40 | elif min_version == "TLS-1.1": 41 | rv = ssl.TLSVersion.TLSv1_1 42 | elif min_version == "TLS-1.2": 43 | rv = ssl.TLSVersion.TLSv1_2 44 | elif min_version == "TLS-1.3": 45 | rv = ssl.TLSVersion.TLSv1_3 46 | else: 47 | logger.trace('MinTLSVersion is set to the system default value') 48 | return rv 49 | 50 | logger.trace("MinTLSVersion is set to " + min_version) 51 | return rv 52 | 53 | def get_certs(): 54 | """ 55 | returns the private key and certificate if such provided. 
56 | """ 57 | if not exists(CERT_FILE_PATH) or not exists(CERT_FILE_KEY): 58 | return () 59 | return (CERT_FILE_PATH, CERT_FILE_KEY) 60 | 61 | def get_cacert_path(): 62 | """ 63 | returns the CA certificate file if such provided. 64 | """ 65 | if exists(CACERTS_FILE): 66 | return CACERTS_FILE 67 | return "" 68 | -------------------------------------------------------------------------------- /helm/afm/templates/_helpers.tpl: -------------------------------------------------------------------------------- 1 | {{/* vim: set filetype=mustache: */}} 2 | {{/* 3 | Expand the name of the chart. 4 | */}} 5 | {{- define "arrow-flight-module.name" -}} 6 | {{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} 7 | {{- end -}} 8 | 9 | {{/* 10 | Create a default fully qualified app name. 11 | We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). 12 | If release name contains chart name it will be used as a full name. 13 | */}} 14 | {{- define "arrow-flight-module.fullname" -}} 15 | {{- if .Values.fullnameOverride -}} 16 | {{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} 17 | {{- else -}} 18 | {{- $name := default .Chart.Name .Values.nameOverride -}} 19 | {{- if contains $name .Release.Name -}} 20 | {{- .Release.Name | trunc 63 | trimSuffix "-" -}} 21 | {{- else -}} 22 | {{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} 23 | {{- end -}} 24 | {{- end -}} 25 | {{- end -}} 26 | 27 | {{/* 28 | Create chart name and version as used by the chart label. 29 | */}} 30 | {{- define "arrow-flight-module.chart" -}} 31 | {{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} 32 | {{- end -}} 33 | 34 | {{/* 35 | processPodSecurityContext skips certain keys in Values.podSecurityContext 36 | map if running on openshift. 37 | */}} 38 | {{- define "fybrik.processPodSecurityContext" }} 39 | {{- $podSecurityContext := deepCopy .podSecurityContext }} 40 | {{- if .context.Capabilities.APIVersions.Has "security.openshift.io/v1" }} 41 | {{- range $k, $v := .podSecurityContext }} 42 | {{- if or (eq $k "runAsUser") (eq $k "seccompProfile") }} 43 | {{- $_ := unset $podSecurityContext $k }} 44 | {{- end }} 45 | {{- end }} 46 | {{- end }} 47 | {{- $podSecurityContext | toYaml }} 48 | {{- end }} 49 | 50 | {{/* 51 | Print Data directory. 52 | */}} 53 | {{- define "fybrik.getDataDir" -}} 54 | /data 55 | {{- end }} 56 | 57 | {{/* 58 | Print sub directory in /data directory. The sub directory is 59 | passed as parameter to the function. 60 | */}} 61 | {{- define "fybrik.getDataSubdir" -}} 62 | {{- $dir := toString (first .) -}} 63 | {{- printf "%s/%s" (include "fybrik.getDataDir" .) $dir }} 64 | {{- end }} 65 | 66 | -------------------------------------------------------------------------------- /.github/workflows/push.yml: -------------------------------------------------------------------------------- 1 | name: Push 2 | 3 | on: 4 | push: 5 | branches: 6 | - 'master' 7 | tags: 8 | - '*' 9 | 10 | env: 11 | DOCKER_HOSTNAME: ghcr.io 12 | DOCKER_NAMESPACE: fybrik 13 | DOCKER_USERNAME: ${{ github.actor }} 14 | 15 | jobs: 16 | build: 17 | name: Build 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | - uses: actions/checkout@v3 22 | # Versions are created as follows (In helm 3.7 and above OCI reference tags 23 | # must be valid SemVer): 24 | # - Tags starting with v will be released as what comes after `v`. (e.g. refs/tags/v1.0 -> 1.0). 
25 | # - Master branch will be released with `master` as version tag for the docker images and 26 | # `0.0.0-master` tag for helm chart. 27 | - id: version 28 | name: Infer version 29 | run: | 30 | if [[ ${GITHUB_REF} == refs/tags/* ]] ; 31 | then 32 | version="${GITHUB_REF#refs/tags/v}" 33 | echo "HELM_TAG=${version}" >> $GITHUB_ENV 34 | elif [[ ${GITHUB_REF} == refs/heads/master ]] ; 35 | then 36 | version=master 37 | echo "HELM_TAG=0.0.0-master" >> $GITHUB_ENV 38 | fi 39 | echo ::set-output name=version::$version 40 | echo "DOCKER_TAG=${version}" >> $GITHUB_ENV 41 | - name: Helm tool installer 42 | uses: Azure/setup-helm@v1 43 | with: 44 | version: v3.7.0 45 | - name: Set up Python 46 | uses: actions/setup-python@v3 47 | with: 48 | python-version: '3.x' 49 | - name: Install dependencies 50 | run: | 51 | python -m pip install --upgrade pip 52 | pip install setuptools wheel twine 53 | pip install pipenv 54 | - name: Build docker image 55 | run: make build 56 | - name: Docker push 57 | run: | 58 | echo "${{ secrets.GITHUB_TOKEN }}" | docker login -u "${{ env.DOCKER_USERNAME }}" --password-stdin "${{ env.DOCKER_HOSTNAME }}" 59 | make docker-push 60 | - run: docker images 61 | - name: Install yq 62 | run: make ./hack/tools/bin/yq 63 | - name: Update docker image tag in the chart value.yaml file 64 | run: ./hack/tag_value.sh 65 | - name: Helm chart push 66 | run: | 67 | export HELM_EXPERIMENTAL_OCI=1 68 | echo "${{ secrets.GITHUB_TOKEN }}" | helm registry login -u "${{ github.actor }}" --password-stdin "${{ env.DOCKER_HOSTNAME }}" 69 | make helm-chart-push 70 | 71 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | #build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | #wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | 53 | # Translations 54 | *.mo 55 | *.pot 56 | 57 | # Django stuff: 58 | *.log 59 | local_settings.py 60 | db.sqlite3 61 | db.sqlite3-journal 62 | 63 | # Flask stuff: 64 | instance/ 65 | .webassets-cache 66 | 67 | # Scrapy stuff: 68 | .scrapy 69 | 70 | # Sphinx documentation 71 | docs/_build/ 72 | 73 | # PyBuilder 74 | target/ 75 | 76 | # Jupyter Notebook 77 | .ipynb_checkpoints 78 | 79 | # IPython 80 | profile_default/ 81 | ipython_config.py 82 | 83 | # pyenv 84 | .python-version 85 | 86 | # pipenv 87 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 88 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 89 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 90 | # install all needed dependencies. 
91 | #Pipfile.lock 92 | 93 | # celery beat schedule file 94 | celerybeat-schedule 95 | 96 | # SageMath parsed files 97 | *.sage.py 98 | 99 | # Environments 100 | .venv 101 | env/ 102 | venv/ 103 | ENV/ 104 | env.bak/ 105 | venv.bak/ 106 | 107 | # Spyder project settings 108 | .spyderproject 109 | .spyproject 110 | 111 | # Rope project settings 112 | .ropeproject 113 | 114 | # mkdocs documentation 115 | /site 116 | 117 | # mypy 118 | .mypy_cache/ 119 | .dmypy.json 120 | dmypy.json 121 | 122 | # Pyre type checker 123 | .pyre/ 124 | 125 | # IDEs 126 | .idea/ 127 | .vscode/ 128 | 129 | # Others 130 | requirements.txt 131 | -------------------------------------------------------------------------------- /scripts/gendata.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020 IBM Corp. 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import argparse 6 | import os 7 | from urllib.parse import urlparse 8 | 9 | import pandas as pd 10 | import pyarrow as pa 11 | import pyarrow.parquet as pq 12 | from pyarrow.fs import S3FileSystem, LocalFileSystem 13 | 14 | 15 | def sample_table(): 16 | df = pd.DataFrame( 17 | {'gender': ["Female", "Male", "Male"], 'weight': [-1, 5, 9.5], 'age': [1, 2, 3]}) 18 | return pa.Table.from_pandas(df) 19 | 20 | 21 | def import_table(source: str): 22 | if not source: 23 | return sample_table() 24 | if source.endswith(".csv"): 25 | from pyarrow import csv 26 | return csv.read_csv(source) 27 | if source.endswith(".json"): 28 | from pyarrow import json 29 | return json.read_json(source) 30 | if source.endswith(".parquet"): 31 | return pq.read_table(source) 32 | raise ValueError("source must be csv, json or parquet") 33 | 34 | 35 | def main(): 36 | parser = argparse.ArgumentParser( 37 | description="Generate sample parquet data") 38 | parser.add_argument('path', type=str, nargs='?', 39 | help='path to save data to', default="./data/data.parquet") 40 | parser.add_argument('--source', type=str, help='local path to import data from (optional; can be csv, json or parquet)') 41 | parser.add_argument('--endpoint', type=str, 42 | help='S3 endpoint (e.g.: https://s3.eu-de.cloud-object-storage.appdomain.cloud') 43 | parser.add_argument('--access_key', type=str, help='S3 access key') 44 | parser.add_argument('--secret_key', type=str, help='S3 secret key') 45 | args = parser.parse_args() 46 | 47 | if args.endpoint: 48 | print("Using S3 file system") 49 | parsed_endpoint = urlparse(args.endpoint) 50 | fs = S3FileSystem(endpoint_override=parsed_endpoint.netloc, 51 | scheme=parsed_endpoint.scheme, 52 | access_key=args.access_key, 53 | secret_key=args.secret_key, 54 | background_writes=False) 55 | else: 56 | print("Using local file system") 57 | os.makedirs(os.path.dirname(args.path), exist_ok=True) 58 | fs = LocalFileSystem() 59 | 60 | table = import_table(args.source) 61 | 62 | with fs.open_output_stream(args.path) as f: 63 | pq.write_table(table, f) 64 | print("Table written to", args.path) 65 | print(table.to_pandas()) 66 | 67 | 68 | if __name__ == '__main__': 69 | main() 70 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 
6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ master ] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: [ master ] 20 | schedule: 21 | - cron: '30 12 * * 3' 22 | 23 | jobs: 24 | analyze: 25 | name: Analyze 26 | runs-on: ubuntu-latest 27 | 28 | strategy: 29 | fail-fast: false 30 | matrix: 31 | language: [ 'python' ] 32 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] 33 | # Learn more: 34 | # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed 35 | 36 | steps: 37 | - name: Checkout repository 38 | uses: actions/checkout@v3 39 | 40 | # Initializes the CodeQL tools for scanning. 41 | - name: Initialize CodeQL 42 | uses: github/codeql-action/init@v2 43 | with: 44 | languages: ${{ matrix.language }} 45 | # If you wish to specify custom queries, you can do so here or in a config file. 46 | # By default, queries listed here will override any specified in a config file. 47 | # Prefix the list here with "+" to use these queries and those in the config file. 48 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 49 | 50 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 51 | # If this step fails, then you should remove it and run the build manually (see below) 52 | - name: Autobuild 53 | uses: github/codeql-action/autobuild@v2 54 | 55 | # ℹ️ Command-line programs to run using the OS shell. 56 | # 📚 https://git.io/JvXDl 57 | 58 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 59 | # and modify them (or add more) to build your code if your project 60 | # uses a compiled language 61 | 62 | #- run: | 63 | # make bootstrap 64 | # make release 65 | 66 | - name: Perform CodeQL Analysis 67 | uses: github/codeql-action/analyze@v2 68 | -------------------------------------------------------------------------------- /helm/afm/values.yaml: -------------------------------------------------------------------------------- 1 | # Default values for arrow-flight-module. 2 | # This is a YAML-formatted file. 3 | # Declare variables to be passed into your templates. 4 | 5 | replicaCount: 1 6 | 7 | image: 8 | repository: ghcr.io/fybrik/arrow-flight-module 9 | tag: 0.0.0 10 | pullPolicy: Always 11 | pullSecret: null 12 | 13 | # Set the size limit of the data directory. 14 | dataDirSizeLimit: 200Mi 15 | 16 | nameOverride: "" 17 | fullnameOverride: "" 18 | 19 | service: 20 | type: ClusterIP 21 | port: 80 22 | 23 | resources: {} 24 | # We usually recommend not to specify default resources and to leave this as a conscious 25 | # choice for the user. This also increases chances charts run on environments with little 26 | # resources, such as Minikube. If you do want to specify resources, uncomment the following 27 | # lines, adjust them as necessary, and remove the curly braces after 'resources:'. 28 | # limits: 29 | # cpu: 100m 30 | # memory: 128Mi 31 | # requests: 32 | # cpu: 100m 33 | # memory: 128Mi 34 | 35 | # Pod Security Context. 
36 | # ref: https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/pod-v1/#security-context 37 | podSecurityContext: 38 |   runAsNonRoot: true 39 |   # Ignored on openshift. 40 |   runAsUser: 10001 41 |   # Ignored on openshift. 42 |   seccompProfile: 43 |     type: RuntimeDefault 44 | # Container Security Context. 45 | # ref: https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/pod-v1/#security-context-1 46 | containerSecurityContext: 47 |   privileged: false 48 |   allowPrivilegeEscalation: false 49 |   capabilities: 50 |     drop: 51 |     - ALL 52 | 53 | nodeSelector: {} 54 | 55 | tolerations: [] 56 | 57 | affinity: {} 58 | 59 | tls: 60 |   # Relevant if TLS is used between vault and the module. 61 |   # MinVersion contains the minimum TLS version that is acceptable. 62 |   # If not provided, the system default value is used. 63 |   # Possible values are SSL-3, TLS-1, TLS-1.1, TLS-1.2 and TLS-1.3. 64 |   # ref: https://docs.python.org/3/library/ssl.html#ssl.TLSVersion.MINIMUM_SUPPORTED 65 |   minVersion: TLS-1.3 66 |   certs: 67 |     # Name of the kubernetes secret that holds the module certificate. 68 |     # The secret should be of `kubernetes.io/tls` type. 69 |     # certSecretName: "test-tls-module-certs" 70 |     certSecretName: "" 71 |     # Name of the kubernetes secret that holds the certificate authority (CA) certificates 72 |     # which are used by the module to validate the connection to vault. 73 |     # The CA certificates key in the secret should have a `.crt` suffix. 74 |     # The provided certificates replace the certificates in the system CA certificate store. 75 |     # If the secret is not provided then the CA certificates are taken from the system 76 |     # CA certificate store, for example `/etc/ssl/certs/`. 77 |     # cacertSecretName: "test-tls-ca-certs" 78 |     cacertSecretName: "" 79 | 80 | -------------------------------------------------------------------------------- /sample/sample.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020 IBM Corp.
3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | from timeit import repeat 6 | import pyarrow.flight as fl 7 | import json 8 | import threading 9 | 10 | # taken from https://github.com/apache/arrow/blob/master/python/pyarrow/tests/test_flight.py#L450 11 | class HttpBasicClientAuthHandler(fl.ClientAuthHandler): 12 |     """An example implementation of HTTP basic authentication.""" 13 | 14 |     def __init__(self, username, password): 15 |         super().__init__() 16 |         self.basic_auth = fl.BasicAuth(username, password) 17 |         self.token = None 18 | 19 |     def authenticate(self, outgoing, incoming): 20 |         auth = self.basic_auth.serialize() 21 |         outgoing.write(auth) 22 |         self.token = incoming.read() 23 | 24 |     def get_token(self): 25 |         return self.token 26 | 27 | request = { 28 |     "asset": "nyc-taxi.parquet", 29 |     "columns": ["vendor_id", "pickup_at", "dropoff_at", "payment_type"] 30 | } 31 | 32 | def read_from_endpoint(endpoint): 33 |     if endpoint.locations: 34 |         client = fl.connect(endpoint.locations[0]) 35 |     else: 36 |         client = fl.connect("grpc://localhost:{}".format(args.port)) 37 |     if args.username or args.password: 38 |         client.authenticate( 39 |             HttpBasicClientAuthHandler(args.username, args.password)) 40 |     result: fl.FlightStreamReader = client.do_get(endpoint.ticket) 41 |     print(result.read_all().to_pandas()) 42 |     #for s in result: 43 |     #    pass 44 | 45 | def read_dataset(): 46 |     threads = [] 47 |     for endpoint in info.endpoints: 48 |         t = threading.Thread(target=read_from_endpoint, args=(endpoint,)) 49 |         threads.append(t) 50 |         t.start() 51 |     for t in threads: 52 |         t.join() 53 | 54 | def main(port, num_repeat, username, password): 55 |     global client, info 56 |     client = fl.connect("grpc://localhost:{}".format(port)) 57 |     if username or password: 58 |         client.authenticate(HttpBasicClientAuthHandler(username, password)) 59 |     info = client.get_flight_info( 60 |         fl.FlightDescriptor.for_command(json.dumps(request))) 61 | 62 |     print("Timing " + str(num_repeat) + " runs of retrieving the dataset: " + 63 |           str(repeat(stmt="read_dataset()", 64 |                      setup="from __main__ import read_dataset", 65 |                      repeat=num_repeat, number=1))) 66 | 67 | if __name__ == "__main__": 68 |     import argparse 69 |     parser = argparse.ArgumentParser(description='arrow-flight-module sample') 70 |     parser.add_argument( 71 |         '--port', type=int, default=8080, help='Listening port') 72 |     parser.add_argument( 73 |         '--repeat', type=int, default=3, help='Number of times we measure the time to go over dataset') 74 |     parser.add_argument( 75 |         '--username', type=str, default=None, help='Authentication username') 76 |     parser.add_argument( 77 |         '--password', type=str, default=None, help='Authentication password') 78 |     args = parser.parse_args() 79 | 80 |     main(args.port, args.repeat, args.username, args.password) 81 | -------------------------------------------------------------------------------- /hack/make-rules/helm.mk: -------------------------------------------------------------------------------- 1 | # This script contains helm version 3.7 commands for pushing and pulling charts to an OCI registry 2 | # as described in https://github.com/helm/community/blob/main/hips/hip-0006.md 3 | # To use it, the following env vars should be defined: 4 | 5 | # CHART_NAME the chart name as it appears in Chart.yaml 6 | # HELM_RELEASE the helm release-name of the chart 7 | # HELM_TAG the OCI reference tag (and also the chart version).
Must be SemVer 8 | # CHART_LOCAL_PATH path to the chart directory 9 | # DOCKER_HOSTNAME the docker registry hostname 10 | # DOCKER_NAMESPACE docker registry namespace 11 | # DOCKER_USERNAME docker registry username 12 | 13 | 14 | HELM_VALUES ?= \ 15 | 	--set hello=world1 16 | 17 | TEMP := /tmp 18 | CHART_LOCAL_PATH ?= helm/afm 19 | CHART_NAME ?= arrow-flight-module-chart 20 | HELM_RELEASE ?= rel1-${DOCKER_NAME} 21 | HELM_TAG ?= 0.0.0 22 | 23 | CHART_REGISTRY_PATH := oci://${DOCKER_HOSTNAME}/${DOCKER_NAMESPACE} 24 | 25 | # To enable OCI experimental support for Helm versions prior to v3.8.0, HELM_EXPERIMENTAL_OCI is set 26 | export HELM_EXPERIMENTAL_OCI=1 27 | export GODEBUG=x509ignoreCN=0 28 | 29 | .PHONY: helm-login 30 | helm-login: $(TOOLBIN)/helm 31 | ifneq (${DOCKER_PASSWORD},) 32 | 	$(ABSTOOLBIN)/helm registry login -u "${DOCKER_USERNAME}" -p "${DOCKER_PASSWORD}" ${DOCKER_HOSTNAME} 33 | endif 34 | 35 | .PHONY: helm-verify 36 | helm-verify: $(TOOLBIN)/helm 37 | 	$(ABSTOOLBIN)/helm lint ${CHART_LOCAL_PATH} 38 | 	$(ABSTOOLBIN)/helm install --dry-run ${HELM_RELEASE} ${CHART_LOCAL_PATH} ${HELM_VALUES} 39 | 40 | .PHONY: helm-uninstall 41 | helm-uninstall: $(TOOLBIN)/helm 42 | 	$(ABSTOOLBIN)/helm uninstall ${HELM_RELEASE} || true 43 | 44 | .PHONY: helm-install 45 | helm-install: $(TOOLBIN)/helm 46 | 	$(ABSTOOLBIN)/helm install ${HELM_RELEASE} ${CHART_LOCAL_PATH} ${HELM_VALUES} 47 | 48 | 49 | # example for helm chart push: 50 | # helm package fybrik-template -d /tmp/ --version 0.7.0 51 | # helm push /tmp/fybrik-template-0.7.0.tgz oci://localhost:5000/fybrik-system/ 52 | .PHONY: helm-chart-push 53 | helm-chart-push: helm-login 54 | 	$(ABSTOOLBIN)/helm package ${CHART_LOCAL_PATH} --version=${HELM_TAG} --destination=${TEMP} 55 | 	$(ABSTOOLBIN)/helm push ${TEMP}/${CHART_NAME}-${HELM_TAG}.tgz ${CHART_REGISTRY_PATH} 56 | 	rm -rf ${TEMP}/${CHART_NAME}-${HELM_TAG}.tgz 57 | 58 | .PHONY: helm-chart-pull 59 | helm-chart-pull: helm-login 60 | 	$(ABSTOOLBIN)/helm pull ${CHART_REGISTRY_PATH}/${CHART_NAME} --version ${HELM_TAG} 61 | 62 | .PHONY: helm-chart-list 63 | helm-chart-list: $(TOOLBIN)/helm 64 | 	$(ABSTOOLBIN)/helm list 65 | 66 | .PHONY: helm-chart-install 67 | helm-chart-install: $(TOOLBIN)/helm 68 | 	$(ABSTOOLBIN)/helm install ${HELM_RELEASE} ${CHART_REGISTRY_PATH}/${CHART_NAME} --version ${HELM_TAG} ${HELM_VALUES} 69 | 	$(ABSTOOLBIN)/helm list 70 | 71 | .PHONY: helm-template 72 | helm-template: $(TOOLBIN)/helm 73 | 	$(ABSTOOLBIN)/helm template ${HELM_RELEASE} ${CHART_REGISTRY_PATH}/${CHART_NAME} --version ${HELM_TAG} ${HELM_VALUES} 74 | 75 | .PHONY: helm-debug 76 | helm-debug: $(TOOLBIN)/helm 77 | 	$(ABSTOOLBIN)/helm template ${HELM_RELEASE} ${CHART_REGISTRY_PATH}/${CHART_NAME} ${HELM_VALUES} --version ${HELM_TAG} --debug 78 | 79 | .PHONY: helm-actions 80 | helm-actions: $(TOOLBIN)/helm 81 | 	$(ABSTOOLBIN)/helm show values --version ${HELM_TAG} ${CHART_REGISTRY_PATH}/${CHART_NAME} | yq -y -r .actions 82 | 83 | .PHONY: helm-all 84 | helm-all: helm-verify helm-chart-push helm-chart-pull helm-chart-install helm-uninstall 85 | 86 | -------------------------------------------------------------------------------- /sample/sample_put.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020 IBM Corp.
3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import pyarrow.flight as fl 6 | import pyarrow as pa 7 | import json 8 | from faker import Faker 9 | 10 | # taken from https://github.com/apache/arrow/blob/master/python/pyarrow/tests/test_flight.py#L450 11 | class HttpBasicClientAuthHandler(fl.ClientAuthHandler): 12 | """An example implementation of HTTP basic authentication.""" 13 | 14 | def __init__(self, username, password): 15 | super().__init__() 16 | self.basic_auth = fl.BasicAuth(username, password) 17 | self.token = None 18 | 19 | def authenticate(self, outgoing, incoming): 20 | auth = self.basic_auth.serialize() 21 | outgoing.write(auth) 22 | self.token = incoming.read() 23 | 24 | def get_token(self): 25 | return self.token 26 | 27 | request = { 28 | "asset": "new-dataset", 29 | # write_mode can be append or overwrite. The default is overwrite. 30 | # "write_mode": "append", 31 | } 32 | 33 | def fake_dataset(num_entries): 34 | Faker.seed(1234) 35 | f = Faker() 36 | arrays = [] 37 | column_names = [] 38 | 39 | arr = [] 40 | for i in range(num_entries): 41 | arr.append(f.name()) 42 | arrays.append(arr) 43 | column_names.append("Name") 44 | 45 | arr = [] 46 | for i in range(num_entries): 47 | arr.append(f.email()) 48 | arrays.append(arr) 49 | column_names.append("Email") 50 | 51 | arr = [] 52 | for i in range(num_entries): 53 | arr.append(f.address()) 54 | arrays.append(arr) 55 | column_names.append("Address") 56 | 57 | arr = [] 58 | for i in range(num_entries): 59 | arr.append(f.country()) 60 | arrays.append(arr) 61 | column_names.append("Country") 62 | 63 | arr = [] 64 | for i in range(num_entries): 65 | arr.append(f.date_of_birth()) 66 | arrays.append(arr) 67 | column_names.append("Date of Birth") 68 | 69 | return arrays, column_names 70 | 71 | def main(port, username, password): 72 | client = fl.connect("grpc://localhost:{}".format(port)) 73 | if username or password: 74 | client.authenticate(HttpBasicClientAuthHandler(username, password)) 75 | 76 | # write the new dataset 77 | arrays, names = fake_dataset(1000) 78 | data = pa.Table.from_arrays(arrays, names=names) 79 | writer, _ = client.do_put(fl.FlightDescriptor.for_command(json.dumps(request)), 80 | data.schema) 81 | writer.write_table(data, 1024) 82 | writer.close() 83 | 84 | # now that the dataset is in place, let's try to read it 85 | info = client.get_flight_info( 86 | fl.FlightDescriptor.for_command(json.dumps(request))) 87 | 88 | endpoint = info.endpoints[0] 89 | result: fl.FlightStreamReader = client.do_get(endpoint.ticket) 90 | print(result.read_all().to_pandas()) 91 | 92 | if __name__ == "__main__": 93 | import argparse 94 | parser = argparse.ArgumentParser(description='arrow-flight-module sample') 95 | parser.add_argument( 96 | '--port', type=int, default=8080, help='Listening port') 97 | parser.add_argument( 98 | '--username', type=str, default=None, help='Authentication username') 99 | parser.add_argument( 100 | '--password', type=str, default=None, help='Authentication password') 101 | args = parser.parse_args() 102 | 103 | main(args.port, args.username, args.password) 104 | -------------------------------------------------------------------------------- /helm/afm/files/conf.yaml: -------------------------------------------------------------------------------- 1 | app-uuid: {{ .Values.uuid | default "app-uuid-missing" }} 2 | plugin_dir: /etc/plugins 3 | {{ if .Values.assets -}} 4 | data: 5 | {{- range $asset := .Values.assets }} 6 | - name: {{ $asset.assetID | quote }} 7 | capability: {{ $asset.capability }} 8 
| {{- if and (.args) (eq (len .args) 1) -}} 9 | {{- with (index .args 0) }} 10 | format: {{ .format | quote }} 11 | {{- if index . "connection" "fybrik-arrow-flight" }} 12 | connection: 13 | type: flight 14 | flight: 15 | endpoint_url: {{ index . "connection" "fybrik-arrow-flight" "hostname" }} 16 | port: {{ index . "connection" "fybrik-arrow-flight" "port" }} 17 | flight_command: "{ \"asset\": \"{{ $asset.assetID }}\" }" 18 | {{- end }} 19 | {{- if index . "connection" "s3" }} 20 | path: "{{ .connection.s3.bucket }}/{{ .connection.s3.object_key }}" 21 | connection: 22 | type: s3 23 | s3: 24 | endpoint_url: {{if not (contains "://" .connection.s3.endpoint)}}https://{{end}}{{ .connection.s3.endpoint }} 25 | {{- if index . "connection" "s3" "region" }} 26 | region: {{ .connection.s3.region }} 27 | {{- end }} 28 | {{- range $k, $v := .vault }} 29 | {{- if or (eq $k "read") (eq $k "write") }} 30 | vault_credentials: 31 | {{- if $v.address }} 32 | address: {{ $v.address }} 33 | {{- end }} 34 | {{- if $v.authPath }} 35 | authPath: {{ $v.authPath }} 36 | {{- end }} 37 | {{- if $v.role }} 38 | role: {{ $v.role }} 39 | {{- end }} 40 | {{- if $v.secretPath }} 41 | secretPath: {{ $v.secretPath }} 42 | {{- end }} 43 | {{- end }} 44 | {{- end }} 45 | {{- end }} 46 | {{- end }} 47 | {{- end }} 48 | {{- if $asset.transformations }} 49 | transformations: 50 | {{- $redactColumns := list -}} 51 | {{- $removeColumns := list -}} 52 | {{- $query := "" -}} 53 | {{- $ageFilterColumns := list -}} 54 | {{- $age := 0 -}} 55 | {{- range $asset.transformations -}} 56 | {{- if eq .name "RedactAction" -}} 57 | {{- $redactColumns = .RedactAction.columns -}} 58 | {{- end -}} 59 | {{- if eq .name "RemoveAction" -}} 60 | {{- $removeColumns = .RemoveAction.columns -}} 61 | {{- end -}} 62 | {{- if eq .name "FilterAction" -}} 63 | {{- $query = .FilterAction.options.query -}} 64 | {{- end -}} 65 | {{- if eq .name "AgeFilterAction" -}} 66 | {{- $ageFilterColumns = .AgeFilterAction.columns -}} 67 | {{- $age = .AgeFilterAction.options.age -}} 68 | {{- end -}} 69 | {{- end -}} 70 | {{- if $redactColumns }} 71 | - action: "Redact" 72 | description: "redacting columns: {{ $redactColumns }}" 73 | columns: 74 | {{- range $redactColumns}} 75 | - {{ . }} 76 | {{- end }} 77 | options: 78 | redactValue: "XXXXX" 79 | {{- end }} 80 | {{- if $removeColumns }} 81 | - action: "RemoveColumns" 82 | description: "removing columns: {{ $removeColumns }}" 83 | columns: 84 | {{- range $removeColumns}} 85 | - {{ . }} 86 | {{- end }} 87 | {{- end }} 88 | {{- if $query }} 89 | - action: "Filter" 90 | description: "filtering based on query" 91 | options: 92 | query: "{{ $query }}" 93 | {{- end }} 94 | {{- if $ageFilterColumns }} 95 | - action: "AgeFilter" 96 | description: "filtering based on age" 97 | columns: 98 | {{- range $ageFilterColumns}} 99 | - {{ . }} 100 | {{- end }} 101 | {{- if $age }} 102 | options: 103 | age: "{{ $age }}" 104 | {{- end }} 105 | {{- end }} 106 | {{- end }} 107 | {{- end -}} 108 | {{- else -}} 109 | data: [] 110 | {{- end -}} 111 | -------------------------------------------------------------------------------- /helm/afm/templates/deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: {{ include "arrow-flight-module.fullname" . }} 5 | labels: 6 | app.kubernetes.io/name: {{ include "arrow-flight-module.name" . }} 7 | helm.sh/chart: {{ include "arrow-flight-module.chart" . 
}} 8 | app.kubernetes.io/instance: {{ .Release.Name }} 9 | app.kubernetes.io/managed-by: {{ .Release.Service }} 10 | spec: 11 | replicas: {{ .Values.replicaCount }} 12 | selector: 13 | matchLabels: 14 | app.kubernetes.io/name: {{ include "arrow-flight-module.name" . }} 15 | app.kubernetes.io/instance: {{ .Release.Name }} 16 | template: 17 | metadata: 18 | labels: 19 | app.kubernetes.io/name: {{ include "arrow-flight-module.name" . }} 20 | app.kubernetes.io/instance: {{ .Release.Name }} 21 | {{- range $key, $val := .Values.labels }} 22 | {{ $key }}: {{ $val | quote }} 23 | {{- end }} 24 | annotations: 25 | sidecar.istio.io/inject: "true" 26 | spec: 27 | serviceAccountName: {{ include "arrow-flight-module.fullname" . }} 28 | securityContext: 29 | {{- include "fybrik.processPodSecurityContext" (dict "context" . "podSecurityContext" .Values.podSecurityContext) | nindent 8 }} 30 | containers: 31 | - name: {{ .Chart.Name }} 32 | image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}" 33 | {{- if .Values.image.pullPolicy }} 34 | imagePullPolicy: {{ .Values.image.pullPolicy }} 35 | {{- end }} 36 | ports: 37 | - name: grpc 38 | containerPort: 8080 39 | protocol: TCP 40 | readinessProbe: 41 | failureThreshold: 3 42 | successThreshold: 1 43 | tcpSocket: 44 | port: 8080 45 | securityContext: 46 | {{- .Values.containerSecurityContext | toYaml | nindent 12 }} 47 | resources: 48 | {{- toYaml .Values.resources | nindent 12 }} 49 | env: 50 | - name: DATA_DIR 51 | value: {{ include "fybrik.getDataDir" . }} 52 | - name: MIN_TLS_VERSION 53 | value: {{ .Values.tls.minVersion }} 54 | volumeMounts: 55 | - name: data 56 | mountPath: {{ include "fybrik.getDataDir" . }} 57 | - readOnly: true 58 | mountPath: /etc/conf 59 | name: config 60 | {{- if .Values.tls.certs.certSecretName }} 61 | - mountPath: {{ include "fybrik.getDataSubdir" ( tuple "tls-cert" ) }} 62 | name: tls-cert 63 | readOnly: true 64 | {{- end }} 65 | {{- if .Values.tls.certs.cacertSecretName }} 66 | - mountPath: {{ include "fybrik.getDataSubdir" ( tuple "tls-cacert" ) }} 67 | name: tls-cacert 68 | readOnly: true 69 | {{- end }} 70 | volumes: 71 | - name: data 72 | emptyDir: 73 | sizeLimit: {{ .Values.dataDirSizeLimit }} 74 | - name: config 75 | configMap: 76 | name: {{ include "arrow-flight-module.fullname" . }} 77 | {{- if .Values.tls.certs.certSecretName }} 78 | - name: tls-cert 79 | secret: 80 | defaultMode: 420 81 | secretName: {{ .Values.tls.certs.certSecretName }} 82 | {{- end }} 83 | {{- if .Values.tls.certs.cacertSecretName }} 84 | - name: tls-cacert 85 | secret: 86 | defaultMode: 420 87 | secretName: {{ .Values.tls.certs.cacertSecretName }} 88 | {{- end }} 89 | {{- with .Values.nodeSelector }} 90 | nodeSelector: 91 | {{- toYaml . | nindent 8 }} 92 | {{- end }} 93 | {{- with .Values.affinity }} 94 | affinity: 95 | {{- toYaml . | nindent 8 }} 96 | {{- end }} 97 | {{- with .Values.tolerations }} 98 | tolerations: 99 | {{- toYaml . | nindent 8 }} 100 | {{- end }} 101 | -------------------------------------------------------------------------------- /afm/filesystems/s3.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020 IBM Corp. 
3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | from urllib.parse import urlparse, quote 6 | import requests 7 | from fybrik_python_logging import logger, DataSetID, ForUser 8 | from pyarrow.fs import S3FileSystem 9 | from fybrik_python_vault import get_jwt_from_file, get_raw_secret_from_vault 10 | 11 | 12 | def get_s3_credentials_from_vault(vault_credentials, datasetID, tls_min_version=None, verify=True, cert=None): 13 |     """Get S3 credentials from Vault 14 | 15 |     Args: 16 |         vault_credentials (dictionary): Properties used for getting s3 credentials from Vault. 17 |         datasetID (string): dataset ID. 18 |         tls_min_version (string, optional): TLS minimum version to use in the connection to Vault. Defaults to None. 19 |         verify (optional): Either a boolean, in which case it controls whether we verify 20 |             the Vault server's TLS certificate, or a string, in which case it must be a path 21 |             to a CA bundle to use. Defaults to ``True``. 22 |         cert (tuple, optional): the module's client certificate as a ('cert', 'key') pair. 23 | 24 |     Returns: 25 |         S3 (access_key, secret_key, session_token). 26 |     """ 27 |     jwt_file_path = vault_credentials.get('jwt_file_path', '/var/run/secrets/kubernetes.io/serviceaccount/token') 28 |     jwt = get_jwt_from_file(jwt_file_path) 29 |     vault_address = vault_credentials.get('address', 'https://localhost:8200') 30 |     secret_path = vault_credentials.get('secretPath', '/v1/secret/data/cred') 31 |     vault_auth = vault_credentials.get('authPath', '/v1/auth/kubernetes/login') 32 |     role = vault_credentials.get('role', 'demo') 33 | 34 |     credentials = get_raw_secret_from_vault(jwt, secret_path, vault_address, vault_auth, 35 |                                             role, datasetID, tls_min_version, verify, cert) 36 |     if not credentials: 37 |         raise ValueError("Vault credentials are missing") 38 |     if 'access_key' in credentials and 'secret_key' in credentials: 39 |         session_token = None 40 |         if 'session_token' in credentials and credentials['session_token']: 41 |             session_token = credentials['session_token'] 42 |             logger.trace("session_token was provided in credentials read from Vault", 43 |                 extra={DataSetID: datasetID}) 44 |         else: 45 |             logger.trace("session_token was NOT provided in credentials read from Vault", 46 |                 extra={DataSetID: datasetID}) 47 |         if credentials['access_key'] and credentials['secret_key']: 48 |             return credentials['access_key'], credentials['secret_key'], session_token 49 |         else: 50 |             if not credentials['access_key']: 51 |                 logger.error("'access_key' must be non-empty", 52 |                     extra={DataSetID: datasetID, ForUser: True}) 53 |             if not credentials['secret_key']: 54 |                 logger.error("'secret_key' must be non-empty", 55 |                     extra={DataSetID: datasetID, ForUser: True}) 56 |     logger.error("Expected both 'access_key' and 'secret_key' fields in vault secret", 57 |         extra={DataSetID: datasetID, ForUser: True}) 58 |     raise ValueError("Vault credentials are missing") 59 | 60 | def s3filesystem_from_config(s3_config, datasetID, tls_min_version=None, verify=True, cert=None): 61 |     """Construct and return object of type S3FileSystem based on properties from the configuration 62 | 63 |     Args: 64 |         s3_config (dictionary): s3 configuration. 65 |         datasetID (string): dataset ID. 66 |         tls_min_version (string, optional): TLS minimum version to use in the connection to Vault. Defaults to None. 67 |         verify (optional): Either a boolean, in which case it controls whether we verify 68 |             the Vault server's TLS certificate, or a string, in which case it must be a path 69 |             to a CA bundle to use. Defaults to ``True``.
70 |         cert (tuple, optional): the module's client certificate as a ('cert', 'key') pair. 71 | 72 |     Returns: 73 |         An object of type S3FileSystem 74 |     """ 75 |     endpoint = s3_config.get('endpoint_url') 76 |     region = s3_config.get('region') 77 | 78 |     credentials = s3_config.get('credentials', {}) 79 |     access_key = credentials.get('accessKey') 80 |     secret_key = credentials.get('secretKey') 81 |     session_token = None  # default; only Vault-provided credentials may carry a session token 82 |     if 'vault_credentials' in s3_config: 83 |         logger.trace("reading s3 configuration from vault", 84 |             extra={DataSetID: datasetID}) 85 |         access_key, secret_key, session_token = get_s3_credentials_from_vault( 86 |             s3_config.get('vault_credentials'), datasetID, tls_min_version, verify, cert) 87 | 88 |     scheme, endpoint_override = _split_endpoint(endpoint) 89 |     anonymous = not access_key 90 | 91 |     return S3FileSystem( 92 |         region=region, 93 |         endpoint_override=endpoint_override, 94 |         scheme=scheme, 95 |         access_key=access_key, 96 |         secret_key=secret_key, 97 |         session_token=session_token, 98 |         anonymous=anonymous 99 |     ) 100 | 101 | 102 | def _split_endpoint(endpoint): 103 |     if endpoint: 104 |         parsed_endpoint = urlparse(endpoint) 105 |         return parsed_endpoint.scheme, parsed_endpoint.netloc 106 |     return None, None 107 | -------------------------------------------------------------------------------- /afm/pep/test_actions.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020 IBM Corp. 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import unittest 6 | import pandas as pd 7 | import pyarrow as pa 8 | 9 | from .actions import Redact 10 | from .actions import HashRedact 11 | 12 | class TestActions(unittest.TestCase): 13 | 14 |     def test_redact(self): 15 |         df = pd.DataFrame( 16 |             {'gender': ["Female", "Male", "Male"], 'weight': [-1, 5, 9.5], 'age': [1, 2, 3]}) 17 |         table = pa.Table.from_pandas(df) 18 | 19 |         action = Redact("redact stuff", columns=["gender", "age"], options={"redactValue": "XXX"}) 20 |         for record_batch in table.to_batches(): 21 |             result = action(record_batch) 22 |             self.assertEqual(result.schema.field("gender").type, pa.string()) 23 |             self.assertEqual(result.schema.field("age").type, pa.string()) 24 |             self.assertEqual(result.schema.field("weight").type, pa.float64()) 25 | 26 |             self.assertEqual(result.to_pandas()["gender"][0], "XXX") 27 |             self.assertEqual(result.to_pandas()["age"][0], "XXX") 28 | 29 |     def test_hash_redact_md5(self): 30 |         df = pd.DataFrame( 31 |             {'col1': [1, 2, 3], 'col2': ["abcdefghijklmnopqrstuvwxyz", "bcdefghijklmnopqrstuvwxyza", "cdefghijklmnopqrstuvwxyzab"], 'col3': [1.0, 2.0, 3.0]}) 32 |         table = pa.Table.from_pandas(df) 33 | 34 |         action = HashRedact("Hash redact", columns=["col2"], options={"algo": "md5"}) 35 |         for record_batch in table.to_batches(): 36 |             result = action(record_batch) 37 |             self.assertEqual(result.schema.field("col1").type, pa.int64()) 38 |             self.assertEqual(result.schema.field("col2").type, pa.string()) 39 |             self.assertEqual(result.schema.field("col3").type, pa.float64()) 40 | 41 |             self.assertEqual(result.to_pandas()["col2"][0], "c3fcd3d76192e4007dfb496cca67e13b") 42 |             self.assertEqual(result.to_pandas()["col2"][1], "07694ef19cf359bfd74556dc0cc7956d") 43 |             self.assertEqual(result.to_pandas()["col2"][2], "8dda2bba265b7478676bf9526e79c91c") 44 |
redact", columns=["col2"], options={"algo": "md5"}) 51 | for record_batch in table.to_batches(): 52 | result = action(record_batch) 53 | self.assertEqual(result.schema.field("col1").type, pa.int64()) 54 | self.assertEqual(result.schema.field("col2").type, pa.string()) 55 | self.assertEqual(result.schema.field("col3").type, pa.float64()) 56 | 57 | self.assertEqual(result.to_pandas()["col2"][0], "c3fcd3d76192e4007dfb496cca67e13b") 58 | self.assertEqual(result.to_pandas()["col2"][1], "07694ef19cf359bfd74556dc0cc7956d") 59 | self.assertEqual(result.to_pandas()["col2"][2], "8dda2bba265b7478676bf9526e79c91c") 60 | 61 | def test_hash_redact_sha256(self): 62 | df = pd.DataFrame( 63 | {'col1': [1, 2, 3], 'col2': ["abcdefghijklmnopqrstuvwxyz", "bcdefghijklmnopqrstuvwxyza", "cdefghijklmnopqrstuvwxyzab"], 'col3': [1.0, 2.0, 3.0]}) 64 | table = pa.Table.from_pandas(df) 65 | 66 | action = HashRedact("Hash redact", columns=["col2"], options={"algo": "sha256"}) 67 | for record_batch in table.to_batches(): 68 | result = action(record_batch) 69 | self.assertEqual(result.schema.field("col1").type, pa.int64()) 70 | self.assertEqual(result.schema.field("col2").type, pa.string()) 71 | self.assertEqual(result.schema.field("col3").type, pa.float64()) 72 | 73 | self.assertEqual(result.to_pandas()["col2"][0], "71c480df93d6ae2f1efad1447c66c9525e316218cf51fc8d9ed832f2daf18b73") 74 | self.assertEqual(result.to_pandas()["col2"][1], "e40957dd33bd9da6053d78bea4da6c7cde1fac92614bfd03d8b0c422e021651c") 75 | self.assertEqual(result.to_pandas()["col2"][2], "fa732dae244c6d0b946e096d05167539a4b6ec2cc72f13a86a7fd657ef523d07") 76 | 77 | 78 | def test_hash_redact_sha512(self): 79 | df = pd.DataFrame( 80 | {'col1': [1, 2, 3], 'col2': ["abcdefghijklmnopqrstuvwxyz", "bcdefghijklmnopqrstuvwxyza", "cdefghijklmnopqrstuvwxyzab"], 'col3': [1.0, 2.0, 3.0]}) 81 | table = pa.Table.from_pandas(df) 82 | 83 | action = HashRedact("Hash redact", columns=["col2"], options={"algo": "sha512"}) 84 | for record_batch in table.to_batches(): 85 | result = action(record_batch) 86 | self.assertEqual(result.schema.field("col1").type, pa.int64()) 87 | self.assertEqual(result.schema.field("col2").type, pa.string()) 88 | self.assertEqual(result.schema.field("col3").type, pa.float64()) 89 | 90 | self.assertEqual(result.to_pandas()["col2"][0], "4dbff86cc2ca1bae1e16468a05cb9881c97f1753bce3619034898faa1aabe429955a1bf8ec483d7421fe3c1646613a59ed5441fb0f321389f77f48a879c7b1f1") 91 | self.assertEqual(result.to_pandas()["col2"][1], "6cf15b5b147ed859119df308a3e22a3958ecf1056b9cab135a1ce722ec57f1b65a03983a183141db9cb68817d57fab964be3068fe05eac8ff3d5f24ca34c6524") 92 | self.assertEqual(result.to_pandas()["col2"][2], "5d63cd2920fdbf1f67d2a55a7d5b792331f9e21cc9965419170176e98a221d3a68080225f0e781734304c1ef6f162dade36acf463b137e6767416c1c53fa845d") 93 | 94 | 95 | if __name__ == '__main__': 96 | unittest.main() -------------------------------------------------------------------------------- /afm/asset.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020 IBM Corp. 
3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import importlib 6 | import os 7 | import sys 8 | 9 | from afm.config import Config 10 | from afm.pep import registry, consolidate_actions 11 | from afm.filesystems.s3 import s3filesystem_from_config 12 | from afm.filesystems.httpfs import httpfs_from_config 13 | from afm.flight.flight import flight_from_config 14 | from afm.environment.environment import get_cacert_path, get_certs, get_min_tls_version 15 | from fybrik_python_logging import logger, DataSetID 16 | from pyarrow.fs import LocalFileSystem 17 | 18 | def asset_from_config(config: Config, asset_name: str, partition_path=None, capability=""): 19 |     connection_type = config.connection_type(asset_name, capability) 20 |     if connection_type in ['s3', 'httpfs', 'localfs']: 21 |         return FileSystemAsset(config, asset_name, partition_path, capability) 22 |     elif connection_type == 'flight': 23 |         return FlightAsset(config, asset_name, capability=capability) 24 |     raise ValueError( 25 |         "Unsupported connection type: {}".format(connection_type)) 26 | 27 | class Asset: 28 |     def __init__(self, config: Config, asset_name: str, partition_path=None, capability=""): 29 |         asset_config = config.for_asset(asset_name, capability=capability) 30 |         self._config = asset_config 31 |         self._actions = Asset._actions_for_asset(asset_config, config.plugin_dir) 32 |         self._format = asset_config.get("format") 33 |         if partition_path: 34 |             self._path = partition_path 35 |         else: 36 |             self._path = asset_config.get("path") 37 |         self._name = asset_config.get("name") 38 | 39 |     def add_action(self, action): 40 |         self._actions.insert(0, action) 41 | 42 |     @property 43 |     def actions(self): 44 |         return self._actions 45 | 46 |     @property 47 |     def name(self): 48 |         return self._name 49 | 50 |     @property 51 |     def format(self): 52 |         return self._format 53 | 54 |     @property 55 |     def path(self): 56 |         return self._path 57 | 58 |     @property 59 |     def connection_type(self): 60 |         return self._config['connection']['type'] 61 | 62 |     @staticmethod 63 |     def _try_to_find_plugin(plugin_name: str, plugin_dir: str): 64 |         if plugin_dir: 65 |             python_filename = plugin_name + ".py" 66 |             if python_filename in os.listdir(plugin_dir): 67 |                 module = importlib.import_module(plugin_name) 68 |                 cls = getattr(module, plugin_name) 69 |                 registry[plugin_name] = cls 70 |                 return cls 71 |         logger.error("plugin " + plugin_name + " not found") 72 |         return None 73 | 74 |     @staticmethod 75 |     def _actions_for_asset(asset_config: dict, plugin_dir: str): 76 |         def build_action(x): 77 |             action_name = x["action"] 78 |             if action_name in registry: 79 |                 cls = registry[action_name] 80 |             else: 81 |                 cls = Asset._try_to_find_plugin(action_name, plugin_dir) 82 |             return cls(description=x["description"], columns=x.get("columns"), options=x.get("options")) 83 | 84 |         transformations = asset_config.get("transformations") 85 |         if not transformations: 86 |             transformations = [] 87 |         # Create a list of Action objects from the transformations configuration 88 |         actions = [build_action(x) for x in transformations] 89 |         # Consolidate identical actions to keep the asset.actions efficient 90 |         return consolidate_actions(actions) 91 | 92 | class FileSystemAsset(Asset): 93 |     def __init__(self, config: Config, asset_name: str, partition_path=None, capability=""): 94 |         super().__init__(config, asset_name, partition_path, capability) 95 |         self._filesystem = FileSystemAsset._filesystem_for_asset(self._config) 96 | 97 |     @staticmethod 98 |     def _filesystem_for_asset(asset_config: dict): 99 |         connection =
asset_config['connection'] 100 | connection_type = connection['type'] 101 | dataSetID = asset_config['name'] 102 | if connection_type == "s3": 103 | verify = None 104 | ca_cert_path = get_cacert_path() 105 | if ca_cert_path != "": 106 | logger.trace("set cacert path to " + ca_cert_path, extra={DataSetID: dataSetID}) 107 | verify = ca_cert_path 108 | 109 | cert = None 110 | certs_tuple = get_certs() 111 | if certs_tuple: 112 | st = ' ' 113 | logger.trace("set certs tuple to: " + st.join(certs_tuple), extra={DataSetID: dataSetID}) 114 | cert = certs_tuple 115 | tls_min_version = get_min_tls_version() 116 | return s3filesystem_from_config(connection["s3"], dataSetID, tls_min_version, 117 | verify, cert) 118 | elif connection_type == "localfs": 119 | return LocalFileSystem() 120 | elif connection_type == "httpfs": 121 | return httpfs_from_config() 122 | raise ValueError( 123 | "Unsupported connection type: {}".format(connection_type)) 124 | 125 | @property 126 | def filesystem(self): 127 | return self._filesystem 128 | 129 | class FlightAsset(Asset): 130 | def __init__(self, config: Config, asset_name: str, capability=""): 131 | super().__init__(config, asset_name, capability=capability) 132 | self._flight = flight_from_config(self._config['connection']['flight']) 133 | 134 | @property 135 | def flight(self): 136 | return self._flight 137 | -------------------------------------------------------------------------------- /afm/pep/actions.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020 IBM Corp. 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import pandas as pd 6 | import pyarrow as pa 7 | import hashlib 8 | from time import time 9 | from datetime import datetime 10 | 11 | from fybrik_python_transformation import Action, PandasAction 12 | 13 | class Filter(PandasAction): 14 | def __init__(self, description, columns, options): 15 | super().__init__(description, columns, options) 16 | self.query = options.get('query', '') 17 | 18 | def __dftransform__(self, df: pd.DataFrame) -> pd.DataFrame: 19 | if self.query: 20 | return df.query(self.query) 21 | else: 22 | return df 23 | 24 | class AgeFilter(PandasAction): 25 | def __init__(self, description, columns, options): 26 | super().__init__(description, columns, options) 27 | age = int(options.get('age', 18)) 28 | now = datetime.fromtimestamp(time()) 29 | self.cutoff = now.replace(year=(now.year-age)) 30 | 31 | def __dftransform__(self, df: pd.DataFrame) -> pd.DataFrame: 32 | if self.columns: 33 | for column in self.columns: 34 | df = df[pd.to_datetime(df[column]) < self.cutoff] 35 | return df 36 | else: 37 | return df 38 | 39 | class Redact(Action): 40 | def __init__(self, description, columns, options): 41 | super().__init__(description, columns, options) 42 | self.redact_value = options.get("redactValue", "XXXXXXXXXX") 43 | 44 | def __call__(self, records: pa.RecordBatch) -> pa.RecordBatch: 45 | """Transformation logic for Redact action. 
46 | 47 |         Args: 48 |             records (pa.RecordBatch): record batch to transform 49 | 50 |         Returns: 51 |             pa.RecordBatch: transformed record batch 52 |         """ 53 |         columns = [column for column in self.columns if column in records.schema.names] 54 |         indices = [records.schema.get_field_index(c) for c in columns] 55 |         constColumn = pa.array([self.redact_value] * len(records), type=pa.string()) 56 |         new_columns = records.columns 57 |         for i in indices: 58 |             new_columns[i] = constColumn 59 |         new_schema = self.schema(records.schema) 60 |         return pa.RecordBatch.from_arrays(new_columns, schema=new_schema) 61 | 62 |     def field_type(self): 63 |         """Overrides field_type to calculate transformed schema correctly.""" 64 |         return pa.string()  # redacted value is a string 65 | 66 | class RemoveColumns(Action): 67 |     def __call__(self, records: pa.RecordBatch) -> pa.RecordBatch: 68 |         """Overrides __call__ to verify no removed columns exist.""" 69 |         columns = [column for column in self.columns if column in records.schema.names] 70 |         if columns: 71 |             raise RuntimeError("Access to {} is forbidden".format(columns)) 72 |         return records  # no transformation needed 73 | 74 |     def schema(self, original): 75 |         """Removes configured columns from the schema.""" 76 |         schema: pa.Schema = original 77 |         columns = [column for column in self.columns if column in schema.names] 78 |         for column in columns: 79 |             schema = schema.remove(schema.get_field_index(column)) 80 |         return schema 81 | 82 | class FilterColumns(Action): 83 |     def __init__(self, description, columns, options): 84 |         super().__init__(description, columns, options) 85 |         self._schema = None 86 | 87 |     def __call__(self, records: pa.RecordBatch) -> pa.RecordBatch: 88 |         columns = [column for column in self.columns if column in records.schema.names] 89 |         indices = [records.schema.get_field_index(c) for c in columns] 90 |         column_array = records.columns 91 |         if not self._schema: 92 |             self.schema(records.schema) 93 |         return pa.RecordBatch.from_arrays( 94 |             [column_array[i] for i in indices], 95 |             schema=self._schema) 96 | 97 |     def schema(self, original): 98 |         if self._schema: 99 |             return self._schema 100 |         columns = [column for column in self.columns if column in original.names] 101 |         self._schema = pa.schema([pa.field(c, original.field(c).type) for c in columns]) 102 |         return self._schema 103 | 104 | 105 | class HashRedact(Action): 106 |     def __init__(self, description, columns, options): 107 |         super().__init__(description, columns, options) 108 |         if options is None: 109 |             self.hash_algo = "md5" 110 |         else: 111 |             self.hash_algo = options.get("algo", "md5") 112 | 113 |     def __call__(self, records: pa.RecordBatch) -> pa.RecordBatch: 114 |         """Transformation logic for HashRedact action.
115 | 116 | Args: 117 | records (pa.RecordBatch): record batch to transform 118 | 119 | Returns: 120 | pa.RecordBatch: transformed record batch 121 | """ 122 | columns = [column for column in self.columns if column in records.schema.names] 123 | indices = [records.schema.get_field_index(c) for c in columns] 124 | new_columns = records.columns 125 | algo = self.hash_algo.lower() 126 | hashFunc = hashlib.md5 127 | if algo == "md5": 128 | hashFunc = hashlib.md5 129 | elif algo == "sha256": 130 | hashFunc = hashlib.sha256 131 | elif algo == "sha512": 132 | hashFunc = hashlib.sha512 133 | else: 134 | raise ValueError(f"Algorithm {algo} is not supported!") 135 | for i in indices: 136 | new_columns[i] = pa.array([hashFunc(v.as_py().encode()).hexdigest() for v in records.column(i)]) 137 | 138 | new_schema = self.schema(records.schema) 139 | return pa.RecordBatch.from_arrays(new_columns, schema=new_schema) 140 | 141 | def field_type(self): 142 | """Overrides field_type to calculate transformed schema correctly.""" 143 | return pa.string() # redacted value is a string 144 | 145 | -------------------------------------------------------------------------------- /hack/test-script/PS_20174392719_1491204439457_log.csv: -------------------------------------------------------------------------------- 1 | step,type,amount,nameOrig,oldbalanceOrg,newbalanceOrig,nameDest,oldbalanceDest,newbalanceDest,isFraud,isFlaggedFraud 2 | 1,PAYMENT,9839.64,C1231006815,170136,160296.36,M1979787155,0,0,0,0 3 | 1,PAYMENT,1864.28,C1666544295,21249,19384.72,M2044282225,0,0,0,0 4 | 1,TRANSFER,181,C1305486145,181,0,C553264065,0,0,1,0 5 | 1,CASH_OUT,181,C840083671,181,0,C38997010,21182,0,1,0 6 | 1,PAYMENT,11668.14,C2048537720,41554,29885.86,M1230701703,0,0,0,0 7 | 1,PAYMENT,7817.71,C90045638,53860,46042.29,M573487274,0,0,0,0 8 | 1,PAYMENT,7107.77,C154988899,183195,176087.23,M408069119,0,0,0,0 9 | 1,PAYMENT,7861.64,C1912850431,176087.23,168225.59,M633326333,0,0,0,0 10 | 1,PAYMENT,4024.36,C1265012928,2671,0,M1176932104,0,0,0,0 11 | 1,DEBIT,5337.77,C712410124,41720,36382.23,C195600860,41898,40348.79,0,0 12 | 1,DEBIT,9644.94,C1900366749,4465,0,C997608398,10845,157982.12,0,0 13 | 1,PAYMENT,3099.97,C249177573,20771,17671.03,M2096539129,0,0,0,0 14 | 1,PAYMENT,2560.74,C1648232591,5070,2509.26,M972865270,0,0,0,0 15 | 1,PAYMENT,11633.76,C1716932897,10127,0,M801569151,0,0,0,0 16 | 1,PAYMENT,4098.78,C1026483832,503264,499165.22,M1635378213,0,0,0,0 17 | 1,CASH_OUT,229133.94,C905080434,15325,0,C476402209,5083,51513.44,0,0 18 | 1,PAYMENT,1563.82,C761750706,450,0,M1731217984,0,0,0,0 19 | 1,PAYMENT,1157.86,C1237762639,21156,19998.14,M1877062907,0,0,0,0 20 | 1,PAYMENT,671.64,C2033524545,15123,14451.36,M473053293,0,0,0,0 21 | 1,TRANSFER,215310.3,C1670993182,705,0,C1100439041,22425,0,0,0 22 | 1,PAYMENT,1373.43,C20804602,13854,12480.57,M1344519051,0,0,0,0 23 | 1,DEBIT,9302.79,C1566511282,11299,1996.21,C1973538135,29832,16896.7,0,0 24 | 1,DEBIT,1065.41,C1959239586,1817,751.59,C515132998,10330,0,0,0 25 | 1,PAYMENT,3876.41,C504336483,67852,63975.59,M1404932042,0,0,0,0 26 | 1,TRANSFER,311685.89,C1984094095,10835,0,C932583850,6267,2719172.89,0,0 27 | 1,PAYMENT,6061.13,C1043358826,443,0,M1558079303,0,0,0,0 28 | 1,PAYMENT,9478.39,C1671590089,116494,107015.61,M58488213,0,0,0,0 29 | 1,PAYMENT,8009.09,C1053967012,10968,2958.91,M295304806,0,0,0,0 30 | 1,PAYMENT,8901.99,C1632497828,2958.91,0,M33419717,0,0,0,0 31 | 1,PAYMENT,9920.52,C764826684,0,0,M1940055334,0,0,0,0 32 | 1,PAYMENT,3448.92,C2103763750,0,0,M335107734,0,0,0,0 33 | 
1,PAYMENT,4206.84,C215078753,0,0,M1757317128,0,0,0,0 34 | 1,PAYMENT,5885.56,C840514538,0,0,M1804441305,0,0,0,0 35 | 1,PAYMENT,5307.88,C1768242710,0,0,M1971783162,0,0,0,0 36 | 1,PAYMENT,5031.22,C247113419,0,0,M151442075,0,0,0,0 37 | 1,PAYMENT,24213.67,C1238616099,0,0,M70695990,0,0,0,0 38 | 1,PAYMENT,8603.42,C1608633989,253,0,M1615617512,0,0,0,0 39 | 1,PAYMENT,2791.42,C923341586,300481,297689.58,M107994825,0,0,0,0 40 | 1,PAYMENT,7413.54,C1470868839,297689.58,290276.03,M1426725223,0,0,0,0 41 | 1,PAYMENT,3295.19,C711197015,233633,230337.81,M1384454980,0,0,0,0 42 | 1,PAYMENT,1684.81,C1481594086,297,0,M1569435561,0,0,0,0 43 | 1,DEBIT,5758.59,C1466917878,32604,26845.41,C1297685781,209699,16997.22,0,0 44 | 1,CASH_OUT,110414.71,C768216420,26845.41,0,C1509514333,288800,2415.16,0,0 45 | 1,PAYMENT,7823.46,C260084831,998,0,M267814113,0,0,0,0 46 | 1,PAYMENT,5086.48,C598357562,0,0,M1593224710,0,0,0,0 47 | 1,PAYMENT,5281.48,C1440738283,152019,146737.52,M1849015357,0,0,0,0 48 | 1,PAYMENT,13875.98,C484199463,15818,1942.02,M2008106788,0,0,0,0 49 | 1,CASH_OUT,56953.9,C1570470538,1942.02,0,C824009085,70253,64106.18,0,0 50 | 1,CASH_OUT,5346.89,C512549200,0,0,C248609774,652637,6453430.91,0,0 51 | 1,PAYMENT,2204.04,C1615801298,586,0,M490391704,0,0,0,0 52 | 1,PAYMENT,2641.47,C460570271,23053,20411.53,M1653361344,0,0,0,0 53 | 1,CASH_OUT,23261.3,C2072313080,20411.53,0,C2001112025,25742,0,0,0 54 | 1,PAYMENT,2330.64,C816944408,203543,201212.36,M909132503,0,0,0,0 55 | 1,PAYMENT,1614.64,C912966811,41276,39661.36,M1792384402,0,0,0,0 56 | 1,PAYMENT,9164.71,C1458621573,47235.77,38071.06,M1658980982,0,0,0,0 57 | 1,PAYMENT,2970.97,C46941357,38071.06,35100.09,M1152606315,0,0,0,0 58 | 1,PAYMENT,38.66,C343345308,16174,16135.34,M1714688478,0,0,0,0 59 | 1,PAYMENT,2252.44,C104716441,1627,0,M1506951181,0,0,0,0 60 | 1,TRANSFER,62610.8,C1976401987,79114,16503.2,C1937962514,517,8383.29,0,0 61 | 1,DEBIT,5529.13,C867288517,8547,3017.87,C242131142,10206,0,0,0 62 | 1,CASH_OUT,82940.31,C1528834618,3017.87,0,C476800120,132372,49864.36,0,0 63 | 1,DEBIT,4510.22,C280615803,10256,5745.78,C1254526270,10697,0,0,0 64 | 1,DEBIT,8727.74,C166694583,882770,874042.26,C1129670968,12636,0,0,0 65 | 1,PAYMENT,2599.46,C885910946,874042.26,871442.79,M1860591867,0,0,0,0 66 | 1,DEBIT,4874.49,C811207775,153,0,C1971489295,253104,0,0,0 67 | 1,PAYMENT,6440.78,C1161148117,2192,0,M516875052,0,0,0,0 68 | 1,PAYMENT,4910.14,C1131592118,41551,36640.86,M589987187,0,0,0,0 69 | 1,PAYMENT,6444.64,C1262609629,12019,5574.36,M587180314,0,0,0,0 70 | 1,DEBIT,5149.66,C1955990522,4782,0,C1330106945,52752,24044.18,0,0 71 | 1,PAYMENT,7292.16,C69673470,216827,209534.84,M1082411691,0,0,0,0 72 | 1,CASH_OUT,47458.86,C527211736,209534.84,162075.98,C2096057945,52120,0,0,0 73 | 1,CASH_OUT,136872.92,C1533123860,162075.98,25203.05,C766572210,217806,0,0,0 74 | 1,CASH_OUT,94253.33,C1718906711,25203.05,0,C977993101,99773,965870.05,0,0 75 | 1,PAYMENT,2998.04,C71802912,12030,9031.96,M2134271532,0,0,0,0 76 | 1,PAYMENT,3454.08,C686349795,9031.96,5577.88,M1831010686,0,0,0,0 77 | 1,PAYMENT,4316.2,C1423768154,10999,6682.8,M404222443,0,0,0,0 78 | 1,PAYMENT,2131.84,C1987977423,224,0,M61073295,0,0,0,0 79 | 1,PAYMENT,12986.61,C807322507,23350,10363.39,M396485834,0,0,0,0 80 | 1,TRANSFER,42712.39,C283039401,10363.39,0,C1330106945,57901.66,24044.18,0,0 81 | 1,TRANSFER,77957.68,C207471778,0,0,C1761291320,94900,22233.65,0,0 82 | 1,TRANSFER,17231.46,C1243171897,0,0,C783286238,24672,0,0,0 83 | 1,TRANSFER,78766.03,C1376151044,0,0,C1749186397,103772,277515.05,0,0 84 | 
1,TRANSFER,224606.64,C873175411,0,0,C766572210,354678.92,0,0,0 85 | 1,TRANSFER,125872.53,C1443967876,0,0,C392292416,348512,3420103.09,0,0 86 | 1,TRANSFER,379856.23,C1449772539,0,0,C1590550415,900180,19169204.93,0,0 87 | 1,TRANSFER,1505626.01,C926859124,0,0,C665576141,29031,5515763.34,0,0 88 | 1,TRANSFER,554026.99,C1603696865,0,0,C766572210,579285.56,0,0,0 89 | 1,TRANSFER,147543.1,C12905860,0,0,C1359044626,223220,16518.36,0,0 90 | 1,TRANSFER,761507.39,C412788346,0,0,C1590550415,1280036.23,19169204.93,0,0 91 | 1,TRANSFER,1429051.47,C1520267010,0,0,C1590550415,2041543.62,19169204.93,0,0 92 | 1,TRANSFER,358831.92,C908084672,0,0,C392292416,474384.53,3420103.09,0,0 93 | 1,TRANSFER,367768.4,C288306765,0,0,C1359044626,370763.1,16518.36,0,0 94 | 1,TRANSFER,209711.11,C1556867940,0,0,C1509514333,399214.71,2415.16,0,0 95 | 1,TRANSFER,583848.46,C1839168128,0,0,C1286084959,667778,2107778.11,0,0 96 | 1,TRANSFER,1724887.05,C1495608502,0,0,C1590550415,3470595.1,19169204.93,0,0 97 | 1,TRANSFER,710544.77,C835773569,0,0,C1359044626,738531.5,16518.36,0,0 98 | 1,TRANSFER,581294.26,C843299092,0,0,C1590550415,5195482.15,19169204.93,0,0 99 | 1,TRANSFER,11996.58,C605982374,0,0,C1225616405,40255,0,0,0 100 | 1,PAYMENT,2875.1,C1412322831,15443,12567.9,M1651262695,0,0,0,0 101 | 1,PAYMENT,8586.98,C1305004711,3763,0,M494077446,0,0,0,0 102 | 103 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![GitHub Actions Build](https://github.com/fybrik/arrow-flight-module/actions/workflows/build.yml/badge.svg)](https://github.com/fybrik/arrow-flight-module/actions/workflows/build.yml) 2 | [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) 3 | 4 | # arrow-flight-module 5 | 6 | The arrow-flight-module (AFM) for [Fybrik](https://github.com/fybrik/fybrik) brings enforcement of data governance policies to the world of [Apache Arrow](https://arrow.apache.org/) 7 | [Flight](https://arrow.apache.org/docs/format/Flight.html). 8 | 9 | ## What is it? 10 | 11 | AFM is a Flight server that enables applications to consume tabular data from data sources. More importantly, the AFM is also a Policy Enforcement Point (PEP) capable of performing enforcement actions dictated by a Policy Decision Point. 12 | Such enforcement actions include blocking unauthorized requests, validating and transforming data payloads. 13 | 14 | ## Register as a Fybrik module 15 | 16 | To register AFM as a Fybrik module apply `module.yaml` to the fybrik-system namespace of your cluster. 
17 | 18 | To install the latest release run: 19 | 20 | ```bash 21 | kubectl apply -f https://github.com/fybrik/arrow-flight-module/releases/latest/download/module.yaml -n fybrik-system 22 | ``` 23 | 24 | ### Version compatibility matrix 25 | 26 | | Fybrik | AFM | Command 27 | | --- | --- | --- 28 | | 0.1.x | 0.1.x | `https://github.com/fybrik/arrow-flight-module/releases/download/v0.1.0/module.yaml` 29 | | 0.2.x | 0.2.x | `https://github.com/fybrik/arrow-flight-module/releases/download/v0.2.0/module.yaml` 30 | | 0.3.x | 0.3.x | `https://github.com/fybrik/arrow-flight-module/releases/download/v0.3.0/module.yaml` 31 | | 0.4.x | 0.4.x | `https://github.com/fybrik/arrow-flight-module/releases/download/v0.4.0/module.yaml` 32 | | 0.5.x | 0.5.x | `https://github.com/fybrik/arrow-flight-module/releases/download/v0.5.0/module.yaml` 33 | | 0.6.x | 0.6.x | `https://github.com/fybrik/arrow-flight-module/releases/download/v0.6.0/module.yaml` 34 | | 0.7.x | 0.7.x | `https://github.com/fybrik/arrow-flight-module/releases/download/v0.7.0/module.yaml` 35 | | 1.0.x | 0.8.x | `https://github.com/fybrik/arrow-flight-module/releases/download/v0.8.0/module.yaml` 36 | | 1.1.x | 0.8.x | `https://github.com/fybrik/arrow-flight-module/releases/download/v0.8.0/module.yaml` 37 | | 1.2.x | 0.9.x | `https://github.com/fybrik/arrow-flight-module/releases/download/v0.9.0/module.yaml` 38 | | 1.2.x | 0.10.x | `https://github.com/fybrik/arrow-flight-module/releases/download/v0.10.0/module.yaml` 39 | | master | master | `https://raw.githubusercontent.com/fybrik/arrow-flight-module/master/module.yaml` 40 | 41 | 42 | ## Build and deploy to Kubernetes 43 | 44 | These instructions are for building an image of AFM and 45 | deploying it to Kubernetes. Deployment will install a Helm release named `afm` to the cluster. The release runs the flight server with **empty configuration**. 46 | 47 | 48 | ### Requirements 49 | 50 | - make 51 | - Docker 52 | - kubectl with access to a kubernetes cluster (e.g., a [kind](https://kind.sigs.k8s.io/) cluster) 53 | - [Helm 3.7](https://helm.sh/docs/intro/install/) or above 54 | 55 | ### Deploy to kind clusters 56 | 57 | ```bash 58 | make build push-to-kind deploy 59 | ``` 60 | 61 | ### Deploy to other clusters 62 | 63 | For other clusters you will need to use an image registry. 64 | For example if your image registry is `us.icr.io/username` then use: 65 | ```bash 66 | REPOSITORY=us.icr.io/username/arrow-flight-module make build push deploy 67 | ``` 68 | 69 | ### Configuration 70 | 71 | You can provide a configuration file by upgrading the deployed Helm chart. 72 | For example, to use `sample/sample.yaml` as configuration run: 73 | 74 | ```bash 75 | helm upgrade --reuse-values --set-file config_override=sample/sample.yaml afm ./helm/afm 76 | ``` 77 | 78 | It is also possible to specify specific configuration values following the modules specification of Fybrik. Generally, you would want Fybrik to do that for you and not deal with it directly. 79 | 80 | ## Usage 81 | 82 | Once the server is deployed you can connect to it using any Flight client SDK. 
83 | For example, in python: 84 | 85 | ```python 86 | import pyarrow.flight as fl 87 | import pandas as pd 88 | 89 | if __name__ == '__main__': 90 |     client = fl.connect("grpc://afm-arrow-flight-module.default.svc.cluster.local:80") # change to the address that the server is deployed to 91 |     info: fl.FlightInfo = client.get_flight_info( 92 |         fl.FlightDescriptor.for_command(r'{"asset": "sample.parquet"}')) # change to an asset configured in the server config file 93 |     result: fl.FlightStreamReader = client.do_get(info.endpoints[0].ticket) 94 | 95 |     df: pd.DataFrame = result.read_pandas() 96 |     print(df) 97 | ``` 98 | 99 | ## Development 100 | 101 | This project requires Python 3.8 and pipenv. 102 | 103 | Run the server locally with `pipenv install` and then `pipenv run server`. 104 | 105 | See `/sample` for an example to run locally. 106 | 107 | ## Status 108 | 109 | This project is in a **very early** stage and contains just the following features: 110 | 111 | - [X] Formats 112 |   - [X] Parquet 113 | - [X] Filesystems 114 |   - [X] S3 115 | - [X] Queries 116 |   - [X] Asset name 117 |   - [X] Column Selection 118 | - [X] Enforcement Actions 119 |   - [X] Redact 120 |   - [X] RemoveColumn 121 | - [X] Configuration (assets, actions) 122 | - [X] Demo 123 |   - [X] Flight client 124 | 125 | 126 | The project's focus is to transform data based on policies. 127 | We are planning to create a specialized architecture for a generic PEP 128 | for Apache Arrow Flight and to demonstrate it with an existing flight server 129 | such as [Ballista](https://github.com/ballista-compute/ballista). 130 | 131 | A full list of desired features (not necessarily fulfilled by AFM alone): 132 | 133 | - [ ] Formats 134 |   - [X] Parquet 135 |   - [ ] Parquet Modular Encryption (PME) 136 |   - [ ] CSV 137 |   - [ ] JSON 138 |   - [ ] ORC 139 |   - [ ] Arrow Flight 140 | - [ ] Filesystems 141 |   - [X] S3 142 |   - [ ] Local 143 | - [ ] Queries 144 |   - [X] Asset name 145 |   - [X] Column selection 146 |   - [ ] Nested column selection 147 |   - [ ] Filter 148 |   - [ ] Full SQL 149 | - [ ] Enforcement Actions 150 |   - [ ] Redact 151 |   - [ ] RemoveColumn 152 |   - [ ] Masking (format preserving) 153 |   - [ ] Blackout period 154 |   - [ ] Pluggable enforcement actions 155 | - [ ] Simplified dynamic configuration (personal asset catalog, action policies) 156 | - [ ] Distributed query planning 157 | - [ ] Serverless query execution 158 | - [ ] Support writes 159 | - [ ] Integrations 160 |   - [ ] [dataset-lifecycle-framework](https://github.com/IBM/dataset-lifecycle-framework) 161 | - [ ] Demo 162 |   - [ ] Flight client 163 |   - [ ] Apache Spark client 164 | -------------------------------------------------------------------------------- /hack/test_module.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -x 4 | set -e 5 | 6 | 7 | export WORKING_DIR=test-script 8 | export ACCESS_KEY=1234 9 | export SECRET_KEY=1234 10 | export TOOLBIN=tools/bin 11 | 12 | kubernetesVersion=$1 13 | fybrikVersion=$2 14 | moduleVersion=$3 15 | certManagerVersion=$4 16 | 17 | if [ $moduleVersion != 'master' ] 18 | then 19 |     git checkout tags/v$moduleVersion 20 | fi 21 | 22 | if [ $kubernetesVersion == "kind23" ] 23 | then 24 |     ${TOOLBIN}/kind delete cluster 25 |     ${TOOLBIN}/kind create cluster --image=kindest/node:v1.23.13@sha256:ef453bb7c79f0e3caba88d2067d4196f427794086a7d0df8df4f019d5e336b61 26 | elif [ $kubernetesVersion == "kind24" ] 27 | then 28 |     ${TOOLBIN}/kind delete cluster 29 |     ${TOOLBIN}/kind create cluster
--image=kindest/node:v1.24.7@sha256:577c630ce8e509131eab1aea12c022190978dd2f745aac5eb1fe65c0807eb315 30 | elif [ $kubernetesVersion == "kind25" ] 31 | then 32 | ${TOOLBIN}/kind delete cluster 33 | ${TOOLBIN}/kind create cluster --image=kindest/node:v1.25.3@sha256:f52781bc0d7a19fb6c405c2af83abfeb311f130707a0e219175677e366cc45d1 34 | else 35 | echo "Unsupported kind version" 36 | exit 1 37 | fi 38 | 39 | 40 | #quick start 41 | 42 | ${TOOLBIN}/helm repo add jetstack https://charts.jetstack.io 43 | ${TOOLBIN}/helm repo add hashicorp https://helm.releases.hashicorp.com 44 | ${TOOLBIN}/helm repo add fybrik-charts https://fybrik.github.io/charts 45 | ${TOOLBIN}/helm repo update 46 | 47 | 48 | ${TOOLBIN}/helm install cert-manager jetstack/cert-manager \ 49 | --namespace cert-manager \ 50 | --version v$certManagerVersion \ 51 | --create-namespace \ 52 | --set installCRDs=true \ 53 | --wait --timeout 400s 54 | 55 | if [ $fybrikVersion == "master" ] 56 | then 57 | rm -rf fybrik 58 | git clone https://github.com/fybrik/fybrik.git 59 | cd fybrik 60 | ../${TOOLBIN}/helm dependency update charts/vault 61 | ../${TOOLBIN}/helm install vault charts/vault --create-namespace -n fybrik-system \ 62 | --set "vault.injector.enabled=false" \ 63 | --set "vault.server.dev.enabled=true" \ 64 | --values charts/vault/env/dev/vault-single-cluster-values.yaml 65 | ../${TOOLBIN}/kubectl wait --for=condition=ready --all pod -n fybrik-system --timeout=120s 66 | ../${TOOLBIN}/helm install fybrik-crd charts/fybrik-crd -n fybrik-system --wait 67 | ../${TOOLBIN}/helm install fybrik charts/fybrik --set "coordinator.catalog=katalog" --set global.tag=master -n fybrik-system --wait 68 | cd - 69 | rm -rf fybrik 70 | else 71 | ${TOOLBIN}/helm install vault fybrik-charts/vault --create-namespace -n fybrik-system \ 72 | --set "vault.injector.enabled=false" \ 73 | --set "vault.server.dev.enabled=true" \ 74 | --values https://raw.githubusercontent.com/fybrik/fybrik/v$fybrikVersion/charts/vault/env/dev/vault-single-cluster-values.yaml 75 | ${TOOLBIN}/kubectl wait --for=condition=ready --all pod -n fybrik-system --timeout=400s 76 | 77 | ${TOOLBIN}/helm install fybrik-crd fybrik-charts/fybrik-crd -n fybrik-system --version v$fybrikVersion --wait 78 | ${TOOLBIN}/helm install fybrik fybrik-charts/fybrik --set "coordinator.catalog=katalog" -n fybrik-system --version v$fybrikVersion --wait 79 | fi 80 | 81 | # apply modules 82 | 83 | # Related to https://github.com/cert-manager/cert-manager/issues/2908 84 | # Fybrik webhook not really ready after "helm install --wait" 85 | # A workaround is to loop until the module is applied as expected 86 | if [ $moduleVersion == "master" ] 87 | then 88 | CMD="${TOOLBIN}/kubectl apply -f ../module.yaml -n fybrik-system" 89 | else 90 | CMD="${TOOLBIN}/kubectl apply -f https://github.com/fybrik/arrow-flight-module/releases/download/v$moduleVersion/module.yaml -n fybrik-system" 91 | fi 92 | 93 | count=0 94 | until $CMD 95 | do 96 | if [[ $count -eq 10 ]] 97 | then 98 | break 99 | fi 100 | sleep 1 101 | ((count=count+1)) 102 | done 103 | 104 | # Notebook sample 105 | 106 | ${TOOLBIN}/kubectl create namespace fybrik-notebook-sample 107 | ${TOOLBIN}/kubectl config set-context --current --namespace=fybrik-notebook-sample 108 | 109 | #localstack 110 | ${TOOLBIN}/helm repo add localstack-charts https://localstack.github.io/helm-charts 111 | ${TOOLBIN}/helm install localstack localstack-charts/localstack --set startServices="s3" --set service.type=ClusterIP 112 | ${TOOLBIN}/kubectl wait --for=condition=ready --all pod 
-n fybrik-notebook-sample --timeout=600s 113 | 114 | ${TOOLBIN}/kubectl port-forward svc/localstack 4566:4566 & 115 | 116 | 117 | export ENDPOINT="http://127.0.0.1:4566" 118 | export BUCKET="demo" 119 | export OBJECT_KEY="PS_20174392719_1491204439457_log.csv" 120 | export FILEPATH="$WORKING_DIR/PS_20174392719_1491204439457_log.csv" 121 | export REGION=us-east-1 122 | aws configure set aws_access_key_id ${ACCESS_KEY} && aws configure set aws_secret_access_key ${SECRET_KEY} 123 | aws configure set region ${REGION} 124 | aws --endpoint-url=${ENDPOINT} s3api create-bucket --bucket ${BUCKET} 125 | aws --endpoint-url=${ENDPOINT} s3api put-object --bucket ${BUCKET} --key ${OBJECT_KEY} --body ${FILEPATH} 126 | 127 | cat << EOF | ${TOOLBIN}/kubectl apply -f - 128 | apiVersion: v1 129 | kind: Secret 130 | metadata: 131 | name: paysim-csv 132 | type: Opaque 133 | stringData: 134 | access_key: "${ACCESS_KEY}" 135 | secret_key: "${SECRET_KEY}" 136 | EOF 137 | 138 | 139 | ${TOOLBIN}/kubectl apply -f $WORKING_DIR/Asset.yaml -n fybrik-notebook-sample 140 | 141 | ${TOOLBIN}/kubectl describe Asset paysim-csv -n fybrik-notebook-sample 142 | 143 | 144 | ${TOOLBIN}/kubectl -n fybrik-system create configmap sample-policy --from-file=$WORKING_DIR/sample-policy.rego 145 | ${TOOLBIN}/kubectl -n fybrik-system label configmap sample-policy openpolicyagent.org/policy=rego 146 | 147 | c=0 148 | while [[ $(${TOOLBIN}/kubectl get cm sample-policy -n fybrik-system -o 'jsonpath={.metadata.annotations.openpolicyagent\.org/policy-status}') != '{"status":"ok"}' ]] 149 | do 150 | echo "waiting" 151 | ((c++)) && ((c==25)) && break 152 | sleep 1 153 | done 154 | 155 | 156 | ${TOOLBIN}/kubectl apply -f $WORKING_DIR/fybrikapplication.yaml 157 | 158 | c=0 159 | while [[ $(${TOOLBIN}/kubectl get fybrikapplication my-notebook -o 'jsonpath={.status.ready}') != "true" ]] 160 | do 161 | echo "waiting" 162 | ((c++)) && ((c==30)) && break 163 | sleep 1 164 | done 165 | 166 | 167 | ${TOOLBIN}/kubectl get pods -n fybrik-blueprints 168 | POD_NAME=$(${TOOLBIN}/kubectl get pods -n fybrik-blueprints -o=name | sed "s/^.\{4\}//") 169 | 170 | ${TOOLBIN}/kubectl get cm -n fybrik-blueprints -o yaml 171 | export CATALOGED_ASSET=fybrik-notebook-sample/paysim-csv 172 | export ENDPOINT_HOSTNAME=$(kubectl get fybrikapplication my-notebook -n fybrik-notebook-sample -o "jsonpath={.status.assetStates.${CATALOGED_ASSET}.endpoint.fybrik-arrow-flight.hostname}") 173 | ${TOOLBIN}/kubectl cp $WORKING_DIR/test.py ${POD_NAME}:/tmp -n fybrik-blueprints 174 | ${TOOLBIN}/kubectl exec -i ${POD_NAME} -n fybrik-blueprints -- python /tmp/test.py -e ${ENDPOINT_HOSTNAME} -p 80 > res.out 175 | 176 | 177 | ${TOOLBIN}/kubectl logs ${POD_NAME} -n fybrik-blueprints 178 | 179 | DIFF=$(diff -b $WORKING_DIR/expected.txt res.out) 180 | RES=0 181 | if [ "${DIFF}" == "" ] 182 | then 183 | echo "test succeeded" 184 | else 185 | RES=1 186 | fi 187 | 188 | pkill kubectl 189 | ${TOOLBIN}/kubectl delete namespace fybrik-notebook-sample 190 | ${TOOLBIN}/kubectl -n fybrik-system delete configmap sample-policy 191 | 192 | if [ ${RES} == 1 ] 193 | then 194 | echo "test failed" 195 | exit 1 196 | fi 197 | -------------------------------------------------------------------------------- /afm/server.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020 IBM Corp. 
3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | 6 | import json 7 | from fybrik_python_logging import logger, init_logger, DataSetID, ForUser 8 | import os 9 | 10 | import datetime 11 | import pyarrow as pa 12 | import pyarrow.flight as fl 13 | import pyarrow.parquet as pq 14 | import pyarrow.csv as csv 15 | import pyarrow.dataset as ds 16 | from pyarrow.fs import FileSelector 17 | 18 | from .asset import asset_from_config 19 | from .command import AFMCommand 20 | from .config import Config 21 | from .pep import transform, transform_schema, transform_batches, actions 22 | from .ticket import AFMTicket 23 | from .worker import workers_from_config 24 | from .auth import AFMAuthHandler 25 | from .environment.environment import print_env_vars 26 | 27 | class AFMFlightServer(fl.FlightServerBase): 28 | def __init__(self, config_path: str, port: int, loglevel: str, *args, **kwargs): 29 | with Config(config_path) as config: 30 | super(AFMFlightServer, self).__init__( 31 | "grpc://0.0.0.0:{}".format(port), 32 | auth_handler=AFMAuthHandler(config.auth), 33 | *args, **kwargs) 34 | init_logger(loglevel, config.app_uuid, 'arrow-flight-server') 35 | self.config_path = config_path 36 | print_env_vars() 37 | 38 | def _get_dataset(self, asset): 39 | # FIXME(roee88): bypass https://issues.apache.org/jira/browse/ARROW-7867 40 | selector = FileSelector(asset.path, allow_not_found=True, recursive=True) 41 | try: 42 | data_files = [f.path for f in asset.filesystem.get_file_info(selector) if f.size] 43 | except NotADirectoryError: 44 | data_files = None 45 | if not data_files: 46 | data_files = [asset.path] # asset.path is probably a single file 47 | 48 | if asset.format == "csv" or asset.format == "parquet": 49 | return ds.dataset(data_files, format=asset.format, filesystem=asset.filesystem), data_files 50 | 51 | raise ValueError("unsupported format {}".format(asset.format)) 52 | 53 | def _infer_schema(self, asset): 54 | dataset, data_files = self._get_dataset(asset) 55 | return dataset.schema, data_files 56 | 57 | def _filter_columns(self, schema, columns): 58 | fields = [schema.field(c) for c in columns] 59 | return pa.schema([pa.field(f.name, f.type, f.nullable, f.metadata) for f in fields]) 60 | 61 | # write arrow dataset to filesystem 62 | def _write_asset(self, asset, reader, write_mode): 63 | # in this implementation we currently begin by reading the entire dataset into memory 64 | record_batches = reader.read_all().combine_chunks().to_batches() 65 | transformed_batches = transform_batches(asset.actions, record_batches) 66 | # If the client requests to append to the existing data, the flag `overwrite_or_ignore` 67 | # is used with a unique basename_template derived from the time of writing. 68 | # This writes the data to a new file while leaving the existing files unchanged. 69 | # Otherwise, the flag `delete_matching` is used, which writes the data to a new file 70 | # while deleting the existing files.
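# --- Hedged illustration (not part of the original source): the write mode handled
# above arrives from the client in the do_put command descriptor. A minimal
# client-side sketch, assuming a server reachable at grpc://localhost:8080 and a
# configured asset named "sample.parquet" (both placeholders):
#
#   import pyarrow as pa
#   import pyarrow.flight as fl
#   table = pa.table({"step": [1, 2, 3]})
#   client = fl.connect("grpc://localhost:8080")
#   descriptor = fl.FlightDescriptor.for_command(
#       '{"asset": "sample.parquet", "write_mode": "append"}')
#   writer, _ = client.do_put(descriptor, table.schema)  # do_put returns (stream writer, metadata reader)
#   writer.write_table(table)
#   writer.close()
#
# Omitting "write_mode" falls back to "overwrite" (see do_put below). ---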
71 | logger.trace("write_mode: " + write_mode, extra={DataSetID: asset.name, ForUser: True}) 72 | if write_mode == "append": 73 | existing_data_behavior='overwrite_or_ignore' 74 | else: 75 | existing_data_behavior='delete_matching' 76 | ds.write_dataset(transformed_batches, base_dir=asset.path, basename_template="part-{:%Y-%m-%d-%H-%M-%S-%f}-{{i}}.{}".format(datetime.datetime.now(), asset.format), format=asset.format, filesystem=asset.filesystem, existing_data_behavior=existing_data_behavior) 77 | 78 | def _read_asset(self, asset, columns=None): 79 | dataset, data_files = self._get_dataset(asset) 80 | columns = [c for c in columns if dataset.schema.get_field_index(c) != -1] 81 | scanner = ds.Scanner.from_dataset(dataset, columns=columns, batch_size=64*2**20) 82 | batches = scanner.to_batches() 83 | if columns: 84 | return self._filter_columns(dataset.schema, columns), batches 85 | return dataset.schema, batches 86 | 87 | def _get_endpoints(self, tickets, locations): 88 | endpoints = [] 89 | i = 0 90 | for ticket in tickets: 91 | if locations: 92 | endpoints.append(fl.FlightEndpoint(ticket.toJSON(), [locations[i]])) 93 | i = (i + 1) % len(locations)  # assign locations to tickets round-robin 94 | else: 95 | endpoints.append(fl.FlightEndpoint(ticket.toJSON(), [])) 96 | return endpoints 97 | 98 | 99 | def _get_locations(self, workers): 100 | locations = [] 101 | if workers: 102 | for worker in workers: 103 | locations.append("grpc://{}:{}".format(worker.address, worker.port)) 104 | else: 105 | local_address = os.getenv("MY_POD_IP") 106 | if local_address: 107 | locations.append("grpc://{}:{}".format(local_address, self.port)) 108 | 109 | return locations 110 | 111 | def get_flight_info(self, context, descriptor): 112 | cmd = AFMCommand(descriptor.command) 113 | logger.info('getting flight information', 114 | extra={'command': descriptor.command, 115 | DataSetID: cmd.asset_name, 116 | ForUser: True}) 117 | 118 | with Config(self.config_path) as config: 119 | asset = asset_from_config(config, cmd.asset_name, capability="read") 120 | workers = workers_from_config(config.workers) 121 | 122 | if asset.connection_type == 'flight': 123 | passthrough_flight_info = asset.flight.get_flight_info() 124 | schema = passthrough_flight_info.schema 125 | else: 126 | # Infer schema 127 | schema, data_files = self._infer_schema(asset) 128 | 129 | if cmd.columns: 130 | schema = self._filter_columns(schema, cmd.columns) 131 | schema = transform_schema(asset.actions, schema) 132 | 133 | # Build locations for the endpoints of this server 134 | locations = self._get_locations(workers) 135 | 136 | tickets = [] 137 | if asset.connection_type == 'flight': 138 | for endpoint in passthrough_flight_info.endpoints: 139 | tickets.append(AFMTicket(cmd.asset_name, schema.names, endpoint.ticket.ticket.decode())) 140 | else: 141 | # Build one ticket per data file served by this server 142 | for f in data_files: 143 | tickets.append(AFMTicket(cmd.asset_name, schema.names, partition_path=f)) 144 | 145 | endpoints = self._get_endpoints(tickets, locations) 146 | return fl.FlightInfo(schema, descriptor, endpoints, -1, -1) 147 | 148 | def do_get(self, context, ticket: fl.Ticket): 149 | ticket_info: AFMTicket = AFMTicket.fromJSON(ticket.ticket) 150 | if ticket_info.columns is None: 151 | raise ValueError("Columns must be specified in ticket") 152 | 153 | logger.info('retrieving dataset', 154 | extra={'ticket': ticket.ticket, 155 | DataSetID: ticket_info.asset_name, 156 | ForUser: True}) 157 | with Config(self.config_path) as config: 158 | asset = asset_from_config(config, ticket_info.asset_name, 
partition_path=ticket_info.partition_path, capability="read") 159 | 160 | if asset.connection_type == "flight": 161 | schema, batches = asset.flight.do_get(context, ticket) 162 | if ticket_info.columns: 163 | asset.add_action(actions.FilterColumns( 164 | columns=ticket_info.columns, 165 | description="filter columns", 166 | options=None)) 167 | else: 168 | schema, batches = self._read_asset(asset, ticket_info.columns) 169 | 170 | schema = transform_schema(asset.actions, schema) 171 | batches = transform(asset.actions, batches) 172 | return fl.GeneratorStream(schema, batches) 173 | 174 | def do_put(self, context, descriptor, reader, writer): 175 | asset_info = json.loads(descriptor.command) 176 | logger.info('writing dataset', 177 | extra={DataSetID: asset_info['asset'], 178 | ForUser: True}) 179 | # default write mode is overwrite 180 | write_mode = 'overwrite' 181 | if 'write_mode' in asset_info: 182 | write_mode = asset_info['write_mode'] 183 | if write_mode not in ['append', 'overwrite']: 184 | raise ValueError("Unsupported write mode type: {}".format(write_mode)) 185 | with Config(self.config_path) as config: 186 | asset = asset_from_config(config, asset_info['asset'], capability="write") 187 | self._write_asset(asset, reader, write_mode) 188 | 189 | def get_schema(self, context, descriptor): 190 | info = self.get_flight_info(context, descriptor) 191 | return fl.SchemaResult(info.schema) 192 | 193 | def list_flights(self, context, criteria): 194 | raise NotImplementedError("list_flights") 195 | 196 | def list_actions(self, context): 197 | raise NotImplementedError("list_actions") 198 | 199 | def do_action(self, context, action): 200 | raise NotImplementedError("do_action") 201 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "53ef70b0d5e384c88fc90e593b8b9601d26fe9da1413c4cb38a2dc0afe7be24b" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "3.8" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 | "default": { 19 | "aiohappyeyeballs": { 20 | "hashes": [ 21 | "sha256:4d6dea59215537dbc746e93e779caea8178c866856a721c9c660d7a5a7b8be03", 22 | "sha256:6fa48b9f1317254f122a07a131a86b71ca6946ca989ce6326fff54a99a920105" 23 | ], 24 | "markers": "python_version >= '3.8'", 25 | "version": "==2.3.5" 26 | }, 27 | "aiohttp": { 28 | "hashes": [ 29 | "sha256:01c98041f90927c2cbd72c22a164bb816fa3010a047d264969cf82e1d4bcf8d1", 30 | "sha256:0df930015db36b460aa9badbf35eccbc383f00d52d4b6f3de2ccb57d064a6ade", 31 | "sha256:1238fc979160bc03a92fff9ad021375ff1c8799c6aacb0d8ea1b357ea40932bb", 32 | "sha256:14dc3fcb0d877911d775d511eb617a486a8c48afca0a887276e63db04d3ee920", 33 | "sha256:14eb6b17f6246959fb0b035d4f4ae52caa870c4edfb6170aad14c0de5bfbf478", 34 | "sha256:18186a80ec5a701816adbf1d779926e1069392cf18504528d6e52e14b5920525", 35 | "sha256:19073d57d0feb1865d12361e2a1f5a49cb764bf81a4024a3b608ab521568093a", 36 | "sha256:1aa005f060aff7124cfadaa2493f00a4e28ed41b232add5869e129a2e395935a", 37 | "sha256:2c474af073e1a6763e1c5522bbb2d85ff8318197e4c6c919b8d7886e16213345", 38 | "sha256:30a9d59da1543a6f1478c3436fd49ec59be3868bca561a33778b4391005e499d", 39 | "sha256:341f8ece0276a828d95b70cd265d20e257f5132b46bf77d759d7f4e0443f2906", 40 | "sha256:352f3a4e5f11f3241a49b6a48bc5b935fabc35d1165fa0d87f3ca99c1fcca98b", 41 | "sha256:377220a5efde6f9497c5b74649b8c261d3cce8a84cb661be2ed8099a2196400a", 42 | "sha256:3988044d1635c7821dd44f0edfbe47e9875427464e59d548aece447f8c22800a", 43 | "sha256:465e445ec348d4e4bd349edd8b22db75f025da9d7b6dc1369c48e7935b85581e", 44 | "sha256:494a6f77560e02bd7d1ab579fdf8192390567fc96a603f21370f6e63690b7f3d", 45 | "sha256:49904f38667c44c041a0b44c474b3ae36948d16a0398a8f8cd84e2bb3c42a069", 46 | "sha256:4d1f694b5d6e459352e5e925a42e05bac66655bfde44d81c59992463d2897014", 47 | "sha256:4ddb43d06ce786221c0dfd3c91b4892c318eaa36b903f7c4278e7e2fa0dd5102", 48 | "sha256:518dc3cb37365255708283d1c1c54485bbacccd84f0a0fb87ed8917ba45eda5b", 49 | "sha256:53e8898adda402be03ff164b0878abe2d884e3ea03a4701e6ad55399d84b92dc", 50 | "sha256:54ba10eb5a3481c28282eb6afb5f709aedf53cf9c3a31875ffbdc9fc719ffd67", 51 | "sha256:54e36c67e1a9273ecafab18d6693da0fb5ac48fd48417e4548ac24a918c20998", 52 | "sha256:562b1153ab7f766ee6b8b357ec777a302770ad017cf18505d34f1c088fccc448", 53 | "sha256:5a7ceb2a0d2280f23a02c64cd0afdc922079bb950400c3dd13a1ab2988428aac", 54 | "sha256:655e583afc639bef06f3b2446972c1726007a21003cd0ef57116a123e44601bc", 55 | "sha256:685c1508ec97b2cd3e120bfe309a4ff8e852e8a7460f1ef1de00c2c0ed01e33c", 56 | "sha256:686c87782481fda5ee6ba572d912a5c26d9f98cc5c243ebd03f95222af3f1b0f", 57 | "sha256:69d73f869cf29e8a373127fc378014e2b17bcfbe8d89134bc6fb06a2f67f3cb3", 58 | "sha256:6fe8503b1b917508cc68bf44dae28823ac05e9f091021e0c41f806ebbb23f92f", 59 | "sha256:74c091a5ded6cb81785de2d7a8ab703731f26de910dbe0f3934eabef4ae417cc", 60 | "sha256:7cc8f65f5b22304693de05a245b6736b14cb5bc9c8a03da6e2ae9ef15f8b458f", 61 | "sha256:7dd9c7db94b4692b827ce51dcee597d61a0e4f4661162424faf65106775b40e7", 62 | 
"sha256:7de3ddb6f424af54535424082a1b5d1ae8caf8256ebd445be68c31c662354720", 63 | "sha256:7f98e70bbbf693086efe4b86d381efad8edac040b8ad02821453083d15ec315f", 64 | "sha256:87fab7f948e407444c2f57088286e00e2ed0003ceaf3d8f8cc0f60544ba61d91", 65 | "sha256:8bd7be6ff6c162a60cb8fce65ee879a684fbb63d5466aba3fa5b9288eb04aefa", 66 | "sha256:8da9449a575133828cc99985536552ea2dcd690e848f9d41b48d8853a149a959", 67 | "sha256:91b10208b222ddf655c3a3d5b727879d7163db12b634492df41a9182a76edaae", 68 | "sha256:92f7f4a4dc9cdb5980973a74d43cdbb16286dacf8d1896b6c3023b8ba8436f8e", 69 | "sha256:9360e3ffc7b23565600e729e8c639c3c50d5520e05fdf94aa2bd859eef12c407", 70 | "sha256:947847f07a8f81d7b39b2d0202fd73e61962ebe17ac2d8566f260679e467da7b", 71 | "sha256:95213b3d79c7e387144e9cb7b9d2809092d6ff2c044cb59033aedc612f38fb6d", 72 | "sha256:96e010736fc16d21125c7e2dc5c350cd43c528b85085c04bf73a77be328fe944", 73 | "sha256:99f81f9c1529fd8e03be4a7bd7df32d14b4f856e90ef6e9cbad3415dbfa9166c", 74 | "sha256:9bb2834a6f11d65374ce97d366d6311a9155ef92c4f0cee543b2155d06dc921f", 75 | "sha256:9dfc906d656e14004c5bc672399c1cccc10db38df2b62a13fb2b6e165a81c316", 76 | "sha256:9f6f0b252a009e98fe84028a4ec48396a948e7a65b8be06ccfc6ef68cf1f614d", 77 | "sha256:9fd16b5e1a7bdd14668cd6bde60a2a29b49147a535c74f50d8177d11b38433a7", 78 | "sha256:a0fde16d284efcacbe15fb0c1013f0967b6c3e379649239d783868230bf1db42", 79 | "sha256:a1a50e59b720060c29e2951fd9f13c01e1ea9492e5a527b92cfe04dd64453c16", 80 | "sha256:a4be88807283bd96ae7b8e401abde4ca0bab597ba73b5e9a2d98f36d451e9aac", 81 | "sha256:ad2274e707be37420d0b6c3d26a8115295fe9d8e6e530fa6a42487a8ca3ad052", 82 | "sha256:b2bfdda4971bd79201f59adbad24ec2728875237e1c83bba5221284dbbf57bda", 83 | "sha256:b52a27a5c97275e254704e1049f4b96a81e67d6205f52fa37a4777d55b0e98ef", 84 | "sha256:c01fbb87b5426381cd9418b3ddcf4fc107e296fa2d3446c18ce6c76642f340a3", 85 | "sha256:c836bf3c7512100219fe1123743fd8dd9a2b50dd7cfb0c3bb10d041309acab4b", 86 | "sha256:c8e98e1845805f184d91fda6f9ab93d7c7b0dddf1c07e0255924bfdb151a8d05", 87 | "sha256:ca2f5abcb0a9a47e56bac173c01e9f6c6e7f27534d91451c5f22e6a35a5a2093", 88 | "sha256:cd33d9de8cfd006a0d0fe85f49b4183c57e91d18ffb7e9004ce855e81928f704", 89 | "sha256:d611d1a01c25277bcdea06879afbc11472e33ce842322496b211319aa95441bb", 90 | "sha256:d9076810a5621236e29b2204e67a68e1fe317c8727ee4c9abbfbb1083b442c38", 91 | "sha256:d984db6d855de58e0fde1ef908d48fe9a634cadb3cf715962722b4da1c40619d", 92 | "sha256:dafb4abb257c0ed56dc36f4e928a7341b34b1379bd87e5a15ce5d883c2c90574", 93 | "sha256:ddfd2dca3f11c365d6857a07e7d12985afc59798458a2fdb2ffa4a0332a3fd43", 94 | "sha256:df59f8486507c421c0620a2c3dce81fbf1d54018dc20ff4fecdb2c106d6e6abc", 95 | "sha256:e00191d38156e09e8c81ef3d75c0d70d4f209b8381e71622165f22ef7da6f101", 96 | "sha256:e2f43d238eae4f0b04f58d4c0df4615697d4ca3e9f9b1963d49555a94f0f5a04", 97 | "sha256:e57e21e1167705f8482ca29cc5d02702208d8bf4aff58f766d94bcd6ead838cd", 98 | "sha256:e8f515d6859e673940e08de3922b9c4a2249653b0ac181169313bd6e4b1978ac", 99 | "sha256:eabe6bf4c199687592f5de4ccd383945f485779c7ffb62a9b9f1f8a3f9756df8", 100 | "sha256:ec6ad66ed660d46503243cbec7b2b3d8ddfa020f984209b3b8ef7d98ce69c3f2", 101 | "sha256:f81cd85a0e76ec7b8e2b6636fe02952d35befda4196b8c88f3cec5b4fb512839", 102 | "sha256:f9f49bdb94809ac56e09a310a62f33e5f22973d6fd351aac72a39cd551e98194", 103 | "sha256:fae962b62944eaebff4f4fddcf1a69de919e7b967136a318533d82d93c3c6bd1", 104 | "sha256:fc61f39b534c5d5903490478a0dd349df397d2284a939aa3cbaa2fb7a19b8397" 105 | ], 106 | "index": "pypi", 107 | "markers": "python_version >= '3.8'", 108 | "version": "==3.10.2" 109 | }, 
110 | "aiosignal": { 111 | "hashes": [ 112 | "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc", 113 | "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17" 114 | ], 115 | "markers": "python_version >= '3.7'", 116 | "version": "==1.3.1" 117 | }, 118 | "async-timeout": { 119 | "hashes": [ 120 | "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f", 121 | "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028" 122 | ], 123 | "markers": "python_version < '3.11'", 124 | "version": "==4.0.3" 125 | }, 126 | "attrs": { 127 | "hashes": [ 128 | "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346", 129 | "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2" 130 | ], 131 | "markers": "python_version >= '3.7'", 132 | "version": "==24.2.0" 133 | }, 134 | "certifi": { 135 | "hashes": [ 136 | "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b", 137 | "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90" 138 | ], 139 | "index": "pypi", 140 | "markers": "python_version >= '3.6'", 141 | "version": "==2024.7.4" 142 | }, 143 | "charset-normalizer": { 144 | "hashes": [ 145 | "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027", 146 | "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087", 147 | "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786", 148 | "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8", 149 | "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09", 150 | "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185", 151 | "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574", 152 | "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e", 153 | "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519", 154 | "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898", 155 | "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269", 156 | "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3", 157 | "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f", 158 | "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6", 159 | "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8", 160 | "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a", 161 | "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73", 162 | "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc", 163 | "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714", 164 | "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2", 165 | "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc", 166 | "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce", 167 | "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d", 168 | "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e", 169 | "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6", 170 | "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269", 171 | "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96", 172 | "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d", 173 | 
"sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a", 174 | "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4", 175 | "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77", 176 | "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d", 177 | "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0", 178 | "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed", 179 | "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068", 180 | "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac", 181 | "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25", 182 | "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8", 183 | "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab", 184 | "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26", 185 | "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2", 186 | "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db", 187 | "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f", 188 | "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5", 189 | "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99", 190 | "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c", 191 | "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d", 192 | "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811", 193 | "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa", 194 | "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a", 195 | "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03", 196 | "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b", 197 | "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04", 198 | "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c", 199 | "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001", 200 | "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458", 201 | "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389", 202 | "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99", 203 | "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985", 204 | "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537", 205 | "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238", 206 | "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f", 207 | "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d", 208 | "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796", 209 | "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a", 210 | "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143", 211 | "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8", 212 | "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c", 213 | "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5", 214 | "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5", 215 | "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711", 216 | 
"sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4", 217 | "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6", 218 | "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c", 219 | "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7", 220 | "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4", 221 | "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b", 222 | "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae", 223 | "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12", 224 | "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c", 225 | "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae", 226 | "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8", 227 | "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887", 228 | "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b", 229 | "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4", 230 | "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f", 231 | "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5", 232 | "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33", 233 | "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519", 234 | "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561" 235 | ], 236 | "markers": "python_full_version >= '3.7.0'", 237 | "version": "==3.3.2" 238 | }, 239 | "faker": { 240 | "hashes": [ 241 | "sha256:618b140c77475786dbe3a5409ad53521cb76746ab7a5c77b99c663f3ef1b1bc2", 242 | "sha256:7e878365aaf2f6a3f88a689c5f8209b8b93f45e3e9c991272552553006856637" 243 | ], 244 | "index": "pypi", 245 | "markers": "python_version >= '3.6'", 246 | "version": "==13.3.0" 247 | }, 248 | "frozenlist": { 249 | "hashes": [ 250 | "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7", 251 | "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98", 252 | "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad", 253 | "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5", 254 | "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae", 255 | "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e", 256 | "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a", 257 | "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701", 258 | "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d", 259 | "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6", 260 | "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6", 261 | "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106", 262 | "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75", 263 | "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868", 264 | "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a", 265 | "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0", 266 | "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1", 267 | "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826", 268 | "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec", 269 | 
"sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6", 270 | "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950", 271 | "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19", 272 | "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0", 273 | "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8", 274 | "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a", 275 | "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09", 276 | "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86", 277 | "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c", 278 | "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5", 279 | "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b", 280 | "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b", 281 | "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d", 282 | "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0", 283 | "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea", 284 | "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776", 285 | "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a", 286 | "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897", 287 | "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7", 288 | "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09", 289 | "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9", 290 | "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe", 291 | "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd", 292 | "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742", 293 | "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09", 294 | "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0", 295 | "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932", 296 | "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1", 297 | "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a", 298 | "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49", 299 | "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d", 300 | "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7", 301 | "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480", 302 | "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89", 303 | "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e", 304 | "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b", 305 | "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82", 306 | "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb", 307 | "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068", 308 | "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8", 309 | "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b", 310 | "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb", 311 | "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2", 312 | 
"sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11", 313 | "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b", 314 | "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc", 315 | "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0", 316 | "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497", 317 | "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17", 318 | "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0", 319 | "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2", 320 | "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439", 321 | "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5", 322 | "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac", 323 | "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825", 324 | "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887", 325 | "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced", 326 | "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74" 327 | ], 328 | "markers": "python_version >= '3.8'", 329 | "version": "==1.4.1" 330 | }, 331 | "fsspec": { 332 | "hashes": [ 333 | "sha256:404194b0cf93128d980689fe9a49013ee9676edd1ab7f0f15b737b2cedb0ab88", 334 | "sha256:e1e494d4814f6804769f3c7bfd7a722a15113cc0339d14755297f09306b8f21f" 335 | ], 336 | "index": "pypi", 337 | "markers": "python_version >= '3.7'", 338 | "version": "==0.8.4" 339 | }, 340 | "fybrik-python-logging": { 341 | "hashes": [ 342 | "sha256:212bbabfe97ae4eec5c33824cc546913f3f55fb02ce5d5ffd83e9b34912926a5", 343 | "sha256:609e34b6d40dcc93bfc8afc0a1c41824c3123d499e91c6f5ab918e8f0c9c6bcb" 344 | ], 345 | "version": "==0.1.0" 346 | }, 347 | "fybrik-python-tls": { 348 | "hashes": [ 349 | "sha256:f2d3c3efbca4dacfe7c1acc4939a7e3375bafd6a206c5118a12a77bd1e321675", 350 | "sha256:f5c905b616457f5fc924018f3ddbf78b7bb716a1570fb7bfe008abba25f4d994" 351 | ], 352 | "version": "==0.1.0" 353 | }, 354 | "fybrik-python-transformation": { 355 | "hashes": [ 356 | "sha256:3e9e9d817774e89d2cb93fa2da20d983b305c9fe517e6c18cb0b4fef137bf5e1", 357 | "sha256:f4918a3c50c0c4426adaa24e4d60ef1aaa724b26e934d12797ba1b558d3500d2" 358 | ], 359 | "version": "==0.2.1" 360 | }, 361 | "fybrik-python-vault": { 362 | "hashes": [ 363 | "sha256:9e439675e05ab6a1dda42d625bb04043a51c79e0a961616caf4efb1b59e63148", 364 | "sha256:d8422b996fa81b3a59e61e355bae6c588a7b96910d57c377c792593351804f13" 365 | ], 366 | "version": "==0.2.0" 367 | }, 368 | "idna": { 369 | "hashes": [ 370 | "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc", 371 | "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0" 372 | ], 373 | "markers": "python_version >= '3.5'", 374 | "version": "==3.7" 375 | }, 376 | "json-log-formatter": { 377 | "hashes": [ 378 | "sha256:5cb983928260638347211df0f9ad399f16bd2bd1909b0b2c6f91ec6ddb09e21a" 379 | ], 380 | "version": "==0.5.0" 381 | }, 382 | "multidict": { 383 | "hashes": [ 384 | "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556", 385 | "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c", 386 | "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29", 387 | "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b", 388 | "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8", 389 | 
"sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7", 390 | "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd", 391 | "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40", 392 | "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6", 393 | "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3", 394 | "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c", 395 | "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9", 396 | "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5", 397 | "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae", 398 | "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442", 399 | "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9", 400 | "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc", 401 | "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c", 402 | "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea", 403 | "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5", 404 | "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50", 405 | "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182", 406 | "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453", 407 | "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e", 408 | "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600", 409 | "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733", 410 | "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda", 411 | "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241", 412 | "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461", 413 | "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e", 414 | "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e", 415 | "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b", 416 | "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e", 417 | "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7", 418 | "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386", 419 | "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd", 420 | "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9", 421 | "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf", 422 | "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee", 423 | "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5", 424 | "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a", 425 | "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271", 426 | "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54", 427 | "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4", 428 | "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496", 429 | "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb", 430 | "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319", 431 | "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3", 432 | 
"sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f", 433 | "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527", 434 | "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed", 435 | "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604", 436 | "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef", 437 | "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8", 438 | "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5", 439 | "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5", 440 | "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626", 441 | "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c", 442 | "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d", 443 | "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c", 444 | "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc", 445 | "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc", 446 | "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b", 447 | "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38", 448 | "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450", 449 | "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1", 450 | "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f", 451 | "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3", 452 | "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755", 453 | "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226", 454 | "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a", 455 | "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046", 456 | "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf", 457 | "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479", 458 | "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e", 459 | "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1", 460 | "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a", 461 | "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83", 462 | "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929", 463 | "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93", 464 | "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a", 465 | "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c", 466 | "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44", 467 | "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89", 468 | "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba", 469 | "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e", 470 | "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da", 471 | "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24", 472 | "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423", 473 | "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef" 474 | ], 475 | "markers": "python_version >= '3.7'", 476 | "version": "==6.0.5" 477 | }, 478 | "numpy": { 479 | "hashes": [ 480 | 
"sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f", 481 | "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61", 482 | "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7", 483 | "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400", 484 | "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef", 485 | "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2", 486 | "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d", 487 | "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc", 488 | "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835", 489 | "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706", 490 | "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5", 491 | "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4", 492 | "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6", 493 | "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463", 494 | "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a", 495 | "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f", 496 | "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e", 497 | "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e", 498 | "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694", 499 | "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8", 500 | "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64", 501 | "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d", 502 | "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc", 503 | "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254", 504 | "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2", 505 | "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1", 506 | "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810", 507 | "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9" 508 | ], 509 | "markers": "python_version >= '3.8'", 510 | "version": "==1.24.4" 511 | }, 512 | "pandas": { 513 | "hashes": [ 514 | "sha256:0010771bd9223f7afe5f051eb47c4a49534345dfa144f2f5470b27189a4dd3b5", 515 | "sha256:061609334a8182ab500a90fe66d46f6f387de62d3a9cb9aa7e62e3146c712167", 516 | "sha256:09d8be7dd9e1c4c98224c4dfe8abd60d145d934e9fc1f5f411266308ae683e6a", 517 | "sha256:295872bf1a09758aba199992c3ecde455f01caf32266d50abc1a073e828a7b9d", 518 | "sha256:3228198333dd13c90b6434ddf61aa6d57deaca98cf7b654f4ad68a2db84f8cfe", 519 | "sha256:385c52e85aaa8ea6a4c600a9b2821181a51f8be0aee3af6f2dcb41dafc4fc1d0", 520 | "sha256:51649ef604a945f781105a6d2ecf88db7da0f4868ac5d45c51cb66081c4d9c73", 521 | "sha256:5586cc95692564b441f4747c47c8a9746792e87b40a4680a2feb7794defb1ce3", 522 | "sha256:5a206afa84ed20e07603f50d22b5f0db3fb556486d8c2462d8bc364831a4b417", 523 | "sha256:5b79af3a69e5175c6fa7b4e046b21a646c8b74e92c6581a9d825687d92071b51", 524 | "sha256:5c54ea4ef3823108cd4ec7fb27ccba4c3a775e0f83e39c5e17f5094cb17748bc", 525 | "sha256:8c5bf555b6b0075294b73965adaafb39cf71c312e38c5935c93d78f41c19828a", 526 | "sha256:92bc1fc585f1463ca827b45535957815b7deb218c549b7c18402c322c7549a12", 527 | "sha256:95c1e422ced0199cf4a34385ff124b69412c4bc912011ce895582bee620dfcaa", 528 | 
"sha256:b8134651258bce418cb79c71adeff0a44090c98d955f6953168ba16cc285d9f7", 529 | "sha256:be67c782c4f1b1f24c2f16a157e12c2693fd510f8df18e3287c77f33d124ed07", 530 | "sha256:c072c7f06b9242c855ed8021ff970c0e8f8b10b35e2640c657d2a541c5950f59", 531 | "sha256:d0d4f13e4be7ce89d7057a786023c461dd9370040bdb5efa0a7fe76b556867a0", 532 | "sha256:df82739e00bb6daf4bba4479a40f38c718b598a84654cbd8bb498fd6b0aa8c16", 533 | "sha256:f549097993744ff8c41b5e8f2f0d3cbfaabe89b4ae32c8c08ead6cc535b80139", 534 | "sha256:ff08a14ef21d94cdf18eef7c569d66f2e24e0bc89350bcd7d243dd804e3b5eb2" 535 | ], 536 | "index": "pypi", 537 | "markers": "python_version >= '3.8'", 538 | "version": "==1.4.2" 539 | }, 540 | "pyarrow": { 541 | "hashes": [ 542 | "sha256:03a10daad957970e914920b793f6a49416699e791f4c827927fd4e4d892a5d16", 543 | "sha256:15511ce2f50343f3fd5e9f7c30e4d004da9134e9597e93e9c96c3985928cbe82", 544 | "sha256:1dd482ccb07c96188947ad94d7536ab696afde23ad172df8e18944ec79f55055", 545 | "sha256:25a5f7c7f36df520b0b7363ba9f51c3070799d4b05d587c60c0adaba57763479", 546 | "sha256:3bd201af6e01f475f02be88cf1f6ee9856ab98c11d8bbb6f58347c58cd07be00", 547 | "sha256:3fee786259d986f8c046100ced54d63b0c8c9f7cdb7d1bbe07dc69e0f928141c", 548 | "sha256:42b7982301a9ccd06e1dd4fabd2e8e5df74b93ce4c6b87b81eb9e2d86dc79871", 549 | "sha256:4a18a211ed888f1ac0b0ebcb99e2d9a3e913a481120ee9b1fe33d3fedb945d4e", 550 | "sha256:51e58778fcb8829fca37fbfaea7f208d5ce7ea89ea133dd13d8ce745278ee6f0", 551 | "sha256:541e7845ce5f27a861eb5b88ee165d931943347eec17b9ff1e308663531c9647", 552 | "sha256:65c7f4cc2be195e3db09296d31a654bb6d8786deebcab00f0e2455fd109d7456", 553 | "sha256:69b043a3fce064ebd9fbae6abc30e885680296e5bd5e6f7353e6a87966cf2ad7", 554 | "sha256:6ea2c54e6b5ecd64e8299d2abb40770fe83a718f5ddc3825ddd5cd28e352cce1", 555 | "sha256:78a6ac39cd793582998dac88ab5c1c1dd1e6503df6672f064f33a21937ec1d8d", 556 | "sha256:81b87b782a1366279411f7b235deab07c8c016e13f9af9f7c7b0ee564fedcc8f", 557 | "sha256:8392b9a1e837230090fe916415ed4c3433b2ddb1a798e3f6438303c70fbabcfc", 558 | "sha256:863be6bad6c53797129610930794a3e797cb7d41c0a30e6794a2ac0e42ce41b8", 559 | "sha256:8cd86e04a899bef43e25184f4b934584861d787cf7519851a8c031803d45c6d8", 560 | "sha256:95c7822eb37663e073da9892f3499fe28e84f3464711a3e555e0c5463fd53a19", 561 | "sha256:98c13b2e28a91b0fbf24b483df54a8d7814c074c2623ecef40dce1fa52f6539b", 562 | "sha256:ba2b7aa7efb59156b87987a06f5241932914e4d5bbb74a465306b00a6c808849", 563 | "sha256:c9c97c8e288847e091dfbcdf8ce51160e638346f51919a9e74fe038b2e8aee62", 564 | "sha256:cb06cacc19f3b426681f2f6803cc06ff481e7fe5b3a533b406bc5b2138843d4f", 565 | "sha256:ce64bc1da3109ef5ab9e4c60316945a7239c798098a631358e9ab39f6e5529e9", 566 | "sha256:d5ef4372559b191cafe7db8932801eee252bfc35e983304e7d60b6954576a071", 567 | "sha256:d6f1e1040413651819074ef5b500835c6c42e6c446532a1ddef8bc5054e8dba5", 568 | "sha256:deb400df8f19a90b662babceb6dd12daddda6bb357c216e558b207c0770c7654", 569 | "sha256:ea132067ec712d1b1116a841db1c95861508862b21eddbcafefbce8e4b96b867", 570 | "sha256:ece333706a94c1221ced8b299042f85fd88b5db802d71be70024433ddf3aecab", 571 | "sha256:edad25522ad509e534400d6ab98cf1872d30c31bc5e947712bfd57def7af15bb" 572 | ], 573 | "index": "pypi", 574 | "markers": "python_version >= '3.7'", 575 | "version": "==8.0.0" 576 | }, 577 | "python-dateutil": { 578 | "hashes": [ 579 | "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", 580 | "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" 581 | ], 582 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 
3.3'", 583 | "version": "==2.8.2" 584 | }, 585 | "pytz": { 586 | "hashes": [ 587 | "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b", 588 | "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7" 589 | ], 590 | "version": "==2023.3.post1" 591 | }, 592 | "pyyaml": { 593 | "hashes": [ 594 | "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5", 595 | "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc", 596 | "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df", 597 | "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741", 598 | "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206", 599 | "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27", 600 | "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595", 601 | "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62", 602 | "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98", 603 | "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696", 604 | "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290", 605 | "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9", 606 | "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d", 607 | "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6", 608 | "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867", 609 | "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47", 610 | "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486", 611 | "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6", 612 | "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3", 613 | "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007", 614 | "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938", 615 | "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0", 616 | "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c", 617 | "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735", 618 | "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d", 619 | "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28", 620 | "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4", 621 | "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba", 622 | "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8", 623 | "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5", 624 | "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd", 625 | "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3", 626 | "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0", 627 | "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515", 628 | "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c", 629 | "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c", 630 | "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924", 631 | "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34", 632 | "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43", 633 | 
"sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859", 634 | "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673", 635 | "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54", 636 | "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a", 637 | "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b", 638 | "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab", 639 | "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa", 640 | "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c", 641 | "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585", 642 | "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d", 643 | "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f" 644 | ], 645 | "index": "pypi", 646 | "markers": "python_version >= '3.6'", 647 | "version": "==6.0.1" 648 | }, 649 | "requests": { 650 | "hashes": [ 651 | "sha256:f2c3881dddb70d056c5bd7600a4fae312b2a300e39be6a118d30b90bd27262b5", 652 | "sha256:fa5490319474c82ef1d2c9bc459d3652e3ae4ef4c4ebdd18a21145a47ca4b6b8" 653 | ], 654 | "index": "pypi", 655 | "markers": "python_version >= '3.8'", 656 | "version": "==2.32.0" 657 | }, 658 | "six": { 659 | "hashes": [ 660 | "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", 661 | "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" 662 | ], 663 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 664 | "version": "==1.16.0" 665 | }, 666 | "urllib3": { 667 | "hashes": [ 668 | "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472", 669 | "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168" 670 | ], 671 | "index": "pypi", 672 | "markers": "python_version >= '3.8'", 673 | "version": "==2.2.2" 674 | }, 675 | "yarl": { 676 | "hashes": [ 677 | "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51", 678 | "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce", 679 | "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559", 680 | "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0", 681 | "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81", 682 | "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc", 683 | "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4", 684 | "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c", 685 | "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130", 686 | "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136", 687 | "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e", 688 | "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec", 689 | "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7", 690 | "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1", 691 | "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455", 692 | "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099", 693 | "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129", 694 | "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10", 695 | "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142", 696 | 
"sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98", 697 | "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa", 698 | "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7", 699 | "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525", 700 | "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c", 701 | "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9", 702 | "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c", 703 | "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8", 704 | "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b", 705 | "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf", 706 | "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23", 707 | "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd", 708 | "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27", 709 | "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f", 710 | "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece", 711 | "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434", 712 | "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec", 713 | "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff", 714 | "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78", 715 | "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d", 716 | "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863", 717 | "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53", 718 | "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31", 719 | "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15", 720 | "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5", 721 | "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b", 722 | "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57", 723 | "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3", 724 | "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1", 725 | "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f", 726 | "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad", 727 | "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c", 728 | "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7", 729 | "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2", 730 | "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b", 731 | "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2", 732 | "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b", 733 | "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9", 734 | "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be", 735 | "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e", 736 | "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984", 737 | "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4", 738 | "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074", 739 | 
"sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2", 740 | "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392", 741 | "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91", 742 | "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541", 743 | "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf", 744 | "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572", 745 | "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66", 746 | "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575", 747 | "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14", 748 | "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5", 749 | "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1", 750 | "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e", 751 | "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551", 752 | "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17", 753 | "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead", 754 | "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0", 755 | "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe", 756 | "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234", 757 | "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0", 758 | "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7", 759 | "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34", 760 | "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42", 761 | "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385", 762 | "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78", 763 | "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be", 764 | "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958", 765 | "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749", 766 | "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec" 767 | ], 768 | "markers": "python_version >= '3.7'", 769 | "version": "==1.9.4" 770 | } 771 | }, 772 | "develop": { 773 | "astroid": { 774 | "hashes": [ 775 | "sha256:1aa149fc5c6589e3d0ece885b4491acd80af4f087baafa3fb5203b113e68cd3c", 776 | "sha256:6c107453dffee9055899705de3c9ead36e74119cee151e5a9aaf7f0b0e020a6a" 777 | ], 778 | "markers": "python_full_version >= '3.7.2'", 779 | "version": "==2.15.8" 780 | }, 781 | "dill": { 782 | "hashes": [ 783 | "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e", 784 | "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03" 785 | ], 786 | "markers": "python_version < '3.11'", 787 | "version": "==0.3.7" 788 | }, 789 | "exceptiongroup": { 790 | "hashes": [ 791 | "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9", 792 | "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3" 793 | ], 794 | "markers": "python_version < '3.11'", 795 | "version": "==1.1.3" 796 | }, 797 | "iniconfig": { 798 | "hashes": [ 799 | "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", 800 | "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" 801 | ], 802 | "markers": "python_version >= '3.7'", 803 | "version": "==2.0.0" 804 | }, 805 | "isort": 
{ 806 | "hashes": [ 807 | "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504", 808 | "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6" 809 | ], 810 | "markers": "python_full_version >= '3.8.0'", 811 | "version": "==5.12.0" 812 | }, 813 | "lazy-object-proxy": { 814 | "hashes": [ 815 | "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382", 816 | "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82", 817 | "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9", 818 | "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494", 819 | "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46", 820 | "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30", 821 | "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63", 822 | "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4", 823 | "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae", 824 | "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be", 825 | "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701", 826 | "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd", 827 | "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006", 828 | "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a", 829 | "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586", 830 | "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8", 831 | "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821", 832 | "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07", 833 | "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b", 834 | "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171", 835 | "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b", 836 | "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2", 837 | "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7", 838 | "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4", 839 | "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8", 840 | "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e", 841 | "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f", 842 | "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda", 843 | "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4", 844 | "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e", 845 | "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671", 846 | "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11", 847 | "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455", 848 | "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734", 849 | "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb", 850 | "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59" 851 | ], 852 | "markers": "python_version >= '3.7'", 853 | "version": "==1.9.0" 854 | }, 855 | "licenseheaders": { 856 | "hashes": [ 857 | "sha256:3b159228b37bbba98bd01448c41a5eff773ab26ac5b14ac98c53d06dbc807696", 858 | "sha256:feb49c1a869f415431503ed56f4f3be48a4161495d3082f44af76c42c6a7e9ef" 859 | ], 860 | 
"index": "pypi", 861 | "markers": "python_version >= '3.5'", 862 | "version": "==0.8.8" 863 | }, 864 | "mccabe": { 865 | "hashes": [ 866 | "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", 867 | "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" 868 | ], 869 | "markers": "python_version >= '3.6'", 870 | "version": "==0.7.0" 871 | }, 872 | "packaging": { 873 | "hashes": [ 874 | "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5", 875 | "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7" 876 | ], 877 | "markers": "python_version >= '3.7'", 878 | "version": "==23.2" 879 | }, 880 | "platformdirs": { 881 | "hashes": [ 882 | "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3", 883 | "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e" 884 | ], 885 | "markers": "python_version >= '3.7'", 886 | "version": "==3.11.0" 887 | }, 888 | "pluggy": { 889 | "hashes": [ 890 | "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12", 891 | "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7" 892 | ], 893 | "markers": "python_version >= '3.8'", 894 | "version": "==1.3.0" 895 | }, 896 | "pylint": { 897 | "hashes": [ 898 | "sha256:73995fb8216d3bed149c8d51bba25b2c52a8251a2c8ac846ec668ce38fab5413", 899 | "sha256:f7b601cbc06fef7e62a754e2b41294c2aa31f1cb659624b9a85bcba29eaf8252" 900 | ], 901 | "index": "pypi", 902 | "markers": "python_full_version >= '3.7.2'", 903 | "version": "==2.17.5" 904 | }, 905 | "pytest": { 906 | "hashes": [ 907 | "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32", 908 | "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a" 909 | ], 910 | "index": "pypi", 911 | "markers": "python_version >= '3.7'", 912 | "version": "==7.4.0" 913 | }, 914 | "regex": { 915 | "hashes": [ 916 | "sha256:00ba3c9818e33f1fa974693fb55d24cdc8ebafcb2e4207680669d8f8d7cca79a", 917 | "sha256:00e871d83a45eee2f8688d7e6849609c2ca2a04a6d48fba3dff4deef35d14f07", 918 | "sha256:06e9abc0e4c9ab4779c74ad99c3fc10d3967d03114449acc2c2762ad4472b8ca", 919 | "sha256:0b9ac09853b2a3e0d0082104036579809679e7715671cfbf89d83c1cb2a30f58", 920 | "sha256:0d47840dc05e0ba04fe2e26f15126de7c755496d5a8aae4a08bda4dd8d646c54", 921 | "sha256:0f649fa32fe734c4abdfd4edbb8381c74abf5f34bc0b3271ce687b23729299ed", 922 | "sha256:107ac60d1bfdc3edb53be75e2a52aff7481b92817cfdddd9b4519ccf0e54a6ff", 923 | "sha256:11175910f62b2b8c055f2b089e0fedd694fe2be3941b3e2633653bc51064c528", 924 | "sha256:12bd4bc2c632742c7ce20db48e0d99afdc05e03f0b4c1af90542e05b809a03d9", 925 | "sha256:16f8740eb6dbacc7113e3097b0a36065a02e37b47c936b551805d40340fb9971", 926 | "sha256:1c0e8fae5b27caa34177bdfa5a960c46ff2f78ee2d45c6db15ae3f64ecadde14", 927 | "sha256:2c54e23836650bdf2c18222c87f6f840d4943944146ca479858404fedeb9f9af", 928 | "sha256:3367007ad1951fde612bf65b0dffc8fd681a4ab98ac86957d16491400d661302", 929 | "sha256:36362386b813fa6c9146da6149a001b7bd063dabc4d49522a1f7aa65b725c7ec", 930 | "sha256:39807cbcbe406efca2a233884e169d056c35aa7e9f343d4e78665246a332f597", 931 | "sha256:39cdf8d141d6d44e8d5a12a8569d5a227f645c87df4f92179bd06e2e2705e76b", 932 | "sha256:3b2c3502603fab52d7619b882c25a6850b766ebd1b18de3df23b2f939360e1bd", 933 | "sha256:3ccf2716add72f80714b9a63899b67fa711b654be3fcdd34fa391d2d274ce767", 934 | "sha256:3fef4f844d2290ee0ba57addcec17eec9e3df73f10a2748485dfd6a3a188cc0f", 935 | "sha256:4023e2efc35a30e66e938de5aef42b520c20e7eda7bb5fb12c35e5d09a4c43f6", 936 | 
"sha256:4a3ee019a9befe84fa3e917a2dd378807e423d013377a884c1970a3c2792d293", 937 | "sha256:4a8bf76e3182797c6b1afa5b822d1d5802ff30284abe4599e1247be4fd6b03be", 938 | "sha256:4a992f702c9be9c72fa46f01ca6e18d131906a7180950958f766c2aa294d4b41", 939 | "sha256:4c34d4f73ea738223a094d8e0ffd6d2c1a1b4c175da34d6b0de3d8d69bee6bcc", 940 | "sha256:4cd1bccf99d3ef1ab6ba835308ad85be040e6a11b0977ef7ea8c8005f01a3c29", 941 | "sha256:4ef80829117a8061f974b2fda8ec799717242353bff55f8a29411794d635d964", 942 | "sha256:58837f9d221744d4c92d2cf7201c6acd19623b50c643b56992cbd2b745485d3d", 943 | "sha256:5a8f91c64f390ecee09ff793319f30a0f32492e99f5dc1c72bc361f23ccd0a9a", 944 | "sha256:5addc9d0209a9afca5fc070f93b726bf7003bd63a427f65ef797a931782e7edc", 945 | "sha256:6239d4e2e0b52c8bd38c51b760cd870069f0bdf99700a62cd509d7a031749a55", 946 | "sha256:66e2fe786ef28da2b28e222c89502b2af984858091675044d93cb50e6f46d7af", 947 | "sha256:69c0771ca5653c7d4b65203cbfc5e66db9375f1078689459fe196fe08b7b4930", 948 | "sha256:6ac965a998e1388e6ff2e9781f499ad1eaa41e962a40d11c7823c9952c77123e", 949 | "sha256:6c56c3d47da04f921b73ff9415fbaa939f684d47293f071aa9cbb13c94afc17d", 950 | "sha256:6f85739e80d13644b981a88f529d79c5bdf646b460ba190bffcaf6d57b2a9863", 951 | "sha256:706e7b739fdd17cb89e1fbf712d9dc21311fc2333f6d435eac2d4ee81985098c", 952 | "sha256:741ba2f511cc9626b7561a440f87d658aabb3d6b744a86a3c025f866b4d19e7f", 953 | "sha256:7434a61b158be563c1362d9071358f8ab91b8d928728cd2882af060481244c9e", 954 | "sha256:76066d7ff61ba6bf3cb5efe2428fc82aac91802844c022d849a1f0f53820502d", 955 | "sha256:7979b834ec7a33aafae34a90aad9f914c41fd6eaa8474e66953f3f6f7cbd4368", 956 | "sha256:7eece6fbd3eae4a92d7c748ae825cbc1ee41a89bb1c3db05b5578ed3cfcfd7cb", 957 | "sha256:7ef1e014eed78ab650bef9a6a9cbe50b052c0aebe553fb2881e0453717573f52", 958 | "sha256:81dce2ddc9f6e8f543d94b05d56e70d03a0774d32f6cca53e978dc01e4fc75b8", 959 | "sha256:82fcc1f1cc3ff1ab8a57ba619b149b907072e750815c5ba63e7aa2e1163384a4", 960 | "sha256:8d1f21af4c1539051049796a0f50aa342f9a27cde57318f2fc41ed50b0dbc4ac", 961 | "sha256:90a79bce019c442604662d17bf69df99090e24cdc6ad95b18b6725c2988a490e", 962 | "sha256:9145f092b5d1977ec8c0ab46e7b3381b2fd069957b9862a43bd383e5c01d18c2", 963 | "sha256:91dc1d531f80c862441d7b66c4505cd6ea9d312f01fb2f4654f40c6fdf5cc37a", 964 | "sha256:979c24cbefaf2420c4e377ecd1f165ea08cc3d1fbb44bdc51bccbbf7c66a2cb4", 965 | "sha256:994645a46c6a740ee8ce8df7911d4aee458d9b1bc5639bc968226763d07f00fa", 966 | "sha256:9b98b7681a9437262947f41c7fac567c7e1f6eddd94b0483596d320092004533", 967 | "sha256:9c6b4d23c04831e3ab61717a707a5d763b300213db49ca680edf8bf13ab5d91b", 968 | "sha256:9c6d0ced3c06d0f183b73d3c5920727268d2201aa0fe6d55c60d68c792ff3588", 969 | "sha256:9fd88f373cb71e6b59b7fa597e47e518282455c2734fd4306a05ca219a1991b0", 970 | "sha256:a8f4e49fc3ce020f65411432183e6775f24e02dff617281094ba6ab079ef0915", 971 | "sha256:a9e908ef5889cda4de038892b9accc36d33d72fb3e12c747e2799a0e806ec841", 972 | "sha256:ad08a69728ff3c79866d729b095872afe1e0557251da4abb2c5faff15a91d19a", 973 | "sha256:adbccd17dcaff65704c856bd29951c58a1bd4b2b0f8ad6b826dbd543fe740988", 974 | "sha256:b0c7d2f698e83f15228ba41c135501cfe7d5740181d5903e250e47f617eb4292", 975 | "sha256:b3ab05a182c7937fb374f7e946f04fb23a0c0699c0450e9fb02ef567412d2fa3", 976 | "sha256:b6104f9a46bd8743e4f738afef69b153c4b8b592d35ae46db07fc28ae3d5fb7c", 977 | "sha256:ba7cd6dc4d585ea544c1412019921570ebd8a597fabf475acc4528210d7c4a6f", 978 | "sha256:bc72c231f5449d86d6c7d9cc7cd819b6eb30134bb770b8cfdc0765e48ef9c420", 979 | 
"sha256:bce8814b076f0ce5766dc87d5a056b0e9437b8e0cd351b9a6c4e1134a7dfbda9", 980 | "sha256:be5e22bbb67924dea15039c3282fa4cc6cdfbe0cbbd1c0515f9223186fc2ec5f", 981 | "sha256:be6b7b8d42d3090b6c80793524fa66c57ad7ee3fe9722b258aec6d0672543fd0", 982 | "sha256:bfe50b61bab1b1ec260fa7cd91106fa9fece57e6beba05630afe27c71259c59b", 983 | "sha256:bff507ae210371d4b1fe316d03433ac099f184d570a1a611e541923f78f05037", 984 | "sha256:c148bec483cc4b421562b4bcedb8e28a3b84fcc8f0aa4418e10898f3c2c0eb9b", 985 | "sha256:c15ad0aee158a15e17e0495e1e18741573d04eb6da06d8b84af726cfc1ed02ee", 986 | "sha256:c2169b2dcabf4e608416f7f9468737583ce5f0a6e8677c4efbf795ce81109d7c", 987 | "sha256:c55853684fe08d4897c37dfc5faeff70607a5f1806c8be148f1695be4a63414b", 988 | "sha256:c65a3b5330b54103e7d21cac3f6bf3900d46f6d50138d73343d9e5b2900b2353", 989 | "sha256:c7964c2183c3e6cce3f497e3a9f49d182e969f2dc3aeeadfa18945ff7bdd7051", 990 | "sha256:cc3f1c053b73f20c7ad88b0d1d23be7e7b3901229ce89f5000a8399746a6e039", 991 | "sha256:ce615c92d90df8373d9e13acddd154152645c0dc060871abf6bd43809673d20a", 992 | "sha256:d29338556a59423d9ff7b6eb0cb89ead2b0875e08fe522f3e068b955c3e7b59b", 993 | "sha256:d8a993c0a0ffd5f2d3bda23d0cd75e7086736f8f8268de8a82fbc4bd0ac6791e", 994 | "sha256:d9c727bbcf0065cbb20f39d2b4f932f8fa1631c3e01fcedc979bd4f51fe051c5", 995 | "sha256:dac37cf08fcf2094159922edc7a2784cfcc5c70f8354469f79ed085f0328ebdf", 996 | "sha256:dd829712de97753367153ed84f2de752b86cd1f7a88b55a3a775eb52eafe8a94", 997 | "sha256:e54ddd0bb8fb626aa1f9ba7b36629564544954fff9669b15da3610c22b9a0991", 998 | "sha256:e77c90ab5997e85901da85131fd36acd0ed2221368199b65f0d11bca44549711", 999 | "sha256:ebedc192abbc7fd13c5ee800e83a6df252bec691eb2c4bedc9f8b2e2903f5e2a", 1000 | "sha256:ef71561f82a89af6cfcbee47f0fabfdb6e63788a9258e913955d89fdd96902ab", 1001 | "sha256:f0a47efb1dbef13af9c9a54a94a0b814902e547b7f21acb29434504d18f36e3a", 1002 | "sha256:f4f2ca6df64cbdd27f27b34f35adb640b5d2d77264228554e68deda54456eb11", 1003 | "sha256:fb02e4257376ae25c6dd95a5aec377f9b18c09be6ebdefa7ad209b9137b73d48" 1004 | ], 1005 | "markers": "python_version >= '3.7'", 1006 | "version": "==2023.10.3" 1007 | }, 1008 | "tomli": { 1009 | "hashes": [ 1010 | "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", 1011 | "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" 1012 | ], 1013 | "markers": "python_version < '3.11'", 1014 | "version": "==2.0.1" 1015 | }, 1016 | "tomlkit": { 1017 | "hashes": [ 1018 | "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86", 1019 | "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899" 1020 | ], 1021 | "markers": "python_version >= '3.7'", 1022 | "version": "==0.12.1" 1023 | }, 1024 | "typing-extensions": { 1025 | "hashes": [ 1026 | "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0", 1027 | "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef" 1028 | ], 1029 | "markers": "python_version < '3.10'", 1030 | "version": "==4.8.0" 1031 | }, 1032 | "wrapt": { 1033 | "hashes": [ 1034 | "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0", 1035 | "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420", 1036 | "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a", 1037 | "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c", 1038 | "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079", 1039 | "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923", 1040 | 
"sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f", 1041 | "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1", 1042 | "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8", 1043 | "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86", 1044 | "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0", 1045 | "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364", 1046 | "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e", 1047 | "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c", 1048 | "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e", 1049 | "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c", 1050 | "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727", 1051 | "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff", 1052 | "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e", 1053 | "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29", 1054 | "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7", 1055 | "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72", 1056 | "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475", 1057 | "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a", 1058 | "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317", 1059 | "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2", 1060 | "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd", 1061 | "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640", 1062 | "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98", 1063 | "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248", 1064 | "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e", 1065 | "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d", 1066 | "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec", 1067 | "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1", 1068 | "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e", 1069 | "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9", 1070 | "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92", 1071 | "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb", 1072 | "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094", 1073 | "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46", 1074 | "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29", 1075 | "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd", 1076 | "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705", 1077 | "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8", 1078 | "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975", 1079 | "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb", 1080 | "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e", 1081 | "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b", 1082 | "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418", 1083 | 
"sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019", 1084 | "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1", 1085 | "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba", 1086 | "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6", 1087 | "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2", 1088 | "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3", 1089 | "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7", 1090 | "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752", 1091 | "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416", 1092 | "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f", 1093 | "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1", 1094 | "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc", 1095 | "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145", 1096 | "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee", 1097 | "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a", 1098 | "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7", 1099 | "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b", 1100 | "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653", 1101 | "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0", 1102 | "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90", 1103 | "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29", 1104 | "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6", 1105 | "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034", 1106 | "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09", 1107 | "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559", 1108 | "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639" 1109 | ], 1110 | "markers": "python_version < '3.11'", 1111 | "version": "==1.15.0" 1112 | } 1113 | } 1114 | } 1115 | --------------------------------------------------------------------------------