├── .docignore ├── .dockerignore ├── .copyright.tmpl ├── db └── .gitignore ├── tests ├── __init__.py ├── ddo │ ├── __init__.py │ ├── ddo_event_sample_v4.py │ ├── ddo_sample1_v4.py │ ├── ddo_sa_sample_with_credentials_v4.py │ ├── ddo_sample_algorithm_v4.py │ ├── ddo_with_compute_service.json │ ├── ddo_sample_algorithm.json │ ├── ddo_sa_sample_with_credentials.json │ ├── ddo_sample_invalid_url.json │ └── ddo_sample_ipfs_url.json ├── helpers │ ├── constants.py │ └── nonce.py ├── resources │ ├── provider_key_file.json │ ├── consumer_key_file.json │ └── branin.arff ├── test_proof.py ├── test_auth.py └── test_graphql.py ├── ocean_provider ├── __init__.py ├── utils │ ├── __init__.py │ ├── consumable.py │ ├── did.py │ ├── test │ │ ├── test_address.py │ │ ├── test_compute.py │ │ ├── test_encyption.py │ │ ├── test_accounts.py │ │ ├── test_provider_fees.py │ │ ├── test_error_responses.py │ │ ├── test_url.py │ │ ├── test_credentials.py │ │ ├── test_currency.py │ │ └── test_basics.py │ ├── data_nft_factory.py │ ├── error_responses.py │ ├── encryption.py │ ├── compute.py │ ├── services.py │ ├── compute_environments.py │ ├── address.py │ ├── proof.py │ ├── data_nft.py │ ├── currency.py │ ├── accounts.py │ ├── asset.py │ ├── url.py │ ├── credentials.py │ ├── util.py │ └── provider_fees.py ├── exceptions.py ├── version.py ├── routes │ ├── __init__.py │ ├── encrypt.py │ └── auth.py ├── validation │ ├── images.py │ └── RBAC.py ├── database.py ├── models.py ├── requests_session.py ├── constants.py ├── myapp.py ├── log.py ├── serializers.py ├── http_provider.py ├── file_types │ ├── file_types_factory.py │ └── types │ │ └── smartcontract.py ├── test │ └── test_user_nonce.py ├── run.py └── user_nonce.py ├── CHANGELOG.md ├── .bumpversion.cfg ├── MANIFEST.in ├── .env.example ├── .pre-commit-config.yaml ├── docker-compose.yml ├── .github └── workflows │ ├── black.yml │ ├── codacy.yml │ ├── codeql-analysis.yml │ └── pytest.yml ├── requirements_dev.txt ├── setup.cfg ├── docker-entrypoint.sh ├── compose-env-values ├── start_local_provider.sh ├── bumpversion.sh ├── pytest.ini ├── logging.yaml ├── deployment └── provider-standard-networks-deployment-example.yaml ├── Dockerfile ├── .gitignore ├── release-process.md ├── Makefile ├── conftest.py ├── setup.py └── README.md /.docignore: -------------------------------------------------------------------------------- 1 | **tests** 2 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | provider-py.egg-info 2 | .eggs 3 | .tox -------------------------------------------------------------------------------- /.copyright.tmpl: -------------------------------------------------------------------------------- 1 | Copyright 2023 Ocean Protocol Foundation 2 | SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /db/.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore everything in this directory 2 | * 3 | # Except this file 4 | !.gitignore 5 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | -------------------------------------------------------------------------------- /tests/ddo/__init__.py: 
-------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | -------------------------------------------------------------------------------- /ocean_provider/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | -------------------------------------------------------------------------------- /ocean_provider/utils/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | <!-- 2 | Copyright 2023 Ocean Protocol Foundation 3 | SPDX-License-Identifier: Apache-2.0 4 | --> 5 | History 6 | ======= 7 | -------------------------------------------------------------------------------- /.bumpversion.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 2.1.6 3 | commit = True 4 | tag = True 5 | 6 | [bumpversion:file:setup.py] 7 | search = version='{current_version}' 8 | replace = version='{new_version}' 9 | -------------------------------------------------------------------------------- /tests/helpers/constants.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | 6 | """branin.arff dataset, permanently stored in Arweave""" 7 | ARWEAVE_TRANSACTION_ID = "a4qJoQZa1poIv5guEzkfgZYSAD0uYm7Vw4zm_tCswVQ" 8 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include CHANGELOG.md 2 | include LICENSE 3 | include README.md 4 | 5 | recursive-include tests * 6 | recursive-exclude * __pycache__ 7 | recursive-exclude * *.py[co] 8 | 9 | recursive-include docs *.md *.rst conf.py Makefile make.bat *.jpg *.png *.gif 10 | -------------------------------------------------------------------------------- /ocean_provider/exceptions.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | 6 | 7 | class InvalidSignatureError(Exception): 8 | """User signature is not valid.""" 9 | 10 | 11 | class RequestNotFound(Exception): 12 | """Request undeclared/undefined.""" 13 | -------------------------------------------------------------------------------- /ocean_provider/version.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2021 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import configparser 6 | 7 | 8 | def get_version(): 9 | conf = configparser.ConfigParser() 10 | conf.read(".bumpversion.cfg") 11 | return conf["bumpversion"]["current_version"] 12 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | FLASK_APP=ocean_provider/run.py 2 | PROVIDER_CONFIG_FILE=config.ini 3 | PROVIDER_ADDRESS=068ed00cf0441e4829d9784fcbe7b9e26d4bd8d0 4 | 
AQUARIUS_URL=https://aquarius.marketplace.dev-ocean.com 5 | 6 | PARITY_ADDRESS1=0x00bd138abd70e2f00903268f3db08f2d25677c9e 7 | PARITY_PASSWORD1=node0 8 | PARITY_KEYFILE1=tests/resources/consumer_key_file.json 9 | REQUEST_TIMEOUT=10 -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/mirrors-isort 3 | rev: v4.3.21 4 | hooks: 5 | - id: isort 6 | - repo: https://github.com/psf/black 7 | rev: 'refs/tags/22.6.0:refs/tags/22.6.0' 8 | hooks: 9 | - id: black 10 | - repo: https://github.com/PyCQA/flake8 11 | rev: 4.0.1 12 | hooks: 13 | - id: flake8 14 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | ## 2 | ## Copyright 2023 Ocean Protocol Foundation 3 | ## SPDX-License-Identifier: Apache-2.0 4 | ## 5 | version: '3' 6 | services: 7 | ocean-provider: 8 | image: oceanprotocol/provider-py:local 9 | ports: 10 | - 8030:8030 11 | env_file: 12 | - ${PROVIDER_ENV_FILE} 13 | volumes: 14 | - ${ARTIFACTS_FOLDER}:/usr/local/:ro 15 | -------------------------------------------------------------------------------- /.github/workflows/black.yml: -------------------------------------------------------------------------------- 1 | ## 2 | ## Copyright 2023 Ocean Protocol Foundation 3 | ## SPDX-License-Identifier: Apache-2.0 4 | ## 5 | name: black 6 | 7 | on: [push] 8 | 9 | jobs: 10 | black: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v2 14 | - uses: psf/black@stable 15 | with: 16 | options: "--check --verbose" 17 | version: "22.10.0" 18 | -------------------------------------------------------------------------------- /requirements_dev.txt: -------------------------------------------------------------------------------- 1 | # pip install ocean-provider 2 | # is for end users. That will install the packages 3 | # listed in the install_requires list of setup.py. 
4 | 5 | # pip install -r requirements_dev.txt 6 | # is for the developers of ocean-provider, so doing that will 7 | # install all the Python packages listed 8 | # in the install_requires list of setup.py 9 | # and also the 'dev' list in the extras_require dict 10 | 11 | -e .[dev] 12 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | universal = 1 3 | 4 | [aliases] 5 | # Define setup.py command aliases here 6 | test = pytest 7 | 8 | [tool:pytest] 9 | collect_ignore = ['setup.py'] 10 | 11 | [isort] 12 | multi_line_output = 3 13 | include_trailing_comma = True 14 | force_grid_wrap = 0 15 | use_parentheses = True 16 | ensure_newline_before_comments = True 17 | line_length = 88 18 | 19 | [flake8] 20 | max-line-length=8888888888 21 | ignore=E203,W503 22 | -------------------------------------------------------------------------------- /ocean_provider/routes/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | """Unites routes from the auth, compute, consume, encrypt and decrypt files.""" 6 | # flake8: noqa 7 | from flask import Blueprint 8 | 9 | services = Blueprint("services", __name__) 10 | 11 | from .auth import * # isort:skip 12 | from .compute import * # isort:skip 13 | from .consume import * # isort:skip 14 | from .encrypt import * # isort:skip 15 | from .decrypt import * # isort:skip 16 | -------------------------------------------------------------------------------- /tests/helpers/nonce.py: -------------------------------------------------------------------------------- 1 | import time 2 | from decimal import Decimal 3 | 4 | from ocean_provider.user_nonce import get_nonce, update_nonce 5 | 6 | 7 | def build_nonce(address) -> Decimal: 8 | nonce = get_nonce(address) 9 | if nonce: 10 | nonce = int(float(nonce)) + 1 11 | update_nonce(address, nonce) 12 | 13 | return Decimal(nonce) 14 | 15 | update_nonce(address, 1) 16 | return Decimal(1) 17 | 18 | 19 | def build_nonce_for_compute() -> int: 20 | return time.time_ns() 21 | -------------------------------------------------------------------------------- /docker-entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | ## 3 | ## Copyright 2023 Ocean Protocol Foundation 4 | ## SPDX-License-Identifier: Apache-2.0 5 | ## 6 | 7 | if [ "${DEPLOY_CONTRACTS}" = "true" ]; then 8 | while [ ! 
-f "/ocean-contracts/artifacts/ready" ]; do 9 | sleep 2 10 | done 11 | fi 12 | 13 | /bin/cp -up /ocean-provider/artifacts/* /usr/local/artifacts/ 2>/dev/null || true 14 | 15 | gunicorn -b ${OCEAN_PROVIDER_URL#*://} -w ${OCEAN_PROVIDER_WORKERS} -t ${OCEAN_PROVIDER_TIMEOUT} ocean_provider.run:app 16 | tail -f /dev/null 17 | -------------------------------------------------------------------------------- /ocean_provider/validation/images.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import logging 6 | 7 | logger = logging.getLogger(__name__) 8 | 9 | 10 | def validate_container(container): 11 | # Validate `container` data 12 | for key in ["entrypoint", "image", "checksum"]: 13 | if not container.get(key): 14 | return False, "missing_entrypoint_image_checksum" 15 | 16 | if not container["checksum"].startswith("sha256:"): 17 | return False, "checksum_prefix" 18 | 19 | return True, "" 20 | -------------------------------------------------------------------------------- /ocean_provider/utils/consumable.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | 6 | 7 | from enum import Enum 8 | 9 | 10 | class ConsumableCodes(Enum): 11 | """ 12 | Contains constant values for: 13 | - OK 14 | - ASSET_DISABLED 15 | - CONNECTIVITY_FAIL 16 | - CREDENTIAL_NOT_IN_ALLOW_LIST 17 | - CREDENTIAL_IN_DENY_LIST 18 | """ 19 | 20 | OK = 0 21 | ASSET_DISABLED = 1 22 | CONNECTIVITY_FAIL = 2 23 | CREDENTIAL_NOT_IN_ALLOW_LIST = 3 24 | CREDENTIAL_IN_DENY_LIST = 4 25 | 26 | 27 | class MalformedCredential(Exception): 28 | pass 29 | -------------------------------------------------------------------------------- /compose-env-values: -------------------------------------------------------------------------------- 1 | PARITY_ADDRESS1=0x00bd138abd70e2f00903268f3db08f2d25677c9e 2 | PARITY_PASSWORD1=node0 3 | PARITY_KEYFILE1=/ocean-provider/tests/resources/consumer_key_file.json 4 | 5 | PROVIDER_ADDRESS=068ed00cf0441e4829d9784fcbe7b9e26d4bd8d0 6 | 7 | NETWORK_URL=wss://rinkeby.infura.io/ws/v3/357f2fe737db4304bd2f7285c5602d0d 8 | AQUARIUS_URL=https://aquarius.marketplace.dev-ocean.com 9 | OCEAN_PROVIDER_URL=http://0.0.0.0:8030 10 | OPERATOR_SERVICE_URL=https://stagev4.c2d.oceanprotocol.com 11 | 12 | AZURE_ACCOUNT_NAME= 13 | AZURE_ACCOUNT_KEY= 14 | AZURE_RESOURCE_GROUP= 15 | AZURE_LOCATION= 16 | AZURE_CLIENT_ID= 17 | AZURE_CLIENT_SECRET= 18 | AZURE_TENANT_ID= 19 | AZURE_SUBSCRIPTION_ID= 20 | -------------------------------------------------------------------------------- /start_local_provider.sh: -------------------------------------------------------------------------------- 1 | # Export env vars 2 | export PROVIDER_PRIVATE_KEY=0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215 3 | export TEST_PRIVATE_KEY1=0xef4b441145c1d0f3b4bc6d61d29f5c6e502359481152f869247c7a4244d45209 4 | export TEST_PRIVATE_KEY2=0x5d75837394b078ce97bc289fa8d75e21000573520bfa7784a9d28ccaae602bf8 5 | export OPERATOR_SERVICE_URL=http://172.15.0.13:31000 6 | export ADDRESS_FILE=~/.ocean/ocean-contracts/artifacts/address.json 7 | export IPFS_GATEWAY=http://172.15.0.16:8080 8 | export AUTHORIZED_DECRYPTERS=[] 9 | export LOG_LEVEL=DEBUG 10 | 11 | # Start Flask server 12 | export FLASK_ENV=development 13 | export FLASK_APP=ocean_provider/run.py 14 | flask run --port=8030 15 | 
-------------------------------------------------------------------------------- /ocean_provider/utils/did.py: -------------------------------------------------------------------------------- 1 | from hashlib import sha256 2 | 3 | from eth_typing.encoding import HexStr 4 | from eth_typing.evm import HexAddress 5 | from eth_utils.hexadecimal import remove_0x_prefix 6 | from web3.main import Web3 7 | 8 | 9 | def compute_did_from_data_nft_address_and_chain_id( 10 | data_nft_address: HexAddress, chain_id: int 11 | ) -> HexStr: 12 | """Return a DID calculated from the data NFT address and chain ID. 13 | See for details: https://github.com/oceanprotocol/docs/blob/v4main/content/concepts/did-ddo.md#did 14 | """ 15 | return "did:op:" + remove_0x_prefix( 16 | Web3.toHex(sha256((data_nft_address + str(chain_id)).encode("utf-8")).digest()) 17 | ) 18 | -------------------------------------------------------------------------------- /ocean_provider/database.py: -------------------------------------------------------------------------------- 1 | """ 2 | Defines sqlite database related variables. 3 | """ 4 | import os 5 | from os.path import abspath, dirname 6 | 7 | from sqlalchemy import create_engine 8 | from sqlalchemy.ext.declarative import declarative_base 9 | from sqlalchemy.orm import sessionmaker 10 | 11 | PROJECT_ROOT = dirname(dirname(abspath(__file__))) 12 | SQLALCHEMY_DATABASE_URL = "sqlite:////" + os.path.join( 13 | PROJECT_ROOT, "db", os.getenv("STORAGE_PATH", "ocean-provider.db") 14 | ) 15 | 16 | engine = create_engine( 17 | SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False} 18 | ) 19 | 20 | SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) 21 | Base = declarative_base() 22 | -------------------------------------------------------------------------------- /tests/resources/provider_key_file.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "50aa801a-8d66-1402-1fa4-d8987868c2ce", 3 | "version": 3, 4 | "crypto": { 5 | "cipher": "aes-128-ctr", 6 | "cipherparams": { 7 | "iv": "a874e6fe50a5bb088826c45560dc1b7e" 8 | }, 9 | "ciphertext": "2383c6aa50c744b6558e77b5dcec6137f647c81f10f71f22a87321fd1306056c", 10 | "kdf": "pbkdf2", 11 | "kdfparams": { 12 | "c": 10240, 13 | "dklen": 32, 14 | "prf": "hmac-sha256", 15 | "salt": "eca6ccc9fbb0bdc3a516c7576808ba5031669e6878f3bb95624ddb46449e119c" 16 | }, 17 | "mac": "14e9a33a45ae32f88a0bd5aac14521c1fcf14f56fd55c1a1c080b2f81ddb8d44" 18 | }, 19 | "address": "068ed00cf0441e4829d9784fcbe7b9e26d4bd8d0", 20 | "name": "", 21 | "meta": "{}" 22 | } 23 | -------------------------------------------------------------------------------- /tests/resources/consumer_key_file.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "0902d04b-f26e-5c1f-e3ae-78d2c1cb16e7", 3 | "version": 3, 4 | "crypto": { 5 | "cipher": "aes-128-ctr", 6 | "cipherparams": { 7 | "iv": "6a829fe7bc656d85f6c2e9fd21784952" 8 | }, 9 | "ciphertext": "1bfec0b054a648af8fdd0e85662206c65a4af0ed15fede4ad41ca9ab7b504ce2", 10 | "kdf": "pbkdf2", 11 | "kdfparams": { 12 | "c": 10240, 13 | "dklen": 32, 14 | "prf": "hmac-sha256", 15 | "salt": "95f96b5ee22dd537e06076eb8d7078eb7275d29af935782fe476696b11be50e5" 16 | }, 17 | "mac": "4af2215c3cd9447a5b0512d7d1c3ea5a4435981e1c8f48bf71d7a49c0e5b4986" 18 | }, 19 | "address": "00bd138abd70e2f00903268f3db08f2d25677c9e", 20 | "name": "Validator0", 21 | "meta": "{}" 22 | } 23 | 
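As a quick, self-contained illustration of the DID derivation implemented in `ocean_provider/utils/did.py` above (the NFT address below is a hypothetical example):

```python
from hashlib import sha256

# Hypothetical data NFT address on the local test chain (chain ID 8996).
nft_address = "0x1d0C4f1dC8058a5395b097De76D3cd8804ef7Bb4"
chain_id = 8996

# Same derivation as compute_did_from_data_nft_address_and_chain_id:
# sha256 over the concatenated address and chain ID, hex-encoded without 0x.
did = "did:op:" + sha256((nft_address + str(chain_id)).encode("utf-8")).hexdigest()
assert len(did) == len("did:op:") + 64
```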
-------------------------------------------------------------------------------- /ocean_provider/models.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | from sqlalchemy import Column, String 6 | 7 | from .database import Base 8 | 9 | 10 | class UserNonce(Base): 11 | """ 12 | Table for storing the nonce values for the Eth account addresses. 13 | """ 14 | 15 | __tablename__ = "user_nonce" 16 | 17 | address = Column(String(255), nullable=False, primary_key=True, autoincrement=False) 18 | nonce = Column(String(255), nullable=False) 19 | 20 | 21 | class RevokedToken(Base): 22 | """ 23 | Table for storing the revoked jwt tokens. 24 | """ 25 | 26 | __tablename__ = "revoked_tokens" 27 | 28 | token = Column(String(255), nullable=False, primary_key=True, autoincrement=False) 29 | -------------------------------------------------------------------------------- /ocean_provider/requests_session.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | from requests.adapters import HTTPAdapter, Retry 6 | from requests.sessions import Session 7 | 8 | 9 | def get_requests_session() -> Session: 10 | """ 11 | Set connection pool maxsize and block value to avoid `connection pool full` warnings. 12 | 13 | :return: requests session 14 | """ 15 | session = Session() 16 | retries = Retry(total=5, backoff_factor=1, status_forcelist=[502, 503, 504]) 17 | session.mount( 18 | "http://", 19 | HTTPAdapter( 20 | pool_connections=25, pool_maxsize=25, pool_block=True, max_retries=retries 21 | ), 22 | ) 23 | session.mount( 24 | "https://", 25 | HTTPAdapter( 26 | pool_connections=25, pool_maxsize=25, pool_block=True, max_retries=retries 27 | ), 28 | ) 29 | return session 30 | -------------------------------------------------------------------------------- /ocean_provider/constants.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | 6 | 7 | class BaseURLs: 8 | """ 9 | This class stores values for: 10 | 11 | - `BASE_PROVIDER_URL` 12 | - `SWAGGER_URL` 13 | - `SERVICES_URL` 14 | """ 15 | 16 | BASE_PROVIDER_URL = "/api" 17 | SWAGGER_URL = "/api/docs" # URL for exposing Swagger UI (without trailing '/') 18 | SERVICES_URL = BASE_PROVIDER_URL + "/services" 19 | NOT_ALLOWED_METHODS = ["HEAD", "OPTIONS"] 20 | 21 | 22 | class Metadata: 23 | """ 24 | This class stores values for: 25 | 26 | - `TITLE` 27 | - `DESCRIPTION` 28 | """ 29 | 30 | TITLE = "Provider" 31 | DESCRIPTION = ( 32 | "Ocean Provider is the technical component executed by Data Providers allowing them to " 33 | "provide extended data services. When running with our Docker images, " 34 | "it is exposed under `http://localhost:8030`." 
35 | ) 36 | HOST = "myfancyprovider.com" 37 | -------------------------------------------------------------------------------- /ocean_provider/utils/test/test_address.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import os 6 | 7 | import pytest 8 | from ocean_provider.utils.address import get_address_json, get_contract_address 9 | 10 | 11 | @pytest.mark.unit 12 | def test_get_address_json(): 13 | address_json = get_address_json(os.getenv("ADDRESS_FILE")) 14 | assert address_json["development"]["chainId"] == 8996 15 | assert address_json["development"]["Ocean"].startswith("0x") 16 | 17 | 18 | @pytest.mark.unit 19 | def test_get_contract_address(): 20 | assert get_contract_address( 21 | os.getenv("ADDRESS_FILE"), "ERC721Factory", 8996 22 | ).startswith("0x") 23 | 24 | 25 | @pytest.mark.unit 26 | def test_get_address_json_missing_var(monkeypatch): 27 | monkeypatch.delenv("ADDRESS_FILE") 28 | address_json = get_address_json(os.getenv("ADDRESS_FILE")) 29 | 30 | assert address_json["goerli"]["chainId"] == 5 31 | assert address_json["goerli"]["Ocean"].startswith("0x") 32 | -------------------------------------------------------------------------------- /ocean_provider/utils/test/test_compute.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import logging 6 | 7 | import pytest 8 | from ocean_provider.utils.compute import ( 9 | get_compute_endpoint, 10 | get_compute_result_endpoint, 11 | ) 12 | 13 | test_logger = logging.getLogger(__name__) 14 | 15 | 16 | @pytest.mark.unit 17 | def test_get_compute_endpoint(monkeypatch): 18 | monkeypatch.setenv("OPERATOR_SERVICE_URL", "http://with-slash.com/") 19 | assert get_compute_endpoint() == "http://with-slash.com/api/v1/operator/compute" 20 | assert ( 21 | get_compute_result_endpoint() 22 | == "http://with-slash.com/api/v1/operator/getResult" 23 | ) 24 | 25 | monkeypatch.setenv("OPERATOR_SERVICE_URL", "http://without-slash.com") 26 | assert get_compute_endpoint() == "http://without-slash.com/api/v1/operator/compute" 27 | assert ( 28 | get_compute_result_endpoint() 29 | == "http://without-slash.com/api/v1/operator/getResult" 30 | ) 31 | -------------------------------------------------------------------------------- /bumpversion.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | ## 3 | ## Copyright 2023 Ocean Protocol Foundation 4 | ## SPDX-License-Identifier: Apache-2.0 5 | ## 6 | 7 | set -x 8 | set -e 9 | 10 | usage(){ 11 | echo "Usage: $0 {major|minor|patch} [--tag]" 12 | exit 1 13 | } 14 | 15 | if ! [ -x "$(command -v bumpversion)" ]; then 16 | echo 'Error: bumpversion is not installed.' >&2 17 | exit 1 18 | elif ! git diff-index --quiet HEAD -- >/dev/null 2>&1; then 19 | echo 'There are local changes in your git repository. Please commit or stash them before bumping version.' 
>&2 20 | exit 1 21 | fi 22 | 23 | if [ "$#" -lt 1 ]; then 24 | echo "Illegal number of parameters" 25 | usage 26 | elif [[ $1 != 'major' && $1 != 'minor' && $1 != 'patch' ]]; then 27 | echo 'First argument must be {major|minor|patch}' 28 | usage 29 | fi 30 | 31 | if [[ $2 == '--tag' ]]; then 32 | if git branch --contains $(git rev-parse --verify HEAD) | grep -E 'main'; then 33 | bumpversion --tag --commit $1 34 | else 35 | echo "Version tags can only be created from the main branch" 36 | exit 1 37 | fi 38 | else 39 | bumpversion $1 40 | fi 41 | -------------------------------------------------------------------------------- /ocean_provider/utils/data_nft_factory.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import os 6 | 7 | from ocean_provider.utils.address import get_contract_address, get_contract_definition 8 | from web3.contract import Contract 9 | from web3.main import Web3 10 | 11 | 12 | def get_data_nft_factory_address(web3: Web3) -> str: 13 | return get_contract_address( 14 | os.getenv("ADDRESS_FILE"), "ERC721Factory", web3.chain_id 15 | ) 16 | 17 | 18 | def get_data_nft_factory_contract(web3: Web3) -> Contract: 19 | abi = get_contract_definition("ERC721Factory")["abi"] 20 | data_nft_factory_address = get_data_nft_factory_address(web3) 21 | return web3.eth.contract( 22 | address=web3.toChecksumAddress(data_nft_factory_address), abi=abi 23 | ) 24 | 25 | 26 | def is_nft_deployed_from_factory(web3: Web3, nft_address: str) -> bool: 27 | """Check if NFT is deployed from the factory.""" 28 | data_nft_factory = get_data_nft_factory_contract(web3) 29 | return data_nft_factory.caller.erc721List(nft_address) == nft_address 30 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | markers = 3 | unit: runnable without external components (deselect with '-m "not unit"') 4 | integration: require external components (deselect with '-m "not integration"') 5 | env = 6 | D:PROVIDER_PRIVATE_KEY={{"8996": "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215"}} 7 | D:UNIVERSAL_PRIVATE_KEY=0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215 8 | D:TEST_PRIVATE_KEY1=0xef4b441145c1d0f3b4bc6d61d29f5c6e502359481152f869247c7a4244d45209 9 | D:TEST_PRIVATE_KEY2=0x5d75837394b078ce97bc289fa8d75e21000573520bfa7784a9d28ccaae602bf8 10 | D:OPERATOR_SERVICE_URL=http://172.15.0.13:31000/ 11 | D:ADDRESS_FILE=~/.ocean/ocean-contracts/artifacts/address.json 12 | D:IPFS_GATEWAY=http://172.15.0.16:8080 13 | D:ARWEAVE_GATEWAY=https://arweave.net/ 14 | D:REDIS_CONNECTION=redis://172.15.0.18:6379 15 | D:FACTORY_DEPLOYER_PRIVATE_KEY=0xc594c6e5def4bab63ac29eed19a134c130388f74f019bc74b8f4389df2837a58 16 | D:MAX_CHECKSUM_LENGTH=5242880 17 | D:LOG_LEVEL=DEBUG 18 | D:NETWORK_URL={{"8996": "http://127.0.0.1:8545"}} 19 | D:AQUARIUS_URL=http://172.15.0.5:5000 20 | D:STORAGE_PATH=ocean-provider.db 21 | D:REQUEST_TIMEOUT=10 22 | -------------------------------------------------------------------------------- /ocean_provider/myapp.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | 6 | """ 7 | This module creates an instance of flask `app`, creates the `user_nonce` and `revoked_tokens` tables if they do not exist, and sets the environment 
configuration. 8 | """ 9 | from flask import Flask, _app_ctx_stack 10 | from flask_cors import CORS 11 | from flask_sieve import Sieve 12 | from ocean_provider.log import setup_logging 13 | from sqlalchemy.orm import scoped_session 14 | 15 | from .database import Base, SessionLocal, engine 16 | 17 | setup_logging() 18 | 19 | with engine.connect() as con: 20 | rs = con.execute( 21 | """ 22 | CREATE TABLE IF NOT EXISTS user_nonce ( 23 | address VARCHAR(255) NOT NULL, 24 | nonce VARCHAR(255) NOT NULL, 25 | PRIMARY KEY (address) 26 | ) 27 | """ 28 | ) 29 | 30 | rs = con.execute( 31 | """ 32 | CREATE TABLE IF NOT EXISTS revoked_tokens ( 33 | token VARCHAR(255) NOT NULL, 34 | PRIMARY KEY (token) 35 | ) 36 | """ 37 | ) 38 | 39 | app = Flask(__name__) 40 | CORS(app) 41 | Sieve(app) 42 | app.session = scoped_session(SessionLocal, scopefunc=_app_ctx_stack.__ident_func__) 43 | Base.query = app.session.query_property() 44 | -------------------------------------------------------------------------------- /ocean_provider/utils/error_responses.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import json 6 | import logging 7 | 8 | from flask import jsonify, make_response 9 | from ocean_provider.utils.url import is_url 10 | 11 | logger = logging.getLogger(__name__) 12 | 13 | STRIPPED_URL_MSG = "" 14 | 15 | 16 | def error_response(err_str, status: int, custom_logger=None): 17 | """Logs error and returns an error response.""" 18 | back_2_dict = isinstance(err_str, dict) 19 | err_str = str(err_str) if not back_2_dict else json.dumps(err_str) 20 | err_str = strip_and_replace_urls(str(err_str)) 21 | 22 | this_logger = custom_logger if custom_logger else logger 23 | this_logger.error(err_str, exc_info=1) 24 | err_str = err_str if not back_2_dict else json.loads(err_str) 25 | response = make_response(jsonify(error=err_str), status) 26 | response.headers["Connection"] = "close" 27 | 28 | return response 29 | 30 | 31 | def strip_and_replace_urls(err_str: str) -> str: 32 | """Strips sensitive data from urls to be logged/returned.""" 33 | tokens = [] 34 | for token in err_str.split(): 35 | tokens += [STRIPPED_URL_MSG] if is_url(token) else [token] 36 | return " ".join(tokens) 37 | -------------------------------------------------------------------------------- /ocean_provider/log.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | 6 | import logging 7 | import logging.config 8 | import os 9 | from pathlib import Path 10 | 11 | import coloredlogs 12 | import yaml 13 | 14 | 15 | def setup_logging(log_config_path="logging.yaml", log_level=None): 16 | """Logging Setup""" 17 | env_log_level = os.getenv("LOG_LEVEL", None) 18 | if env_log_level: 19 | print(f"env var LOG_LEVEL detected = {env_log_level}") 20 | log_level = env_log_level 21 | 22 | env_log_config_path = os.getenv("LOG_CFG", None) 23 | if env_log_config_path: 24 | print(f"env var LOG_CFG detected = {env_log_config_path}") 25 | log_config_path = env_log_config_path 26 | 27 | if log_level: 28 | print(f"Using basic logging config, log level = {log_level}") 29 | logging_level = logging._nameToLevel.get(log_level) 30 | logging.basicConfig(level=logging_level) 31 | coloredlogs.install(level=logging_level) 32 | else: 33 | log_config_path = Path(log_config_path).expanduser().resolve() 34 | 
print(f"Using logging config file, {log_config_path}") 35 | with open(log_config_path, "rt") as f: 36 | log_config_dict = yaml.safe_load(f.read()) 37 | logging.config.dictConfig(log_config_dict) 38 | -------------------------------------------------------------------------------- /logging.yaml: -------------------------------------------------------------------------------- 1 | ## 2 | ## Copyright 2023 Ocean Protocol Foundation 3 | ## SPDX-License-Identifier: Apache-2.0 4 | ## 5 | version: 1 6 | disable_existing_loggers: False 7 | formatters: 8 | simple: 9 | format: "%(asctime)s - %(name)s - %(levelname)s - %(message)s" 10 | 11 | handlers: 12 | console: 13 | class: logging.StreamHandler 14 | level: DEBUG 15 | formatter: simple 16 | stream: ext://sys.stdout 17 | 18 | info_file_handler: 19 | class: logging.handlers.RotatingFileHandler 20 | level: INFO 21 | formatter: simple 22 | filename: info.log 23 | maxBytes: 10485760 # 10MB 24 | backupCount: 20 25 | encoding: utf8 26 | 27 | error_file_handler: 28 | class: logging.handlers.RotatingFileHandler 29 | level: ERROR 30 | formatter: simple 31 | filename: errors.log 32 | maxBytes: 10485760 # 10MB 33 | backupCount: 20 34 | encoding: utf8 35 | 36 | loggers: 37 | ocean_provider: 38 | level: INFO 39 | handlers: [console] 40 | propagate: no 41 | ocean_provider.routes: 42 | level: INFO 43 | handlers: [console] 44 | propagate: no 45 | ocean_provider.util: 46 | level: INFO 47 | handlers: [console] 48 | propagate: no 49 | 50 | root: 51 | level: INFO 52 | handlers: [console, info_file_handler, error_file_handler] 53 | -------------------------------------------------------------------------------- /ocean_provider/utils/test/test_encyption.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import pytest 6 | from ocean_provider.utils.encryption import do_decrypt, do_encrypt 7 | from web3.main import Web3 8 | 9 | 10 | @pytest.mark.unit 11 | def test_encryption_with_bytes(provider_wallet): 12 | test_string = "hello_world" 13 | test_bytes = Web3.toBytes(text=test_string) 14 | result = do_encrypt(test_bytes, provider_wallet) 15 | assert result.startswith("0x") 16 | assert do_decrypt(result, provider_wallet) == test_bytes 17 | 18 | 19 | @pytest.mark.unit 20 | def test_encryption_with_hexstr(provider_wallet): 21 | test_string = '["https://raw.githubusercontent.com/tbertinmahieux/MSongsDB/master/Tasks_Demos/CoverSongs/shs_dataset_test.txt"]' 22 | result = do_encrypt(Web3.toHex(text=test_string), provider_wallet) 23 | assert result.startswith("0x") 24 | assert do_decrypt(result, provider_wallet) == Web3.toBytes(text=test_string) 25 | 26 | 27 | @pytest.mark.unit 28 | def test_encryption_with_text(provider_wallet): 29 | test_string = ( 30 | '["https://raw.githubusercontent.com/tbertinmahieux/MSongsDB/master/Tasks_Demos/CoverSongs/shs_dataset_test.txt", ' 31 | '"https://raw.githubusercontent.com/tbertinmahieux/MSongsDB/master/Tasks_Demos/CoverSongs/shs_dataset_test.txt"]' 32 | ) 33 | result = do_encrypt(test_string, provider_wallet) 34 | assert result.startswith("0x") 35 | assert do_decrypt(result, provider_wallet) == Web3.toBytes(text=test_string) 36 | -------------------------------------------------------------------------------- /deployment/provider-standard-networks-deployment-example.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 
4 | labels: 5 | app: provider 6 | name: provider 7 | spec: 8 | progressDeadlineSeconds: 60 9 | replicas: 1 10 | revisionHistoryLimit: 5 11 | selector: 12 | matchLabels: 13 | app: provider 14 | strategy: 15 | rollingUpdate: 16 | maxSurge: 25% 17 | maxUnavailable: 25% 18 | type: RollingUpdate 19 | template: 20 | metadata: 21 | labels: 22 | app: provider 23 | spec: 24 | containers: 25 | - env: 26 | - name: NETWORK_URL 27 | value: < mainnet, rinkeby, ropsten or custom OpenEthereum service > 28 | - name: PROVIDER_PRIVATE_KEY 29 | value: < private key > 30 | - name: LOG_LEVEL 31 | value: INFO 32 | - name: OCEAN_PROVIDER_URL 33 | value: http://0.0.0.0:8030 34 | - name: OCEAN_PROVIDER_WORKERS 35 | value: "1" 36 | - name: IPFS_GATEWAY 37 | value: < IPFS gateway if defined/available > 38 | - name: OCEAN_PROVIDER_TIMEOUT 39 | value: "9000" 40 | - name: AQUARIUS_URL 41 | value: < http://aquarius_url > 42 | image: oceanprotocol/provider-py:< version tag > 43 | imagePullPolicy: IfNotPresent 44 | name: provider 45 | ports: 46 | - containerPort: 8030 47 | protocol: TCP 48 | terminationMessagePath: /dev/termination-log 49 | terminationMessagePolicy: File 50 | dnsPolicy: ClusterFirst 51 | restartPolicy: Always 52 | schedulerName: default-scheduler 53 | terminationGracePeriodSeconds: 30 54 | -------------------------------------------------------------------------------- /ocean_provider/utils/encryption.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | from typing import Union 6 | 7 | import ecies 8 | from eth_account.account import LocalAccount 9 | from eth_typing.encoding import HexStr 10 | from eth_utils.hexadecimal import is_0x_prefixed 11 | from ocean_provider.utils.accounts import get_private_key 12 | from web3 import Web3 13 | 14 | 15 | def do_encrypt( 16 | document: Union[HexStr, str, bytes], 17 | wallet: LocalAccount = None, 18 | public_key: str = None, 19 | ) -> HexStr: 20 | """ 21 | :param document: document to be encrypted as HexStr, str or bytes 22 | :param wallet: LocalAccount instance 23 | :param public_key: hex-encoded Eth public key 24 | :return: Encrypted document as HexStr 25 | """ 26 | key = get_private_key(wallet).public_key.to_hex() if wallet else public_key 27 | 28 | if isinstance(document, str): 29 | if is_0x_prefixed(document): 30 | document = Web3.toBytes(hexstr=document) 31 | else: 32 | document = Web3.toBytes(text=document) 33 | encrypted_document = ecies.encrypt(key, document) 34 | 35 | return Web3.toHex(encrypted_document) 36 | 37 | 38 | def do_decrypt( 39 | encrypted_document: Union[HexStr, bytes], provider_wallet: LocalAccount 40 | ) -> bytes: 41 | """ 42 | :param encrypted_document: Encrypted document as HexStr or bytes 43 | :param provider_wallet: LocalAccount instance 44 | :return: Decrypted document as bytes 45 | """ 46 | key = get_private_key(provider_wallet).to_hex() 47 | if isinstance(encrypted_document, str): 48 | encrypted_document = Web3.toBytes(hexstr=encrypted_document) 49 | 50 | return ecies.decrypt(key, encrypted_document) 51 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | ## 2 | ## Copyright 2023 Ocean Protocol Foundation 3 | ## SPDX-License-Identifier: Apache-2.0 4 | ## 5 | FROM python:3.8-slim-buster 6 | LABEL maintainer="Ocean Protocol <devops@oceanprotocol.com>" 7 | 8 | ARG VERSION 9 | 10 | RUN apt-get update && \ 11 | apt-get install --no-install-recommends -y \ 12 
| build-essential \ 13 | gcc \ 14 | gettext-base && \ 15 | apt-get clean && \ 16 | rm -rf /var/lib/apt/lists/* 17 | 18 | COPY . /ocean-provider 19 | WORKDIR /ocean-provider 20 | 21 | # Install the package and its Python dependencies 22 | RUN python3.8 -m pip install --no-cache-dir setuptools wheel && \ 23 | python3.8 -m pip install --no-cache-dir . 24 | 25 | ENV NETWORK_URL='http://127.0.0.1:8545' 26 | 27 | ENV PROVIDER_PRIVATE_KEY='' 28 | ENV PROVIDER_ADDRESS='' 29 | 30 | ENV AZURE_ACCOUNT_NAME='' 31 | ENV AZURE_ACCOUNT_KEY='' 32 | ENV AZURE_RESOURCE_GROUP='' 33 | ENV AZURE_LOCATION='' 34 | ENV AZURE_CLIENT_ID='' 35 | ENV AZURE_CLIENT_SECRET='' 36 | ENV AZURE_TENANT_ID='' 37 | ENV AZURE_SUBSCRIPTION_ID='' 38 | 39 | # do checksums only if file size < 5 MB 40 | ENV MAX_CHECKSUM_LENGTH='5242880' 41 | 42 | # Note: AZURE_SHARE_INPUT and AZURE_SHARE_OUTPUT are only used 43 | # for Azure Compute data assets (not for Azure Storage data assets). 44 | # If you're not supporting Azure Compute, just leave their values 45 | # as 'compute' and 'output', respectively. 46 | ENV AZURE_SHARE_INPUT='compute' 47 | ENV AZURE_SHARE_OUTPUT='output' 48 | 49 | ENV OCEAN_PROVIDER_URL='http://0.0.0.0:8030' 50 | 51 | ENV OCEAN_PROVIDER_WORKERS='1' 52 | ENV OCEAN_PROVIDER_TIMEOUT='9000' 53 | ENV ALLOW_NON_PUBLIC_IP=False 54 | ENV ARWEAVE_GATEWAY=https://arweave.net/ 55 | ENV IPFS_GATEWAY=https://ipfs.io 56 | 57 | 58 | ENTRYPOINT ["/ocean-provider/docker-entrypoint.sh"] 59 | 60 | EXPOSE 8030 61 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | tests/resources/branin.arff 10 | 11 | # Distribution / packaging 12 | .Python 13 | build/ 14 | develop-eggs/ 15 | dist/ 16 | downloads/ 17 | eggs/ 18 | .eggs/ 19 | lib/ 20 | lib64/ 21 | parts/ 22 | sdist/ 23 | var/ 24 | wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other info into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | .hypothesis/ 50 | .pytest_cache/ 51 | 52 | # Translations 53 | *.mo 54 | *.pot 55 | 56 | # Django stuff: 57 | *.log 58 | local_settings.py 59 | db.sqlite3 60 | 61 | # Flask stuff: 62 | instance/ 63 | .webassets-cache 64 | 65 | # Scrapy stuff: 66 | .scrapy 67 | 68 | # Sphinx documentation 69 | docs/_build/ 70 | 71 | # PyBuilder 72 | target/ 73 | 74 | # Jupyter Notebook 75 | .ipynb_checkpoints 76 | 77 | # pyenv 78 | .python-version 79 | 80 | # celery beat schedule file 81 | celerybeat-schedule 82 | 83 | # SageMath parsed files 84 | *.sage.py 85 | 86 | # Environments 87 | .env 88 | .venv 89 | env/ 90 | venv/ 91 | ENV/ 92 | env.bak/ 93 | venv.bak/ 94 | 95 | # Spyder project settings 96 | .spyderproject 97 | .spyproject 98 | 99 | # Rope project settings 100 | .ropeproject 101 | 102 | # mkdocs documentation 103 | /site 104 | 105 | # mypy 106 | .mypy_cache/ 107 | .idea 108 | docker/docs 109 | config_local.ini 110 | *.db 111 | -------------------------------------------------------------------------------- /ocean_provider/utils/compute.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import logging 6 | import os 7 | import time 8 | from datetime import datetime, timezone 9 | from urllib.parse import urljoin 10 | 11 | from eth_keys import KeyAPI 12 | from eth_keys.backends import NativeECCBackend 13 | from ocean_provider.user_nonce import get_nonce 14 | from ocean_provider.utils.accounts import sign_message 15 | from ocean_provider.utils.basics import get_provider_wallet 16 | 17 | logger = logging.getLogger(__name__) 18 | keys = KeyAPI(NativeECCBackend) 19 | 20 | 21 | def get_compute_endpoint(): 22 | return urljoin(os.getenv("OPERATOR_SERVICE_URL"), "api/v1/operator/compute") 23 | 24 | 25 | def get_compute_result_endpoint(): 26 | return urljoin(os.getenv("OPERATOR_SERVICE_URL"), "api/v1/operator/getResult") 27 | 28 | 29 | def process_compute_request(data): 30 | provider_wallet = get_provider_wallet(use_universal_key=True) 31 | did = data.get("documentId") 32 | owner = data.get("consumerAddress") 33 | job_id = data.get("jobId") 34 | body = dict() 35 | body["providerAddress"] = provider_wallet.address 36 | if owner is not None: 37 | body["owner"] = owner 38 | if job_id is not None: 39 | body["jobId"] = job_id 40 | if did is not None: 41 | body["documentId"] = did 42 | 43 | nonce, provider_signature = sign_for_compute(provider_wallet, owner, job_id) 44 | body["providerSignature"] = provider_signature 45 | body["nonce"] = nonce 46 | 47 | return body 48 | 49 | 50 | def sign_for_compute(wallet, owner, job_id=None): 51 | nonce = time.time_ns() 52 | logger.info(f"nonce for user {owner} is {nonce}") 53 | 54 | # prepare consumer signature on did 55 | msg = f"{owner}{job_id}{nonce}" if job_id else f"{owner}{nonce}" 56 | signature = sign_message(msg, wallet) 57 | 58 | return nonce, signature 59 | -------------------------------------------------------------------------------- /tests/ddo/ddo_event_sample_v4.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | 
ddo_event_sample_v4 = { 6 | "@context": ["https://w3id.org/did/v1"], 7 | "id": "did:op:ffa5037987b74fbab600d7515605146bb7babcb929c94c60ba93ac5ceda56775", 8 | "created": "2000-10-31T01:30:00.000-05:00", 9 | "updated": "2000-10-31T01:30:00.000-05:00", 10 | "version": "4.1.0", 11 | "chainId": 8996, 12 | "metadata": { 13 | "type": "dataset", 14 | "name": "Event DDO sample", 15 | "description": "Event DDO sample", 16 | "author": "Met Office", 17 | "license": "CC-BY", 18 | "contentLanguage": "en-US", 19 | "tags": ["samples"], 20 | }, 21 | "services": [ 22 | { 23 | "id": "test_id", 24 | "type": "access", 25 | "datatokenAddress": "0x20e91598bb797eEd2C7D4431a274c2997D080f53", 26 | "name": "dataAssetAccess", 27 | "description": "dataAssetAccess", 28 | "serviceEndpoint": "http://localhost:8030/", 29 | "timeout": 0, 30 | "files": "encrypted files", 31 | }, 32 | { 33 | "id": "test_id2", 34 | "type": "compute", 35 | "name": "dataAssetComputingService", 36 | "description": "dataAssetComputingService", 37 | "datatokenAddress": "0x20e91598bb797eEd2C7D4431a274c2997D080f53", 38 | "serviceEndpoint": "http://localhost:8030/", 39 | "timeout": 3600, 40 | "files": "encrypted files", 41 | "compute": { 42 | "namespace": "dataAssetComputingService", 43 | "allowRawAlgorithm": False, 44 | "allowNetworkAccess": False, 45 | "publisherTrustedAlgorithms": [], 46 | "publisherTrustedAlgorithmPublishers": [], 47 | }, 48 | }, 49 | ], 50 | } 51 | -------------------------------------------------------------------------------- /ocean_provider/utils/test/test_accounts.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | from ocean_provider.exceptions import InvalidSignatureError 5 | from ocean_provider.utils.accounts import ( 6 | get_private_key, 7 | sign_message, 8 | verify_signature, 9 | ) 10 | from tests.helpers.nonce import build_nonce 11 | 12 | 13 | @pytest.mark.unit 14 | def test_get_private_key(publisher_wallet): 15 | assert ( 16 | str(get_private_key(publisher_wallet)).lower() 17 | == os.getenv("TEST_PRIVATE_KEY1").lower() 18 | ) 19 | 20 | 21 | @pytest.mark.unit 22 | def test_verify_signature(consumer_wallet, publisher_wallet): 23 | nonce = build_nonce(consumer_wallet.address) 24 | did = "did:op:test" 25 | msg = f"{consumer_wallet.address}{did}{nonce}" 26 | msg_w_nonce = f"{consumer_wallet.address}{did}" 27 | signature = sign_message(msg, consumer_wallet) 28 | 29 | assert verify_signature(consumer_wallet.address, signature, msg_w_nonce, nonce) 30 | 31 | nonce = build_nonce(consumer_wallet.address) 32 | did = "did:op:test" 33 | msg = f"{consumer_wallet.address}{did}{nonce}" 34 | msg_w_nonce = f"{consumer_wallet.address}{did}" 35 | signature = sign_message(msg, consumer_wallet) 36 | 37 | with pytest.raises(InvalidSignatureError) as e_info: 38 | verify_signature(publisher_wallet.address, signature, msg_w_nonce, nonce) 39 | 40 | assert f"Invalid signature {signature} for ethereum address" in e_info.value.args[0] 41 | 42 | nonce = 1 43 | did = "did:op:test" 44 | msg = f"{consumer_wallet.address}{did}{nonce}" 45 | msg_w_nonce = f"{consumer_wallet.address}{did}" 46 | signature = sign_message(msg, consumer_wallet) 47 | # expired nonce 48 | with pytest.raises(InvalidSignatureError) as e_info: 49 | verify_signature(consumer_wallet.address, signature, msg_w_nonce, nonce) 50 | 51 | assert e_info.value.args[0].startswith("Invalid signature expected nonce") 52 | -------------------------------------------------------------------------------- 
/ocean_provider/utils/services.py: -------------------------------------------------------------------------------- 1 | from copy import deepcopy 2 | from typing import Any, Dict, Optional 3 | 4 | from eth_typing.encoding import HexStr 5 | from eth_typing.evm import HexAddress 6 | 7 | 8 | class ServiceType: 9 | ACCESS = "access" 10 | COMPUTE = "compute" 11 | 12 | 13 | class Service: 14 | def __init__( 15 | self, 16 | index: int, 17 | service_id: str, 18 | service_type: ServiceType, 19 | datatoken_address: HexAddress, 20 | service_endpoint: str, 21 | encrypted_files: HexStr, 22 | timeout: int, 23 | name: Optional[str] = None, 24 | description: Optional[str] = None, 25 | compute_dict: Optional[dict] = None, 26 | ) -> None: 27 | """Initialize Service instance. 28 | If service is type "compute", then, compute_dict should be set 29 | """ 30 | self.index = index 31 | self.id = service_id 32 | self.type = service_type 33 | self.name = name 34 | self.description = description 35 | self.datatoken_address = datatoken_address 36 | self.service_endpoint = service_endpoint 37 | self.encrypted_files = encrypted_files 38 | self.timeout = timeout 39 | self.compute_dict = compute_dict 40 | 41 | @staticmethod 42 | def from_json(index: int, service_dict: Dict[str, Any]): 43 | """Create a Service object from a JSON string.""" 44 | sd = deepcopy(service_dict) 45 | return Service( 46 | index=index, 47 | service_id=sd.pop("id"), 48 | service_type=sd.pop("type"), 49 | datatoken_address=sd.pop("datatokenAddress"), 50 | service_endpoint=sd.pop("serviceEndpoint"), 51 | encrypted_files=sd.pop("files"), 52 | timeout=sd.pop("timeout"), 53 | name=sd.pop("name", None), 54 | description=sd.pop("description", None), 55 | compute_dict=sd.pop("compute", None), 56 | ) 57 | -------------------------------------------------------------------------------- /ocean_provider/serializers.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import json 6 | 7 | 8 | class StageAlgoSerializer: 9 | def __init__( 10 | self, 11 | consumer_address, 12 | algo_data, 13 | algo_service, 14 | algo_asset=None, 15 | ): 16 | """Initialize Serializer.""" 17 | self.consumer_address = consumer_address 18 | self.algo_data = algo_data 19 | self.algo_service = algo_service 20 | self.algo_asset = algo_asset 21 | 22 | def serialize(self): 23 | algorithm_meta = self.algo_data.get("meta") 24 | algorithm_did = self.algo_data.get("documentId") 25 | algorithm_tx_id = self.algo_data.get("transferTxId") 26 | 27 | dict_template = { 28 | "id": None, 29 | "rawcode": None, 30 | "container": None, 31 | "algouserdata": None, 32 | } 33 | 34 | if algorithm_meta and isinstance(algorithm_meta, str): 35 | algorithm_meta = json.loads(algorithm_meta) 36 | 37 | if algorithm_did is None: 38 | return dict( 39 | { 40 | "id": "", 41 | "url": algorithm_meta.get("url"), 42 | "rawcode": algorithm_meta.get("rawcode"), 43 | "container": algorithm_meta.get("container"), 44 | } 45 | ) 46 | 47 | dict_template["id"] = algorithm_did 48 | dict_template["rawcode"] = "" 49 | dict_template["container"] = self.algo_asset.metadata["algorithm"]["container"] 50 | dict_template["remote"] = { 51 | "serviceEndpoint": self.algo_service.service_endpoint, 52 | "txId": algorithm_tx_id, 53 | "serviceId": self.algo_service.id, 54 | "userData": self.algo_data.get("algouserdata", None), 55 | } 56 | dict_template["algocustomdata"] = self.algo_data.get("algocustomdata", None) 
57 | return dict(dict_template) 58 | -------------------------------------------------------------------------------- /tests/ddo/ddo_sample1_v4.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | json_dict = { 6 | "@context": ["https://w3id.org/did/v1"], 7 | "id": "did:op:0c184915b07b44c888d468be85a9b28253e80070e5294b1aaed81c2f0264e430", 8 | "version": "4.1.0", 9 | "chainId": 8996, 10 | "nftAddress": "0x0000000000000000000000000000000000000000", 11 | "metadata": { 12 | "created": "2000-10-31T01:30:00.000-05:00", 13 | "updated": "2000-10-31T01:30:00.000-05:00", 14 | "name": "Ocean protocol white paper", 15 | "type": "dataset", 16 | "description": "Ocean protocol white paper -- description", 17 | "author": "Ocean Protocol Foundation Ltd.", 18 | "license": "CC-BY", 19 | "contentLanguage": "en-US", 20 | "tags": ["white-papers"], 21 | "additionalInformation": {"test-key": "test-value"}, 22 | "links": [ 23 | "http://data.ceda.ac.uk/badc/ukcp09/data/gridded-land-obs/gridded-land-obs-daily/", 24 | "http://data.ceda.ac.uk/badc/ukcp09/data/gridded-land-obs/gridded-land-obs-averages-25km/" 25 | "http://data.ceda.ac.uk/badc/ukcp09/", 26 | ], 27 | }, 28 | "services": [ 29 | { 30 | "id": "test", 31 | "type": "access", 32 | "datatokenAddress": "0xC7EC1970B09224B317c52d92f37F5e1E4fF6B687", 33 | "name": "Download service", 34 | "description": "Download service", 35 | "serviceEndpoint": "http://localhost:8030/", 36 | "timeout": 0, 37 | "files": "0x04f0dddf93c186c38bfea243e06889b490a491141585669cfbe7521a5c7acb3bfea5a5527f17eb75ae1f66501e1f70f73df757490c8df479a618b0dd23b2bf3c62d07c372f64c6ad94209947471a898c71f1b2f0ab2a965024fa8e454644661d538b6aa025e517197ac87a3767820f018358999afda760225053df20ff14f499fcf4e7e036beb843ad95587c138e1f972e370d4c68c99ab2602b988c837f6f76658a23e99da369f6898ce1426d49c199cf8ffa33b79002765325c12781a2202239381866c6a06b07754024ee9a6e4aabc8", 38 | } 39 | ], 40 | } 41 | -------------------------------------------------------------------------------- /ocean_provider/utils/test/test_provider_fees.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import patch 2 | 3 | import pytest 4 | from freezegun import freeze_time 5 | from ocean_provider.utils.currency import to_wei 6 | from ocean_provider.utils.provider_fees import get_provider_fee_amount 7 | from tests.helpers.compute_helpers import get_future_valid_until 8 | from tests.test_helpers import ( 9 | BLACK_HOLE_ADDRESS, 10 | deploy_data_nft, 11 | deploy_datatoken, 12 | get_ocean_token_address, 13 | ) 14 | 15 | 16 | @pytest.mark.unit 17 | @freeze_time("Feb 11th, 2012 00:00") 18 | def test_get_provider_fee_amount(web3, publisher_wallet): 19 | valid_until = get_future_valid_until() 20 | assert ( 21 | get_provider_fee_amount( 22 | valid_until, 23 | "ocean-compute", 24 | web3, 25 | "0x0000000000000000000000000000000000000000", 26 | ) 27 | == 0 28 | ) 29 | 30 | data_nft_address = deploy_data_nft( 31 | web3, 32 | "Data NFT Name", 33 | "DATANFTSYMBOL", 34 | 1, 35 | BLACK_HOLE_ADDRESS, 36 | BLACK_HOLE_ADDRESS, 37 | "", 38 | publisher_wallet, 39 | ) 40 | 41 | datatoken_address = deploy_datatoken( 42 | web3=web3, 43 | data_nft_address=data_nft_address, 44 | template_index=1, 45 | name="Datatoken 1", 46 | symbol="DT1", 47 | minter=publisher_wallet.address, 48 | fee_manager=publisher_wallet.address, 49 | publishing_market=BLACK_HOLE_ADDRESS, 50 | 
publishing_market_fee_token=get_ocean_token_address(web3), 51 | cap=to_wei(1000), 52 | publishing_market_fee_amount=0, 53 | from_wallet=publisher_wallet, 54 | ) 55 | 56 | with patch("ocean_provider.utils.provider_fees.get_c2d_environments") as mock: 57 | mock.return_value = [{"id": "ocean-compute", "priceMin": 60}] 58 | assert ( 59 | get_provider_fee_amount( 60 | valid_until, "ocean-compute", web3, datatoken_address 61 | ) 62 | == 3600000000000000000000 63 | ) 64 | -------------------------------------------------------------------------------- /tests/ddo/ddo_sa_sample_with_credentials_v4.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | json_dict = { 6 | "@context": ["https://w3id.org/did/v1"], 7 | "id": "did:op:0c184915b07b44c888d468be85a9b28253e80070e5294b1aaed81c2f0264e430", 8 | "version": "4.1.0", 9 | "chainId": 8996, 10 | "metadata": { 11 | "created": "2000-10-31T01:30:00.000-05:00", 12 | "updated": "2000-10-31T01:30:00.000-05:00", 13 | "name": "Ocean protocol white paper", 14 | "type": "dataset", 15 | "description": "Ocean protocol white paper -- description", 16 | "author": "Ocean Protocol Foundation Ltd.", 17 | "license": "CC-BY", 18 | "contentLanguage": "en-US", 19 | "tags": ["white-papers"], 20 | "additionalInformation": {"test-key": "test-value"}, 21 | "links": [ 22 | "http://data.ceda.ac.uk/badc/ukcp09/data/gridded-land-obs/gridded-land-obs-daily/", 23 | "http://data.ceda.ac.uk/badc/ukcp09/data/gridded-land-obs/gridded-land-obs-averages-25km/" 24 | "http://data.ceda.ac.uk/badc/ukcp09/", 25 | ], 26 | }, 27 | "services": [ 28 | { 29 | "id": "test", 30 | "type": "access", 31 | "datatokenAddress": "0xC7EC1970B09224B317c52d92f37F5e1E4fF6B687", 32 | "name": "Download service", 33 | "description": "Download service", 34 | "serviceEndpoint": "http://localhost:8030/", 35 | "timeout": 0, 36 | "files": "0x04f0dddf93c186c38bfea243e06889b490a491141585669cfbe7521a5c7acb3bfea5a5527f17eb75ae1f66501e1f70f73df757490c8df479a618b0dd23b2bf3c62d07c372f64c6ad94209947471a898c71f1b2f0ab2a965024fa8e454644661d538b6aa025e517197ac87a3767820f018358999afda760225053df20ff14f499fcf4e7e036beb843ad95587c138e1f972e370d4c68c99ab2602b988c837f6f76658a23e99da369f6898ce1426d49c199cf8ffa33b79002765325c12781a2202239381866c6a06b07754024ee9a6e4aabc8", 37 | } 38 | ], 39 | "credentials": { 40 | "allow": [{"type": "address", "values": ["0x123", "0x456A"]}], 41 | "deny": [{"type": "address", "values": ["0x2222", "0x333"]}], 42 | }, 43 | } 44 | -------------------------------------------------------------------------------- /ocean_provider/utils/compute_environments.py: -------------------------------------------------------------------------------- 1 | import os 2 | from urllib.parse import urljoin 3 | 4 | import requests 5 | 6 | from ocean_provider.requests_session import get_requests_session 7 | from ocean_provider.utils.address import get_provider_fee_token 8 | from ocean_provider.utils.basics import get_configured_chains 9 | 10 | requests_session = get_requests_session() 11 | 12 | 13 | def get_compute_environments_endpoint(): 14 | return urljoin(os.getenv("OPERATOR_SERVICE_URL"), "api/v1/operator/environments") 15 | 16 | 17 | def get_c2d_environments(flat=False): 18 | if not os.getenv("OPERATOR_SERVICE_URL"): 19 | return [] 20 | 21 | standard_headers = {"Content-Type": "application/json", "Connection": "close"} 22 | all_environments = [] if flat else {} 23 | 24 | for chain in 
get_configured_chains(): 25 | params = {"chainId": chain} 26 | 27 | try: 28 | response = requests_session.get( 29 | get_compute_environments_endpoint(), 30 | headers=standard_headers, 31 | params=params, 32 | ) 33 | except requests.exceptions.ConnectionError: 34 | response = None 35 | 36 | assert ( 37 | response 38 | ), f"Compute envs could not be retrieved for chainId {chain}, check configuration." 39 | 40 | # add provider token from config 41 | envs = response.json() 42 | for env in envs: 43 | env["feeToken"] = get_provider_fee_token(chain) 44 | 45 | if flat: 46 | all_environments.extend(envs) 47 | else: 48 | all_environments[chain] = envs 49 | 50 | return all_environments 51 | 52 | 53 | def check_environment_exists(envs, env_id): 54 | """Checks whether an environment with the given id exists in the environments list.""" 55 | return bool(get_environment(envs, env_id)) 56 | 57 | 58 | def get_environment(envs, env_id): 59 | """Returns the environment matching the given id from the environments list, or None.""" 60 | if not envs or not isinstance(envs, list): 61 | return None 62 | 63 | matching_envs = [env for env in envs if env["id"] == env_id] 64 | return matching_envs[0] if len(matching_envs) > 0 else None 65 | -------------------------------------------------------------------------------- /ocean_provider/utils/address.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import json 6 | import os 7 | from pathlib import Path 8 | from typing import Any, Dict, Union 9 | 10 | import addresses 11 | import artifacts 12 | from eth_typing.evm import HexAddress 13 | from ocean_provider.utils.basics import get_value_from_decoded_env 14 | 15 | BLACK_HOLE_ADDRESS = "0x0000000000000000000000000000000000000000" 16 | 17 | 18 | def get_address_json(address_path: Union[str, Path]) -> Dict[str, Any]: 19 | """Return the json object of all Ocean contract addresses on all chains.""" 20 | if address_path: 21 | address_path = Path(address_path) 22 | else: 23 | address_path = Path(os.path.join(addresses.__file__, "..", "address.json")) 24 | 25 | address_file = address_path.expanduser().resolve() 26 | with open(address_file) as f: 27 | return json.load(f) 28 | 29 | 30 | def get_contract_address( 31 | address_path: str, contract_name: str, chain_id: int 32 | ) -> HexAddress: 33 | """Return the contract address with the given name and chain id""" 34 | address_json = get_address_json(address_path) 35 | return next( 36 | chain_addresses[contract_name] 37 | for chain_addresses in address_json.values() 38 | if chain_addresses["chainId"] == chain_id 39 | ) 40 | 41 | 42 | def get_contract_definition(contract_name: str) -> Dict[str, Any]: 43 | """Returns the abi JSON for a contract name.""" 44 | path = os.path.join(artifacts.__file__, "..", f"{contract_name}.json") 45 | path = Path(path).expanduser().resolve() 46 | 47 | if not path.exists(): 48 | raise TypeError("Contract name does not exist in artifacts.") 49 | 50 | with open(path) as f: 51 | return json.load(f) 52 | 53 | 54 | def get_provider_fee_token(chain_id): 55 | fee_token = get_value_from_decoded_env(chain_id, "PROVIDER_FEE_TOKEN") 56 | 57 | if not fee_token: 58 | fee_token = get_ocean_address(chain_id) 59 | 60 | return fee_token if fee_token else BLACK_HOLE_ADDRESS 61 | 62 | 63 | def get_ocean_address(chain_id): 64 | return get_contract_address(os.getenv("ADDRESS_FILE"), "Ocean", chain_id) 65 |
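# Usage sketch for the helpers above — a minimal example, assuming ADDRESS_FILE
# points at a valid Ocean address.json and that chain id 8996 (the local
# Barge/ganache network) appears in it:
#
#     import os
#     from ocean_provider.utils.address import get_contract_address, get_provider_fee_token
#
#     ocean = get_contract_address(os.getenv("ADDRESS_FILE"), "Ocean", 8996)
#     fee_token = get_provider_fee_token(8996)  # PROVIDER_FEE_TOKEN if set, else Ocean, else the black hole address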
-------------------------------------------------------------------------------- /ocean_provider/utils/proof.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import requests 4 | from ocean_provider.utils.accounts import sign_message 5 | from ocean_provider.utils.basics import get_provider_wallet, get_web3 6 | from ocean_provider.utils.datatoken import get_datatoken_contract 7 | from ocean_provider.utils.util import sign_and_send 8 | from web3.main import Web3 9 | 10 | 11 | def send_proof( 12 | chain_id, 13 | order_tx_id, 14 | provider_data, 15 | consumer_data, 16 | consumer_signature, 17 | consumer_address, 18 | datatoken_address, 19 | ): 20 | if not os.getenv("USE_CHAIN_PROOF") and not os.getenv("USE_HTTP_PROOF"): 21 | return 22 | 23 | web3 = get_web3(chain_id) 24 | provider_wallet = get_provider_wallet(chain_id) 25 | provider_signature = sign_message(provider_data, provider_wallet) 26 | 27 | if os.getenv("USE_HTTP_PROOF"): 28 | payload = { 29 | "orderTxId": order_tx_id.hex(), 30 | "providerData": provider_data, 31 | "providerSignature": provider_signature, 32 | "consumerData": consumer_data, 33 | "consumerSignature": consumer_signature, 34 | "consumerAddress": consumer_address, 35 | } 36 | 37 | try: 38 | requests.post(os.getenv("USE_HTTP_PROOF"), payload) 39 | 40 | return True 41 | except Exception: 42 | pass 43 | 44 | return 45 | 46 | datatoken_contract = get_datatoken_contract(web3, datatoken_address) 47 | provider_message = order_tx_id + Web3.toBytes(text=provider_data) 48 | provider_signature = sign_message(provider_message, provider_wallet) 49 | 50 | consumer_message = Web3.toBytes(text=consumer_data) 51 | 52 | tx_dict = { 53 | "from": provider_wallet.address, 54 | } 55 | if web3.eth.chain_id == 8996: 56 | tx_dict["gasPrice"] = web3.eth.gas_price 57 | else: 58 | tx_dict["maxPriorityFeePerGas"] = web3.eth.max_priority_fee 59 | 60 | tx = datatoken_contract.functions.orderExecuted( 61 | order_tx_id, 62 | Web3.toBytes(text=provider_data), 63 | provider_signature, 64 | consumer_message, 65 | consumer_signature, 66 | consumer_address, 67 | ).buildTransaction(tx_dict) 68 | 69 | _, transaction_id = sign_and_send(web3, tx, provider_wallet) 70 | 71 | return transaction_id 72 | -------------------------------------------------------------------------------- /release-process.md: -------------------------------------------------------------------------------- 1 | 5 | 6 | # The Provider Release Process 7 | 8 | Please note that this upgrade does not affect the deployed versions of Provider, e.g https://provider.mainnet.oceanprotocol.com, https://provider.polygon.oceanprotocol.com etc. 9 | Contact the Ocean Protocol team for such a deployment. 10 | 11 | ## Step 0: Update documentation 12 | 13 | - Go to https://github.com/oceanprotocol/readthedocs, and follow the steps 14 | - This will update what's shown in https://docs.oceanprotocol.com/references/provider/. 15 | 16 | This doesn't actually affect the pip release of the following steps. And if you've just updated READMEs, you can stop after this step if you like. 17 | 18 | ## Step 1: Bump version and push changes 19 | 20 | - Create a new local feature branch, e.g. `git checkout -b feature/bumpversion-to-v5.2.5` 21 | 22 | - Use the `bumpversion.sh` script to bump the project version. 
You can execute the script using {major|minor|patch} as the first argument to bump the version accordingly: 23 | 24 | - To bump the patch version: `./bumpversion.sh patch` 25 | - To bump the minor version: `./bumpversion.sh minor` 26 | - To bump the major version (for API-breaking changes): `./bumpversion.sh major` 27 | 28 | ### Version numbering 29 | ⚠️ Provider is still in v3. We now use a marketing version numbering convention, where non-breaking changes should be patches, and breaking changes warrant minor releases. Once we integrate the v4 contracts in the OCEAN ecosystem, the Provider will be fully SemVer compatible. 30 | 31 | - Commit the changes to the feature branch. 32 | 33 | `git commit -m "Bump version -> "` 34 | 35 | - Push the feature branch to GitHub. 36 | 37 | `git push origin feature/bumpversion-to-v5.2.5` 38 | 39 | ## Step 2: Merge changes to main branch 40 | 41 | - Make a pull request from the just-pushed branch. 42 | 43 | - Wait for all the tests to pass! 44 | 45 | - Merge the pull request into the `main` branch. 46 | 47 | ## Step 3: Release 48 | 49 | - To make a GitHub release (which creates a Git tag): 50 | - Go to the Provider repo's Releases page 51 | - Click "Draft a new release". 52 | - For tag version, put something like `v5.2.5` 53 | - For release title, put the same value (like `v5.2.5`). 54 | - For the target, select the `main` branch, or the just-merged commit. 55 | - Describe the main changes. 56 | - Click "Publish release". 57 | -------------------------------------------------------------------------------- /.github/workflows/codacy.yml: -------------------------------------------------------------------------------- 1 | # This workflow uses actions that are not certified by GitHub. 2 | # They are provided by a third-party and are governed by 3 | # separate terms of service, privacy policy, and support 4 | # documentation. 5 | 6 | # This workflow checks out code, performs a Codacy security scan 7 | # and integrates the results with the 8 | # GitHub Advanced Security code scanning feature. For more information on 9 | # the Codacy security scan action usage and parameters, see 10 | # https://github.com/codacy/codacy-analysis-cli-action. 11 | # For more information on Codacy Analysis CLI in general, see 12 | # https://github.com/codacy/codacy-analysis-cli.
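# Note on the schedule below: '26 11 * * 2' is minute 26, hour 11 UTC, on
# day-of-week 2 — i.e. the scan also runs every Tuesday at 11:26 UTC.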
13 | 14 | name: Codacy Security Scan 15 | 16 | on: 17 | push: 18 | branches: [ v4main, main ] 19 | pull_request: 20 | # The branches below must be a subset of the branches above 21 | branches: [ v4main ] 22 | schedule: 23 | - cron: '26 11 * * 2' 24 | 25 | permissions: 26 | contents: read 27 | 28 | jobs: 29 | codacy-security-scan: 30 | permissions: 31 | contents: read # for actions/checkout to fetch code 32 | security-events: write # for github/codeql-action/upload-sarif to upload SARIF results 33 | name: Codacy Security Scan 34 | runs-on: ubuntu-latest 35 | steps: 36 | # Checkout the repository to the GitHub Actions runner 37 | - name: Checkout code 38 | uses: actions/checkout@v2 39 | 40 | # Execute Codacy Analysis CLI and generate a SARIF output with the security issues identified during the analysis 41 | - name: Run Codacy Analysis CLI 42 | uses: codacy/codacy-analysis-cli-action@d840f886c4bd4edc059706d09c6a1586111c540b 43 | with: 44 | # Check https://github.com/codacy/codacy-analysis-cli#project-token to get your project token from your Codacy repository 45 | # You can also omit the token and run the tools that support default configurations 46 | project-token: ${{ secrets.CODACY_PROJECT_TOKEN }} 47 | verbose: true 48 | output: results.sarif 49 | format: sarif 50 | # Adjust severity of non-security issues 51 | gh-code-scanning-compat: true 52 | # Force 0 exit code to allow SARIF file generation 53 | # This will handover control about PR rejection to the GitHub side 54 | max-allowed-issues: 2147483647 55 | 56 | # Upload the SARIF file generated in the previous step 57 | - name: Upload SARIF results file 58 | uses: github/codeql-action/upload-sarif@v1 59 | with: 60 | sarif_file: results.sarif 61 | -------------------------------------------------------------------------------- /ocean_provider/utils/test/test_error_responses.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import logging 6 | 7 | import pytest 8 | from ocean_provider.run import app, handle_error 9 | 10 | test_logger = logging.getLogger(__name__) 11 | 12 | 13 | @pytest.mark.unit 14 | def test_service_unavailable(caplog): 15 | context = {"item1": "test1", "item2": "test2"} 16 | 17 | with app.test_request_context(json=context): 18 | e = Exception("test message") 19 | response = handle_error(e) 20 | assert response.status_code == 503 21 | response = response.json 22 | assert response["error"] == "test message" 23 | assert response["context"] == context 24 | 25 | 26 | @pytest.mark.unit 27 | def test_service_unavailable_strip_infura_project_id(): 28 | """Test that service_unavailable strips out URLs.""" 29 | 30 | context = {"item1": "test1", "item2": "test2"} 31 | 32 | # HTTP Infura URL (rinkeby) 33 | with app.test_request_context(json=context): 34 | e = Exception( 35 | "429 Client Error: Too Many Requests for url: " 36 | "https://rinkeby.infura.io/v3/ffffffffffffffffffffffffffffffff" 37 | ) 38 | response = handle_error(e) 39 | assert ( 40 | response.json["error"] == "429 Client Error: Too Many Requests for url: " 41 | "" 42 | ) 43 | 44 | # Websocket Infura URL (ropsten) 45 | with app.test_request_context(json=context): 46 | e = Exception( 47 | "429 Client Error: Too Many Requests for url: " 48 | "wss://ropsten.infura.io/ws/v3/ffffffffffffffffffffffffffffffff" 49 | ) 50 | response = handle_error(e) 51 | assert ( 52 | response.json["error"] == "429 Client Error: Too Many Requests for url: " 53
| "" 54 | ) 55 | 56 | # No URL 57 | with app.test_request_context(json=context): 58 | e = Exception("string without a URL in it") 59 | response = handle_error(e) 60 | assert response.json["error"] == "string without a URL in it" 61 | 62 | # Two URLs 63 | with app.test_request_context(json=context): 64 | e = Exception("Two URLs: wss://google.com https://google.com") 65 | response = handle_error(e) 66 | assert ( 67 | response.json["error"] == "Two URLs: " 68 | " " 69 | "" 70 | ) 71 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean clean-test clean-pyc clean-build docs help 2 | .DEFAULT_GOAL := help 3 | 4 | define BROWSER_PYSCRIPT 5 | import os, webbrowser, sys 6 | 7 | try: 8 | from urllib import pathname2url 9 | except: 10 | from urllib.request import pathname2url 11 | 12 | webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1]))) 13 | endef 14 | export BROWSER_PYSCRIPT 15 | 16 | define PRINT_HELP_PYSCRIPT 17 | import re, sys 18 | 19 | for line in sys.stdin: 20 | match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line) 21 | if match: 22 | target, help = match.groups() 23 | print("%-20s %s" % (target, help)) 24 | endef 25 | export PRINT_HELP_PYSCRIPT 26 | 27 | BROWSER := python -c "$$BROWSER_PYSCRIPT" 28 | 29 | help: 30 | @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) 31 | 32 | clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts 33 | 34 | clean-build: ## remove build artifacts 35 | rm -fr build/ 36 | rm -fr dist/ 37 | rm -fr .eggs/ 38 | find . -name '*.egg-info' -exec rm -fr {} + 39 | find . -name '*.egg' -exec rm -f {} + 40 | 41 | clean-pyc: ## remove Python file artifacts 42 | find . -name '*.pyc' -exec rm -f {} + 43 | find . -name '*.pyo' -exec rm -f {} + 44 | find . -name '*~' -exec rm -f {} + 45 | find . -name '__pycache__' -exec rm -fr {} + 46 | 47 | clean-test: ## remove test and coverage artifacts 48 | rm -f .coverage 49 | rm -f coverage.xml 50 | rm -f *.log 51 | rm -fr htmlcov/ 52 | rm -fr .pytest_cache 53 | 54 | lint: ## check style with PyLint 55 | pylint --errors-only ocean_provider tests 56 | 57 | test: ## run tests quickly with the default Python 58 | py.test 59 | 60 | coverage: ## check code coverage quickly with the default Python 61 | coverage run --source ocean_provider -m pytest 62 | coverage report -m 63 | coverage html 64 | $(BROWSER) htmlcov/index.html 65 | 66 | docs: ## generate Sphinx HTML documentation, including API docs 67 | rm -f docs/ocean_provider.rst 68 | rm -f docs/modules.rst 69 | sphinx-apidoc -o docs/ ocean_provider 70 | $(MAKE) -C docs clean 71 | $(MAKE) -C docs html 72 | $(BROWSER) docs/_build/html/index.html 73 | 74 | servedocs: docs ## compile the docs watching for changes 75 | watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D . 
76 | 77 | release: dist ## package and upload a release 78 | twine upload dist/* 79 | 80 | dist: clean ## builds source and wheel package 81 | python setup.py sdist 82 | python setup.py bdist_wheel 83 | ls -l dist 84 | 85 | install: clean ## install the package to the active Python's site-packages 86 | python setup.py install 87 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ v4main, main ] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: [ v4main ] 20 | schedule: 21 | - cron: '40 4 * * 4' 22 | 23 | jobs: 24 | analyze: 25 | name: Analyze 26 | runs-on: ubuntu-latest 27 | permissions: 28 | actions: read 29 | contents: read 30 | security-events: write 31 | 32 | strategy: 33 | fail-fast: false 34 | matrix: 35 | language: [ 'python' ] 36 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] 37 | # Learn more about CodeQL language support at https://git.io/codeql-language-support 38 | 39 | steps: 40 | - name: Checkout repository 41 | uses: actions/checkout@v2 42 | 43 | # Initializes the CodeQL tools for scanning. 44 | - name: Initialize CodeQL 45 | uses: github/codeql-action/init@v1 46 | with: 47 | languages: ${{ matrix.language }} 48 | # If you wish to specify custom queries, you can do so here or in a config file. 49 | # By default, queries listed here will override any specified in a config file. 50 | # Prefix the list here with "+" to use these queries and those in the config file. 51 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 52 | 53 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 54 | # If this step fails, then you should remove it and run the build manually (see below) 55 | - name: Autobuild 56 | uses: github/codeql-action/autobuild@v1 57 | 58 | # ℹ️ Command-line programs to run using the OS shell. 
59 | # 📚 https://git.io/JvXDl 60 | 61 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 62 | # and modify them (or add more) to build your code if your project 63 | # uses a compiled language 64 | 65 | #- run: | 66 | # make bootstrap 67 | # make release 68 | 69 | - name: Perform CodeQL Analysis 70 | uses: github/codeql-action/analyze@v1 71 | -------------------------------------------------------------------------------- /tests/ddo/ddo_sample_algorithm_v4.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | algorithm_ddo_sample = { 6 | "@context": ["https://w3id.org/did/v1"], 7 | "id": "did:op:0bc278fee025464f8012b811d1bce8e22094d0984e4e49139df5d5ff7a028bdf", 8 | "version": "4.1.0", 9 | "chainId": 8996, 10 | "proof": { 11 | "created": "2019-02-08T08:13:41Z", 12 | "creator": "0x37BB53e3d293494DE59fBe1FF78500423dcFd43B", 13 | "signatureValue": "did:op:0bc278fee025464f8012b811d1bce8e22094d0984e4e49139df5d5ff7a028bdf", 14 | "type": "DDOIntegritySignature", 15 | "checksum": { 16 | "0": "0x52b5c93b82dd9e7ecc3d9fdf4755f7f69a54484941897dc517b4adfe3bbc3377", 17 | "1": "0x999999952b5c93b82dd9e7ecc3d9fdf4755f7f69a54484941897dc517b4adfe3", 18 | }, 19 | }, 20 | "metadata": { 21 | "created": "2019-02-08T08:13:49Z", 22 | "updated": "2019-02-08T08:13:49Z", 23 | "author": "John Doe", 24 | "license": "CC-BY", 25 | "name": "My super algorithm", 26 | "type": "algorithm", 27 | "description": "workflow for weather", 28 | "algorithm": { 29 | "language": "scala", 30 | "format": "docker-image", 31 | "version": "0.1", 32 | "container": { 33 | "entrypoint": "node $ALGO", 34 | "image": "oceanprotocol/algo_dockers", 35 | "tag": "python-branin", 36 | "checksum": "sha256:8221d20c1c16491d7d56b9657ea09082c0ee4a8ab1a6621fa720da58b09580e4", 37 | }, 38 | }, 39 | "additionalInformation": { 40 | "description": "Workflow to aggregate weather information", 41 | "tags": ["weather", "uk", "2011", "workflow", "aggregation"], 42 | "copyrightHolder": "John Doe", 43 | }, 44 | }, 45 | "services": [ 46 | { 47 | "id": "test", 48 | "type": "compute", 49 | "name": "dataAssetComputingService", 50 | "description": "dataAssetComputingService", 51 | "datatokenAddress": "0x20e91598bb797eEd2C7D4431a274c2997D080f53", 52 | "serviceEndpoint": "http://localhost:8030/", 53 | "timeout": 3600, 54 | "compute": { 55 | "namespace": "test", 56 | "allowRawAlgorithm": False, 57 | "allowNetworkAccess": False, 58 | "publisherTrustedAlgorithms": [], 59 | "publisherTrustedAlgorithmPublishers": [], 60 | }, 61 | "files": "encryptedFiles", 62 | } 63 | ], 64 | } 65 | -------------------------------------------------------------------------------- /ocean_provider/http_provider.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | from typing import Any, Dict 6 | 7 | import lru 8 | import requests 9 | from ocean_provider.version import get_version 10 | from requests.adapters import HTTPAdapter 11 | from requests.sessions import Session 12 | from web3 import HTTPProvider 13 | from web3._utils.caching import generate_cache_key 14 | 15 | 16 | def _remove_session(key: str, session: Session) -> None: 17 | session.close() 18 | 19 | 20 | _session_cache = lru.LRU(8, callback=_remove_session) 21 | 22 | 23 | class CustomHTTPProvider(HTTPProvider): 24 | """Override requests 
to control the connection pool to make it blocking.""" 25 | 26 | def make_request(self, method: str, params: Any) -> Dict[str, Any]: 27 | self.logger.debug( 28 | "Making request HTTP. URI: %s, Method: %s", self.endpoint_uri, method 29 | ) 30 | request_data = self.encode_rpc_request(method, params) 31 | raw_response = make_post_request( 32 | self.endpoint_uri, request_data, **self.get_request_kwargs() 33 | ) 34 | response = self.decode_rpc_response(raw_response) 35 | self.logger.debug( 36 | "Getting response HTTP. URI: %s, " "Method: %s, Response: %s", 37 | self.endpoint_uri, 38 | method, 39 | response, 40 | ) 41 | return response 42 | 43 | 44 | def make_post_request(endpoint_uri: str, data: bytes, *args, **kwargs) -> bytes: 45 | kwargs.setdefault("timeout", 10) 46 | 47 | version = get_version() 48 | version_header = {"User-Agent": f"OceanProvider/{version}"} 49 | 50 | if "headers" in kwargs: 51 | kwargs["headers"].update(version_header) 52 | else: 53 | kwargs["headers"] = version_header 54 | 55 | session = _get_session(endpoint_uri) 56 | response = session.post(endpoint_uri, data=data, *args, **kwargs) 57 | response.raise_for_status() 58 | 59 | return response.content 60 | 61 | 62 | def _get_session(*args, **kwargs) -> Session: 63 | cache_key = generate_cache_key((args, kwargs)) 64 | if cache_key not in _session_cache: 65 | # This is the main change from original Web3 `_get_session` 66 | session = requests.sessions.Session() 67 | session.mount( 68 | "http://", 69 | HTTPAdapter(pool_connections=25, pool_maxsize=25, pool_block=True), 70 | ) 71 | session.mount( 72 | "https://", 73 | HTTPAdapter(pool_connections=25, pool_maxsize=25, pool_block=True), 74 | ) 75 | _session_cache[cache_key] = session 76 | return _session_cache[cache_key] 77 | -------------------------------------------------------------------------------- /ocean_provider/utils/data_nft.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import logging 6 | from enum import IntEnum, IntFlag 7 | from typing import Iterable, Optional, Tuple 8 | 9 | from ocean_provider.utils.address import get_contract_definition 10 | from web3.contract import Contract 11 | from web3.logs import DISCARD 12 | from web3.main import Web3 13 | from web3.types import EventData, TxReceipt 14 | 15 | logger = logging.getLogger(__name__) 16 | 17 | 18 | class MetadataState(IntEnum): 19 | ACTIVE = 0 20 | END_OF_LIFE = 1 21 | DEPRECATED = 2 22 | REVOKED = 3 23 | TEMPORARILY_DISABLED = 4 24 | UNLISTED = 5 25 | 26 | 27 | class Flags(IntFlag): 28 | PLAIN = 0 29 | COMPRESSED = 1 30 | ENCRYPTED = 2 31 | 32 | def to_byte(self): 33 | return self.to_bytes(1, "big") 34 | 35 | 36 | def get_data_nft_contract(web3: Web3, address: Optional[str] = None) -> Contract: 37 | """ 38 | Build a web3 Contract instance using the Ocean Protocol ERC721Template ABI. 39 | 40 | This function assumes that the standard `ERC721Template` stored at index 1 41 | of the `ERC721Factory` provides all the functionality needed by Provider, 42 | especially the `getMetaData` contract method. 43 | """ 44 | abi = get_contract_definition("ERC721Template")["abi"] 45 | return web3.eth.contract(address=web3.toChecksumAddress(address), abi=abi) 46 | 47 | 48 | def get_metadata(web3: Web3, address: str) -> Tuple[str, str, MetadataState, bool]: 49 | """Queries the ERC721 Template smart contract getMetaData call. 
50 | Returns metaDataDecryptorUrl, metaDataDecryptorAddress, metaDataState, and hasMetaData 51 | """ 52 | data_nft_contract = get_data_nft_contract(web3, address) 53 | 54 | return data_nft_contract.caller.getMetaData() 55 | 56 | 57 | def get_metadata_logs_from_tx_receipt( 58 | web3: Web3, tx_receipt: TxReceipt, data_nft_address 59 | ) -> Iterable[EventData]: 60 | """Selects MetadataCreated or MetadataUpdated log based on tx receipt.""" 61 | data_nft_contract = get_data_nft_contract(web3, data_nft_address) 62 | logs = data_nft_contract.events.MetadataCreated().processReceipt( 63 | tx_receipt, errors=DISCARD 64 | ) 65 | if not logs: 66 | logs = data_nft_contract.events.MetadataUpdated().processReceipt( 67 | tx_receipt, errors=DISCARD 68 | ) 69 | if not logs: 70 | raise ValueError( 71 | f"MetadataCreated/MetadataUpdated event not found " 72 | f"in tx id: {tx_receipt.transactionHash}" 73 | ) 74 | if len(logs) > 1: 75 | logger.warning( 76 | f"More than 1 MetadataCreated/MetadataUpdated event found " 77 | f"in tx id: {tx_receipt.transactionHash}" 78 | ) 79 | return logs 80 | -------------------------------------------------------------------------------- /tests/test_proof.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import json 6 | from unittest.mock import Mock, patch 7 | 8 | import pytest 9 | from ocean_provider.utils.accounts import sign_message 10 | from ocean_provider.utils.proof import send_proof 11 | from ocean_provider.utils.provider_fees import get_provider_fees 12 | from ocean_provider.utils.services import ServiceType 13 | from requests.models import Response 14 | from tests.helpers.nonce import build_nonce 15 | from tests.test_helpers import ( 16 | get_first_service_by_type, 17 | get_registered_asset, 18 | mint_100_datatokens, 19 | start_order, 20 | ) 21 | 22 | 23 | @pytest.mark.unit 24 | def test_no_proof_setup(client): 25 | assert send_proof(None, None, None, None, None, None, None) is None 26 | 27 | 28 | @pytest.mark.unit 29 | def test_http_proof(client, monkeypatch): 30 | monkeypatch.setenv("USE_HTTP_PROOF", "http://test.com") 31 | provider_data = json.dumps({"test_data": "test_value"}) 32 | 33 | with patch("requests.post") as mock: 34 | response = Mock(spec=Response) 35 | response.json.return_value = {"a valid response": ""} 36 | response.status_code = 200 37 | mock.return_value = response 38 | 39 | assert send_proof(8996, b"1", provider_data, None, None, None, None) is True 40 | 41 | mock.assert_called_once() 42 | 43 | with patch("requests.post") as mock: 44 | mock.side_effect = Exception("Boom!") 45 | 46 | assert send_proof(8996, b"1", provider_data, None, None, None, None) is None 47 | 48 | mock.assert_called_once() 49 | 50 | 51 | @pytest.mark.integration 52 | def test_chain_proof(client, monkeypatch, web3, publisher_wallet, consumer_wallet): 53 | monkeypatch.setenv("USE_CHAIN_PROOF", "1") 54 | provider_data = json.dumps({"test_data": "test_value"}) 55 | 56 | asset = get_registered_asset(publisher_wallet) 57 | service = get_first_service_by_type(asset, ServiceType.ACCESS) 58 | mint_100_datatokens( 59 | web3, service.datatoken_address, consumer_wallet.address, publisher_wallet 60 | ) 61 | tx_id, receipt = start_order( 62 | web3, 63 | service.datatoken_address, 64 | consumer_wallet.address, 65 | service.index, 66 | get_provider_fees(asset, service, consumer_wallet.address, 0), 67 | consumer_wallet, 68 | ) 69 | 70 | nonce = 
build_nonce(consumer_wallet.address) 71 | 72 | consumer_data = _msg = f"{asset.did}{nonce}" 73 | signature = sign_message(_msg, consumer_wallet) 74 | 75 | assert send_proof( 76 | 8996, 77 | receipt.transactionHash, 78 | provider_data, 79 | consumer_data, 80 | signature, 81 | consumer_wallet.address, 82 | service.datatoken_address, 83 | ) 84 | -------------------------------------------------------------------------------- /ocean_provider/utils/currency.py: -------------------------------------------------------------------------------- 1 | from decimal import ROUND_DOWN, Context, Decimal, localcontext 2 | from typing import Union 3 | 4 | from eth_utils.currency import units 5 | 6 | """The maximum uint256 value.""" 7 | MAX_UINT256 = 2**256 - 1 8 | 9 | """decimal.Context tuned to accommodate MAX_WEI. 10 | * precision=78 because there are 78 digits in MAX_WEI (MAX_UINT256). 11 | Any lower and decimal operations like quantize throw an InvalidOperation error. 12 | * rounding=ROUND_DOWN (towards 0, aka. truncate) to avoid an issue where a user 13 | removes 100% from a pool and the transaction fails because it rounds up. 14 | """ 15 | ETHEREUM_DECIMAL_CONTEXT = Context(prec=78, rounding=ROUND_DOWN) 16 | 17 | 18 | """ERC20 tokens usually opt for a decimals value of 18, imitating the 19 | relationship between Ether and Wei.""" 20 | DECIMALS_18 = 18 21 | 22 | """The minimum possible token amount on Ethereum-compatible blockchains, denoted in wei""" 23 | MIN_WEI = 1 24 | 25 | """The maximum possible token amount on Ethereum-compatible blockchains, denoted in wei""" 26 | MAX_WEI = MAX_UINT256 27 | 28 | """The minimum possible token amount on Ethereum-compatible blockchains, denoted in ether""" 29 | MIN_ETHER = Decimal("0.000000000000000001") 30 | 31 | """The maximum possible token amount on Ethereum-compatible blockchains, denoted in ether""" 32 | MAX_ETHER = Decimal(MAX_WEI).scaleb(-18, context=ETHEREUM_DECIMAL_CONTEXT) 33 | 34 | 35 | def to_wei(amount_in_ether: Union[Decimal, str, int]) -> int: 36 | return parse_units(amount_in_ether, DECIMALS_18) 37 | 38 | 39 | def parse_units( 40 | amount: Union[Decimal, str, int], unit_name: Union[str, int] = DECIMALS_18 41 | ) -> int: 42 | """ 43 | Convert token amount from a formatted unit to an EVM-compatible integer. 44 | float input is purposefully not supported. 45 | """ 46 | num_decimals = ( 47 | int(units[unit_name].log10()) if isinstance(unit_name, str) else unit_name 48 | ) 49 | 50 | decimal_amount = normalize_and_validate_unit(amount, num_decimals) 51 | 52 | if decimal_amount == Decimal(0): 53 | return 0 54 | 55 | unit_value = Decimal(10) ** num_decimals 56 | 57 | with localcontext(ETHEREUM_DECIMAL_CONTEXT): 58 | return int(decimal_amount * unit_value) 59 | 60 | 61 | def normalize_and_validate_unit( 62 | amount: Union[Decimal, str, int], decimals: int = DECIMALS_18 63 | ) -> Decimal: 64 | """Returns an amount in ether, encoded as a Decimal 65 | Takes Decimal, str, or int as input.
Purposefully does not support float.""" 66 | if isinstance(amount, str) or isinstance(amount, int): 67 | amount = Decimal(amount) 68 | 69 | if abs(amount) > Decimal(MAX_WEI).scaleb( 70 | -decimals, context=ETHEREUM_DECIMAL_CONTEXT 71 | ): 72 | raise ValueError("Token amount exceeds maximum.") 73 | 74 | return amount 75 | -------------------------------------------------------------------------------- /ocean_provider/file_types/file_types_factory.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from typing import Any, Tuple 3 | 4 | from enforce_typing import enforce_types 5 | from ocean_provider.file_types.file_types import ( 6 | ArweaveFile, 7 | GraphqlQuery, 8 | IpfsFile, 9 | UrlFile, 10 | ) 11 | from ocean_provider.file_types.types.smartcontract import SmartContractCall 12 | 13 | logger = logging.getLogger(__name__) 14 | 15 | 16 | @enforce_types 17 | class FilesTypeFactory: 18 | """Factory Method""" 19 | 20 | ALLOWED_FILE_TYPES = ["ipfs", "url", "arweave", "graphql", "smartcontract"] 21 | 22 | @staticmethod 23 | def validate_and_create(file_obj) -> Tuple[bool, Any]: 24 | if not file_obj: 25 | return False, "cannot decrypt files for this service." 26 | 27 | try: 28 | if file_obj["type"] == "url": 29 | instance = UrlFile( 30 | file_obj.get("url"), 31 | method=file_obj.get("method"), 32 | headers=file_obj.get("headers"), 33 | userdata=file_obj.get("userdata"), 34 | ) 35 | elif file_obj["type"] == "arweave": 36 | instance = ArweaveFile( 37 | file_obj.get("transactionId"), 38 | headers=file_obj.get("headers"), 39 | userdata=file_obj.get("userdata"), 40 | ) 41 | elif file_obj["type"] == "ipfs": 42 | instance = IpfsFile( 43 | file_obj.get("hash"), 44 | headers=file_obj.get("headers"), 45 | userdata=file_obj.get("userdata"), 46 | ) 47 | elif file_obj["type"] == "graphql": 48 | instance = GraphqlQuery( 49 | url=file_obj.get("url"), 50 | query=file_obj.get("query"), 51 | headers=file_obj.get("headers"), 52 | userdata=file_obj.get("userdata"), 53 | ) 54 | elif file_obj["type"] == "smartcontract": 55 | instance = SmartContractCall( 56 | address=file_obj.get("address"), 57 | chain_id=file_obj.get("chainId"), 58 | abi=file_obj.get("abi"), 59 | userdata=file_obj.get("userdata"), 60 | ) 61 | else: 62 | logger.debug(f"Unsupported type {file_obj}") 63 | return False, f'Unsupported type {file_obj["type"]}' 64 | except TypeError: 65 | logger.debug(f"malformed file object {file_obj}") 66 | return False, "malformed file object." 
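        # validate_dict() is expected to return a (bool, result) tuple — mirroring
        # the (False, message) early returns above — so it is the boolean flag,
        # not the tuple itself, that must be checked: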
67 | status = instance.validate_dict() 68 | if not status[0]: 69 | logger.debug(f"validate_dict failed on {file_obj}") 70 | else: 71 | logger.debug(f"validate_dict passed on {file_obj}") 72 | return status 73 | -------------------------------------------------------------------------------- /ocean_provider/test/test_user_nonce.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import os 6 | import sqlite3 7 | 8 | import pytest 9 | from flask_caching import Cache 10 | from ocean_provider.myapp import app 11 | from ocean_provider.user_nonce import ( 12 | get_nonce, 13 | update_nonce, 14 | ) 15 | from tests.helpers.nonce import build_nonce 16 | 17 | cache = Cache( 18 | app, 19 | config={ 20 | "CACHE_TYPE": "redis", 21 | "CACHE_KEY_PREFIX": "ocean_provider", 22 | "CACHE_REDIS_URL": os.getenv("REDIS_CONNECTION"), 23 | }, 24 | ) 25 | 26 | 27 | @pytest.mark.unit 28 | def test_get_and_update_nonce(monkeypatch, publisher_address, consumer_address): 29 | # pass through sqlite 30 | monkeypatch.delenv("REDIS_CONNECTION") 31 | 32 | # get_nonce can be used on addresses that are not in the user_nonce table 33 | assert get_nonce("0x0000000000000000000000000000000000000000") is None 34 | 35 | # update two times because, if we just pruned, we start from None 36 | publisher_nonce = build_nonce(publisher_address) 37 | new_publisher_nonce = build_nonce(publisher_address) 38 | 39 | assert new_publisher_nonce >= publisher_nonce 40 | 41 | # get_nonce doesn't affect the value of nonce 42 | publisher_nonce = get_nonce(publisher_address) 43 | assert get_nonce(publisher_address) == publisher_nonce 44 | 45 | 46 | @pytest.mark.unit 47 | def test_get_and_update_nonce_redis(publisher_address, consumer_address): 48 | # get_nonce can be used on addresses that are not in the user_nonce table 49 | cache.delete("0x0000000000000000000000000000000000000000") 50 | assert get_nonce("0x0000000000000000000000000000000000000000") is None 51 | 52 | # update two times because, if we just pruned, we start from None 53 | update_nonce(publisher_address, build_nonce(publisher_address)) 54 | publisher_nonce = get_nonce(publisher_address) 55 | update_nonce(publisher_address, build_nonce(publisher_address)) 56 | new_publisher_nonce = get_nonce(publisher_address) 57 | 58 | assert new_publisher_nonce >= publisher_nonce 59 | 60 | # get_nonce doesn't affect the value of nonce 61 | publisher_nonce = get_nonce(publisher_address) 62 | assert get_nonce(publisher_address) == publisher_nonce 63 | 64 | 65 | @pytest.mark.unit 66 | def test_update_nonce_exception(monkeypatch, publisher_address): 67 | # pass through sqlite 68 | monkeypatch.delenv("REDIS_CONNECTION") 69 | 70 | nonce_object = get_nonce(publisher_address) 71 | 72 | # Create duplicate nonce_object 73 | with pytest.raises(sqlite3.IntegrityError): 74 | update_nonce(publisher_address, nonce_object) 75 | 76 | publisher_nonce = get_nonce(publisher_address) 77 | update_nonce(publisher_address, None) 78 | # no effect 79 | assert publisher_nonce == get_nonce(publisher_address) 80 | -------------------------------------------------------------------------------- /conftest.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | 6 | import os 7 | 8 | import pytest 9 | from eth_account import Account 10 | from ocean_provider.run import app 11 | from 
ocean_provider.utils.basics import get_provider_private_key, get_web3, send_ether 12 | from ocean_provider.utils.provider_fees import get_c2d_environments 13 | 14 | app = app 15 | 16 | 17 | @pytest.fixture 18 | def client(): 19 | client = app.test_client() 20 | yield client 21 | 22 | 23 | @pytest.fixture 24 | def publisher_wallet(): 25 | return Account.from_key(os.getenv("TEST_PRIVATE_KEY1")) 26 | 27 | 28 | @pytest.fixture 29 | def publisher_address(publisher_wallet): 30 | return publisher_wallet.address 31 | 32 | 33 | @pytest.fixture 34 | def consumer_wallet(): 35 | return Account.from_key(os.getenv("TEST_PRIVATE_KEY2")) 36 | 37 | 38 | @pytest.fixture 39 | def consumer_address(consumer_wallet): 40 | return consumer_wallet.address 41 | 42 | 43 | @pytest.fixture 44 | def ganache_wallet(): 45 | web3 = get_web3(8996) 46 | if ( 47 | web3.eth.accounts 48 | and web3.eth.accounts[0].lower() 49 | == "0xe2DD09d719Da89e5a3D0F2549c7E24566e947260".lower() 50 | ): 51 | return Account.from_key( 52 | "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215" 53 | ) 54 | 55 | return None 56 | 57 | 58 | @pytest.fixture 59 | def provider_wallet(): 60 | pk = get_provider_private_key(8996) 61 | return Account.from_key(pk) 62 | 63 | 64 | @pytest.fixture 65 | def provider_address(provider_wallet): 66 | return provider_wallet.address 67 | 68 | 69 | @pytest.fixture(autouse=True) 70 | def setup_all(provider_address, consumer_address, ganache_wallet): 71 | web3 = get_web3(8996) 72 | if ganache_wallet: 73 | if ( 74 | web3.fromWei( 75 | web3.eth.get_balance(provider_address, block_identifier="latest"), 76 | "ether", 77 | ) 78 | < 10 79 | ): 80 | send_ether(web3, ganache_wallet, provider_address, 25) 81 | 82 | if ( 83 | web3.fromWei( 84 | web3.eth.get_balance(consumer_address, block_identifier="latest"), 85 | "ether", 86 | ) 87 | < 10 88 | ): 89 | send_ether(web3, ganache_wallet, consumer_address, 25) 90 | 91 | 92 | @pytest.fixture 93 | def web3(): 94 | return get_web3(8996) 95 | 96 | 97 | @pytest.fixture 98 | def free_c2d_env(): 99 | environments = get_c2d_environments(flat=True) 100 | 101 | return next(env for env in environments if float(env["priceMin"]) == float(0)) 102 | 103 | 104 | @pytest.fixture 105 | def paid_c2d_env(): 106 | environments = get_c2d_environments(flat=True) 107 | 108 | return next(env for env in environments if env["id"] == "ocean-compute-env2") 109 | -------------------------------------------------------------------------------- /ocean_provider/routes/encrypt.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import logging 6 | 7 | from flask import Response, request 8 | from ocean_provider.requests_session import get_requests_session 9 | from ocean_provider.utils.basics import get_provider_wallet 10 | from ocean_provider.utils.encryption import do_encrypt 11 | from ocean_provider.utils.error_responses import error_response 12 | 13 | from . import services 14 | 15 | # provider_wallet = get_provider_wallet() 16 | requests_session = get_requests_session() 17 | 18 | logger = logging.getLogger(__name__) 19 | 20 | 21 | @services.route("/encrypt", methods=["POST"]) 22 | def encrypt(): 23 | """Encrypt data using the Provider's own symmetric key (symmetric encryption). 24 | This can be used by the publisher of an asset to encrypt the DDO of the 25 | asset data files before publishing the asset DDO. 
A typical publisher using this 26 | service is one using a front-end with a wallet app such as MetaMask. 27 | The DDO is encrypted by the provider so that the provider will be able 28 | to decrypt it later, at the time the service is provided. 29 | 30 | --- 31 | tags: 32 | - encrypt 33 | consumes: 34 | - application/octet-stream 35 | parameters: 36 | - in: query 37 | name: chainId 38 | required: true 39 | description: chainId to be used for encryption, given as query parameter 40 | - in: body 41 | name: body 42 | required: true 43 | description: Binary document contents to encrypt. 44 | responses: 45 | 201: 46 | description: DDO successfully encrypted. 47 | 400: 48 | description: Invalid request content type or failure to encrypt. 49 | 503: 50 | description: Service Unavailable 51 | 52 | return: the encrypted DDO (hex str) 53 | """ 54 | if request.content_type != "application/octet-stream": 55 | return error_response( 56 | "Invalid request content type: should be application/octet-stream", 57 | 400, 58 | logger, 59 | ) 60 | 61 | chain_id = request.args.get("chainId") 62 | if not chain_id: 63 | return error_response( 64 | "Missing chainId query parameter.", 65 | 400, 66 | logger, 67 | ) 68 | 69 | data = request.get_data() 70 | logger.debug(f"encrypt called. arguments = {data}") 71 | 72 | return _encrypt(data, chain_id) 73 | 74 | 75 | def _encrypt(data: bytes, chain_id) -> Response: 76 | try: 77 | provider_wallet = get_provider_wallet(chain_id) 78 | encrypted_data = do_encrypt(data, provider_wallet) 79 | logger.info(f"encrypted_data = {encrypted_data}") 80 | except Exception: 81 | return error_response("Failed to encrypt.", 400, logger) 82 | 83 | response = Response( 84 | encrypted_data, 85 | 201, 86 | headers={"Content-type": "text/plain"}, 87 | ) 88 | logger.info(f"encrypt response = {response}") 89 | 90 | return response 91 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # 4 | # Copyright 2023 Ocean Protocol Foundation 5 | # SPDX-License-Identifier: Apache-2.0 6 | # 7 | 8 | """The setup script.""" 9 | 10 | # Copyright 2018 Ocean Protocol Foundation 11 | # SPDX-License-Identifier: Apache-2.0 12 | 13 | from setuptools import find_packages, setup 14 | 15 | with open("README.md") as readme_file: 16 | readme = readme_file.read() 17 | 18 | # Installed by pip install ocean-provider 19 | # or pip install -e .
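# The dev/test extras declared below in extras_require can be installed as, e.g.:
#   pip install -e ".[dev]"    # dev plus test dependencies
#   pip install -e ".[test]"   # test dependencies only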
20 | install_requirements = [ 21 | "ocean-contracts==2.0.2", 22 | "web3==5.25.0", 23 | "Flask==2.1.2", 24 | "Flask-Cors==3.0.9", 25 | "flask_caching==1.10.1", 26 | "Flask-RESTful==0.3.8", 27 | "flask-swagger==0.2.14", 28 | "flask-swagger-ui==3.25.0", 29 | "Jinja2>=2.10.1,<3.1", 30 | "gunicorn==20.0.4", 31 | "coloredlogs==15.0.1", 32 | "Werkzeug==2.0.3", 33 | "eciespy==0.3.11", 34 | "coincurve>=13,<15", 35 | "ipaddress==1.0.23", 36 | "dnspython==2.2.1", 37 | "flask-sieve==1.3.1", 38 | "SQLAlchemy==1.3.23", 39 | "redis==4.5.4", 40 | "enforce-typing==1.0.0.post1", 41 | "pyjwt==2.4.0", 42 | "pysha3==1.0.2", 43 | ] 44 | 45 | # Required to run setup.py: 46 | setup_requirements = ["pytest-runner"] 47 | 48 | test_requirements = [ 49 | "codacy-coverage==1.3.11", 50 | "coverage==6.4.4", 51 | "docker==6.0.0", 52 | "freezegun==1.1.0", 53 | "mccabe==0.7.0", 54 | "pytest==7.1.2", 55 | "pytest-env==0.6.2", 56 | "requests_testadapter==0.3.0", 57 | ] 58 | 59 | # Possibly required by developers of ocean-provider: 60 | dev_requirements = [ 61 | "bumpversion==0.6.0", 62 | "pkginfo", 63 | "twine", 64 | "python-dotenv==0.15.0", 65 | "flake8==5.0.4", 66 | "isort==5.10.1", 67 | "black==22.6.0", 68 | "pre-commit==2.20.0", 69 | "licenseheaders==0.8.8", 70 | ] 71 | 72 | setup( 73 | author="leucothia", 74 | author_email="devops@oceanprotocol.com", 75 | classifiers=[ 76 | "Development Status :: 2 - Pre-Alpha", 77 | "Intended Audience :: Developers", 78 | "License :: OSI Approved :: Apache Software License", 79 | "Natural Language :: English", 80 | "Programming Language :: Python :: 3.8", 81 | ], 82 | description="🐳 Ocean Provider.", 83 | extras_require={ 84 | "test": test_requirements, 85 | "dev": dev_requirements + test_requirements, 86 | }, 87 | install_requires=install_requirements, 88 | license="Apache Software License 2.0", 89 | long_description=readme, 90 | long_description_content_type="text/markdown", 91 | include_package_data=True, 92 | keywords="ocean-provider", 93 | name="ocean-provider", 94 | packages=find_packages( 95 | include=["ocean_provider", "ocean_provider.utils", "ocean_provider.app"] 96 | ), 97 | setup_requires=setup_requirements, 98 | test_suite="tests", 99 | tests_require=test_requirements, 100 | url="https://github.com/oceanprotocol/provider-py", 101 | # fmt: off 102 | # bumpversion needs single quotes 103 | version='2.1.6', 104 | # fmt: on 105 | zip_safe=False, 106 | ) 107 | -------------------------------------------------------------------------------- /ocean_provider/utils/test/test_url.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import logging 6 | from unittest.mock import Mock, patch 7 | 8 | import pytest 9 | from ocean_provider.utils.url import ( 10 | get_redirect, 11 | is_safe_url, 12 | is_this_same_provider, 13 | is_url, 14 | ) 15 | from requests.models import Response 16 | 17 | test_logger = logging.getLogger(__name__) 18 | 19 | 20 | @pytest.mark.unit 21 | def test_is_url(): 22 | assert is_url("https://jsonplaceholder.typicode.com/") is True 23 | assert is_url("127.0.0.1") is False 24 | assert is_url("169.254.169.254") is False 25 | assert is_url("http://169.254.169.254/latest/meta-data/hostname") is True 26 | 27 | 28 | @pytest.mark.unit 29 | def test_is_safe_url(): 30 | assert is_safe_url("https://jsonplaceholder.typicode.com/") is True 31 | assert is_safe_url("127.0.0.1") is False 32 | assert is_safe_url("169.254.169.254") is False 33 | 
assert is_safe_url("http://169.254.169.254/latest/meta-data/hostname") is False 34 | 35 | assert is_safe_url("https://bit.ly/3zqzc4m") is True # jsonplaceholder example 36 | assert is_safe_url("https://bit.ly/3znh0Zg") is False # meta-data/hostname example 37 | 38 | assert is_safe_url("blabla") is False 39 | 40 | 41 | @pytest.mark.unit 42 | def test_is_same_provider(): 43 | assert is_this_same_provider("http://localhost:8030", 8996) 44 | 45 | 46 | @pytest.mark.unit 47 | def test_get_redirect(): 48 | assert ( 49 | get_redirect("https://bit.ly/3zqzc4m") 50 | == "https://jsonplaceholder.typicode.com/" 51 | ) 52 | 53 | redirect_response = Mock(spec=Response) 54 | redirect_response.is_redirect = True 55 | redirect_response.status_code = 200 56 | redirect_response.headers = {"Location": "/root-relative.html"} 57 | 58 | normal_response = Mock(spec=Response) 59 | normal_response.is_redirect = False 60 | normal_response.status_code = 200 61 | 62 | with patch("ocean_provider.utils.url.requests.head") as mock: 63 | mock.side_effect = [redirect_response, normal_response] 64 | assert ( 65 | get_redirect("https://some-url.com:3000/index") 66 | == "https://some-url.com:3000/root-relative.html" 67 | ) 68 | 69 | redirect_response = Mock(spec=Response) 70 | redirect_response.is_redirect = True 71 | redirect_response.status_code = 200 72 | redirect_response.headers = {"Location": "relative.html"} 73 | 74 | normal_response = Mock(spec=Response) 75 | normal_response.is_redirect = False 76 | normal_response.status_code = 200 77 | 78 | with patch("ocean_provider.utils.url.requests.head") as mock: 79 | mock.side_effect = [redirect_response, normal_response] 80 | assert ( 81 | get_redirect("https://some-url.com:3000/index") 82 | == "https://some-url.com:3000/index/relative.html" 83 | ) 84 | 85 | redirect_response = Mock(spec=Response) 86 | redirect_response.is_redirect = True 87 | redirect_response.status_code = 200 88 | redirect_response.headers = {"Location": "https://some-url.com:3000/index"} 89 | 90 | with patch("ocean_provider.utils.url.requests.head") as mock: 91 | mock.return_value = redirect_response 92 | assert get_redirect("https://some-url.com:3000/index") is None 93 | assert mock.call_count == 6 94 | -------------------------------------------------------------------------------- /.github/workflows/pytest.yml: -------------------------------------------------------------------------------- 1 | ## 2 | ## Copyright 2023 Ocean Protocol Foundation 3 | ## SPDX-License-Identifier: Apache-2.0 4 | ## 5 | name: Provider tests 6 | 7 | on: 8 | - push 9 | - pull_request 10 | 11 | jobs: 12 | build: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - name: Setup Provider 16 | uses: actions/checkout@v2 17 | - name: Set up Python 3.8 18 | uses: actions/setup-python@v2 19 | with: 20 | python-version: '3.8' 21 | - uses: actions/checkout@v2 22 | name: Checkout Barge 23 | with: 24 | repository: "oceanprotocol/barge" 25 | path: 'barge' 26 | - name: Login to Docker Hub 27 | if: ${{ env.DOCKERHUB_PASSWORD && env.DOCKERHUB_USERNAME }} 28 | run: | 29 | echo "Login to Docker Hub";echo "$DOCKERHUB_PASSWORD" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin 30 | env: 31 | DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} 32 | DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }} 33 | - name: Run Barge 34 | working-directory: ${{ github.workspace }}/barge 35 | run: | 36 | bash -x start_ocean.sh --no-dashboard 2>&1 --with-rbac --with-provider2 --with-thegraph --with-c2d --skip-subgraph-deploy > start_ocean.log & 37 | - name: 
Install dependencies 38 | working-directory: ${{ github.workspace }} 39 | run: | 40 | python -m pip install --upgrade pip 41 | pip install -r requirements_dev.txt 42 | - name: Delete default runner images 43 | run: | 44 | docker image rm node:14 45 | docker image rm node:14-alpine 46 | docker image rm node:16 47 | docker image rm node:16-alpine 48 | docker image rm node:18 49 | docker image rm node:18-alpine 50 | docker image rm node:20 51 | docker image rm node:20-alpine 52 | docker image rm buildpack-deps:buster 53 | docker image rm buildpack-deps:bullseye 54 | docker image rm debian:10 55 | docker image rm debian:11 56 | docker image rm moby/buildkit:latest 57 | docker image rm alpine:3.16 58 | docker image rm alpine:3.17 59 | docker image rm alpine:3.18 60 | docker image rm ubuntu:18.04 61 | docker image rm ubuntu:20.04 62 | - name: Wait for contracts deployment and C2D cluster to be ready 63 | working-directory: ${{ github.workspace }}/barge 64 | run: | 65 | for i in $(seq 1 250); do 66 | sleep 10 67 | [ -f "$HOME/.ocean/ocean-contracts/artifacts/ready" -a -f "$HOME/.ocean/ocean-c2d/ready" ] && break 68 | done 69 | - name: Verify deployments 70 | run: | 71 | cat $HOME/.ocean/ocean-contracts/artifacts/address.json 72 | - name: Test with pytest 73 | run: | 74 | coverage run --source ocean_provider -m pytest 75 | coverage report 76 | coverage xml 77 | - name: docker logs 78 | run: docker logs ocean_aquarius_1 && docker logs ocean_provider_1 && docker logs ocean_provider2_1 && docker logs ocean_ipfs_1 79 | if: ${{ failure() }} 80 | - name: Publish code coverage 81 | uses: paambaati/codeclimate-action@v2.7.5 82 | env: 83 | CC_TEST_REPORTER_ID: b0d75c25d5176c59e8ea665bf74396d9ee1bdf2c97f11ccc6869f9e91d80a6c7 84 | 85 | dockerbuild: 86 | runs-on: ubuntu-latest 87 | needs: [build] 88 | if: ${{ success() && github.event_name == 'pull_request'}} 89 | steps: 90 | - name: Checkout 91 | uses: actions/checkout@v2 92 | - name: Build 93 | run: docker build -t "testimage:latest" . 
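      # Note: `needs: [build]` plus the `if:` expression above gate this job —
      # the image build runs only on pull requests, and only after the test job succeeds.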
94 | -------------------------------------------------------------------------------- /ocean_provider/utils/test/test_credentials.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | from copy import deepcopy 6 | 7 | import pytest 8 | from ocean_provider.utils.asset import Asset 9 | from ocean_provider.utils.consumable import ConsumableCodes, MalformedCredential 10 | from ocean_provider.utils.credentials import AddressCredential 11 | from tests.ddo.ddo_sa_sample_with_credentials_v4 import json_dict 12 | from tests.test_helpers import get_resource_path 13 | 14 | 15 | @pytest.mark.unit 16 | def test_asset_credentials_addresses_both(): 17 | """Tests asset credentials when both deny and allow lists exist on the asset.""" 18 | sample_asset_path = get_resource_path("ddo", "ddo_sa_sample_with_credentials.json") 19 | assert sample_asset_path.exists(), "{} does not exist!".format(sample_asset_path) 20 | 21 | ddo = deepcopy(json_dict) 22 | asset = Asset(ddo) 23 | 24 | address_credential = AddressCredential(asset) 25 | assert address_credential.get_addresses_of_class("allow") == ["0x123", "0x456a"] 26 | assert address_credential.get_addresses_of_class("deny") == ["0x2222", "0x333"] 27 | assert ( 28 | address_credential.validate_access({"type": "address", "value": "0x111"}) 29 | == ConsumableCodes.CREDENTIAL_NOT_IN_ALLOW_LIST 30 | ) 31 | assert ( 32 | address_credential.validate_access({"type": "address", "value": "0x456A"}) 33 | == ConsumableCodes.OK 34 | ) 35 | # if "allow" exists, "deny" is not checked anymore 36 | 37 | 38 | @pytest.mark.unit 39 | def test_asset_credentials_addresses_only_deny(): 40 | """Tests asset credentials when only the deny list exists on the asset.""" 41 | sample_asset_path = get_resource_path("ddo", "ddo_sa_sample_with_credentials.json") 42 | assert sample_asset_path.exists(), "{} does not exist!".format(sample_asset_path) 43 | 44 | ddo = deepcopy(json_dict) 45 | asset = Asset(ddo) 46 | 47 | # remove allow to test the behaviour of deny 48 | asset.credentials.pop("allow") 49 | 50 | address_credential = AddressCredential(asset) 51 | assert address_credential.get_addresses_of_class("allow") == [] 52 | assert address_credential.get_addresses_of_class("deny") == ["0x2222", "0x333"] 53 | assert ( 54 | address_credential.validate_access({"type": "address", "value": "0x111"}) 55 | == ConsumableCodes.OK 56 | ) 57 | assert ( 58 | address_credential.validate_access({"type": "address", "value": "0x333"}) 59 | == ConsumableCodes.CREDENTIAL_IN_DENY_LIST 60 | ) 61 | 62 | credential = {"type": "address", "value": ""} 63 | with pytest.raises(MalformedCredential): 64 | address_credential.validate_access(credential) 65 | 66 | 67 | @pytest.mark.unit 68 | def test_asset_credentials_addresses_no_access_list(): 69 | """Tests asset credentials when neither deny, nor allow lists exist on the asset.""" 70 | sample_asset_path = get_resource_path("ddo", "ddo_sa_sample_with_credentials.json") 71 | assert sample_asset_path.exists(), "{} does not exist!".format(sample_asset_path) 72 | 73 | ddo = deepcopy(json_dict) 74 | asset = Asset(ddo) 75 | 76 | # if "allow" OR "deny" exist, we need a credential, 77 | # so remove both to test the behaviour of no credential supplied 78 | address_credential = AddressCredential(asset) 79 | asset.credentials.pop("allow") 80 | asset.credentials.pop("deny") 81 | 82 | assert address_credential.validate_access() == ConsumableCodes.OK 83 | 84 
| # test that we can use another credential if address is not required 85 | assert ( 86 | asset.is_consumable( 87 | {"type": "somethingelse", "value": "test"}, with_connectivity_check=False 88 | ) 89 | == ConsumableCodes.OK 90 | ) 91 | -------------------------------------------------------------------------------- /ocean_provider/utils/test/test_currency.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | from decimal import Decimal, localcontext 6 | 7 | import pytest 8 | from ocean_provider.utils.currency import ( 9 | ETHEREUM_DECIMAL_CONTEXT, 10 | MAX_ETHER, 11 | MAX_WEI, 12 | MIN_ETHER, 13 | MIN_WEI, 14 | parse_units, 15 | to_wei, 16 | ) 17 | 18 | USDT_DECIMALS = 6 19 | MIN_USDT = Decimal("0.000001") 20 | MAX_USDT = Decimal(MAX_WEI).scaleb(-USDT_DECIMALS, context=ETHEREUM_DECIMAL_CONTEXT) 21 | 22 | SEVEN_DECIMALS = 7 23 | MIN_SEVEN = Decimal("0.0000001") 24 | MAX_SEVEN = Decimal(MAX_WEI).scaleb(-SEVEN_DECIMALS, context=ETHEREUM_DECIMAL_CONTEXT) 25 | 26 | 27 | @pytest.mark.unit 28 | def test_to_wei(): 29 | """Test the to_wei function""" 30 | assert to_wei(Decimal("0")) == 0, "Zero ether (Decimal) should equal zero wei" 31 | assert to_wei("0") == 0, "Zero ether (string) should equal zero wei" 32 | assert to_wei(0) == 0, "Zero ether (int) should equal zero wei" 33 | assert ( 34 | to_wei(Decimal("0.123456789123456789")) == 123456789_123456789 35 | ), "Conversion from ether (Decimal) to wei failed." 36 | assert ( 37 | to_wei("0.123456789123456789") == 123456789_123456789 38 | ), "Conversion from ether (string) to wei failed." 39 | assert ( 40 | to_wei(1) == 1_000000000_000000000 41 | ), "Conversion from ether (int) to wei failed." 42 | 43 | assert ( 44 | to_wei("0.1234567891234567893") == 123456789_123456789 45 | ), "Conversion from ether to wei failed, supposed to round towards 0 (aka. truncate)." 46 | assert ( 47 | to_wei("0.1234567891234567897") == 123456789_123456789 48 | ), "Conversion from ether to wei failed, supposed to round towards 0 (aka. truncate)." 49 | 50 | assert ( 51 | to_wei(MIN_ETHER) == MIN_WEI 52 | ), "Conversion from minimum ether to minimum wei failed." 53 | 54 | assert ( 55 | to_wei(MAX_ETHER) == MAX_WEI 56 | ), "Conversion from maximum ether to maximum wei failed." 
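    # A worked example of the truncation rule, consistent with the asserts above:
    # to_wei("1.5") == 1_500000000_000000000, and any digits past 18 decimal places
    # are dropped toward zero (ROUND_DOWN in ETHEREUM_DECIMAL_CONTEXT).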
57 | 58 | # Use ETHEREUM_DECIMAL_CONTEXT when performing arithmetic on MAX_ETHER 59 | with localcontext(ETHEREUM_DECIMAL_CONTEXT): 60 | with pytest.raises(ValueError): 61 | to_wei(MAX_ETHER + 1) 62 | 63 | 64 | @pytest.mark.unit 65 | def test_parse_units(): 66 | """Test the parse_units function""" 67 | assert parse_units("0", USDT_DECIMALS) == 0 68 | assert parse_units("0.123456789123456789", USDT_DECIMALS) == 123456 69 | assert parse_units("1.123456789123456789", USDT_DECIMALS) == 1_123456 70 | assert parse_units("5278.02", USDT_DECIMALS) == 5278_020000 71 | assert parse_units(MIN_USDT, USDT_DECIMALS) == MIN_WEI 72 | assert parse_units(MAX_USDT, USDT_DECIMALS) == MAX_WEI 73 | 74 | # Use ETHEREUM_DECIMAL_CONTEXT when performing arithmetic on MAX_USDT 75 | with localcontext(ETHEREUM_DECIMAL_CONTEXT): 76 | with pytest.raises(ValueError): 77 | parse_units(MAX_USDT + 1, USDT_DECIMALS) 78 | 79 | assert parse_units("0", "mwei") == 0 80 | assert parse_units("0.123456789123456789", "mwei") == 123456 81 | assert parse_units("1.123456789123456789", "mwei") == 1_123456 82 | assert parse_units("5278.02", "mwei") == 5278_020000 83 | assert parse_units(MIN_USDT, "mwei") == MIN_WEI 84 | assert parse_units(MAX_USDT, "mwei") == MAX_WEI 85 | 86 | # Use ETHEREUM_DECIMAL_CONTEXT when performing arithmetic on MAX_USDT 87 | with localcontext(ETHEREUM_DECIMAL_CONTEXT): 88 | with pytest.raises(ValueError): 89 | parse_units(MAX_USDT + 1, "mwei") 90 | 91 | assert parse_units("0", SEVEN_DECIMALS) == 0 92 | assert parse_units("0.123456789", SEVEN_DECIMALS) == 1234567 93 | assert parse_units("1.123456789", SEVEN_DECIMALS) == 1_1234567 94 | assert parse_units("5278.02", SEVEN_DECIMALS) == 5278_0200000 95 | assert parse_units(MIN_SEVEN, SEVEN_DECIMALS) == MIN_WEI 96 | assert parse_units(MAX_SEVEN, SEVEN_DECIMALS) == MAX_WEI 97 | -------------------------------------------------------------------------------- /tests/ddo/ddo_with_compute_service.json: -------------------------------------------------------------------------------- 1 | { 2 | "@context": "https://w3id.org/future-method/v1", 3 | "authentication": [], 4 | "created": "2019-04-09T19:02:11Z", 5 | "id": "did:op:8d1b4d73e7af4634958f071ab8dfe7ab0df14019755e444090fd392c8ec9c3f4", 6 | "proof": { 7 | "created": "2019-04-09T19:02:11Z", 8 | "creator": "0x00Bd138aBD70e2F00903268F3Db08f2D25677C9e", 9 | "signatureValue": "1cd57300733bcbcda0beb59b3e076de6419c0d7674e7befb77820b53c79e3aa8f1776effc64cf088bad8cb694cc4d71ebd74a13b2f75893df5a53f3f318f6cf828", 10 | "type": "DDOIntegritySignature" 11 | }, 12 | "publicKey": [ 13 | { 14 | "id": "did:op:8d1b4d73e7af4634958f071ab8dfe7ab0df14019755e444090fd392c8ec9c3f4", 15 | "owner": "0x00Bd138aBD70e2F00903268F3Db08f2D25677C9e", 16 | "type": "EthereumECDSAKey" 17 | } 18 | ], 19 | "service": [ 20 | { 21 | "type": "metadata", 22 | "index": 0, 23 | "serviceEndpoint": "http://myaquarius.org/api/provider/assets/metadata/{did}", 24 | "attributes": { 25 | "main": { 26 | "author": "Met Office", 27 | "dateCreated": "2019-02-08T08:13:49Z", 28 | "files": [ 29 | { 30 | "url": "https://raw.githubusercontent.com/tbertinmahieux/MSongsDB/master/Tasks_Demos/CoverSongs/shs_dataset_test.txt", 31 | "index": 0, 32 | "checksum": "efb2c764274b745f5fc37f97c6b0e764", 33 | "contentLength": "4535431", 34 | "contentType": "text/csv", 35 | "encoding": "UTF-8", 36 | "compression": "zip" 37 | } 38 | ], 39 | "license": "CC-BY", 40 | "name": "UK Weather information 2011", 41 | "cost": "1", 42 | "type": "dataset" 43 | }, 44 | "additionalInformation": {} 45 | } 46 | }, 
47 | { 48 | "type": "compute", 49 | "index": 2, 50 | "serviceEndpoint": "http://ocean-provider.org/api/services/compute", 51 | "templateId": "", 52 | "attributes": { 53 | "main": { 54 | "name": "dataAssetComputingServiceAgreement", 55 | "creator": "0x00Bd138aBD70e2F00903268F3Db08f2D25677C9e", 56 | "datePublished": "2019-04-09T19:02:11Z", 57 | "cost": "10", 58 | "timeout": 86400, 59 | "provider": { 60 | "type": "Azure", 61 | "description": "", 62 | "environment": { 63 | "cluster": { 64 | "type": "Kubernetes", 65 | "url": "http://10.0.0.17/xxx" 66 | }, 67 | "supportedContainers": [ 68 | { 69 | "image": "tensorflow/tensorflow", 70 | "tag": "latest", 71 | "checksum": "sha256:cb57ecfa6ebbefd8ffc7f75c0f00e57a7fa739578a429b6f72a0df19315deadc" 72 | }, 73 | { 74 | "image": "tensorflow/tensorflow", 75 | "tag": "latest", 76 | "checksum": "sha256:cb57ecfa6ebbefd8ffc7f75c0f00e57a7fa739578a429b6f72a0df19315deadc" 77 | } 78 | ], 79 | "supportedServers": [ 80 | { 81 | "serverId": "1", 82 | "serverType": "xlsize", 83 | "cost": "50", 84 | "cpu": "16", 85 | "gpu": "0", 86 | "memory": "128gb", 87 | "disk": "160gb", 88 | "maxExecutionTime": 86400 89 | }, 90 | { 91 | "serverId": "2", 92 | "serverType": "medium", 93 | "cost": "10", 94 | "cpu": "2", 95 | "gpu": "0", 96 | "memory": "8gb", 97 | "disk": "80gb", 98 | "maxExecutionTime": 86400 99 | } 100 | ] 101 | } 102 | } 103 | }, 104 | "additionalInformation": {} 105 | } 106 | } 107 | ] 108 | } -------------------------------------------------------------------------------- /ocean_provider/utils/accounts.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import logging 6 | from _decimal import Decimal 7 | 8 | from eth_keys import KeyAPI 9 | from eth_keys.backends import NativeECCBackend 10 | from ocean_provider.exceptions import InvalidSignatureError 11 | from ocean_provider.user_nonce import get_nonce 12 | from web3 import Web3 13 | 14 | logger = logging.getLogger(__name__) 15 | keys = KeyAPI(NativeECCBackend) 16 | 17 | 18 | def verify_nonce(signer_address, nonce): 19 | db_nonce = get_nonce(signer_address) 20 | if db_nonce and Decimal(nonce) < Decimal(db_nonce): 21 | msg = ( 22 | f"Invalid signature expected nonce ({db_nonce}) > current nonce ({nonce})." 
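            # i.e. nonces must be non-decreasing per address: the request is
            # rejected whenever its nonce is lower than the stored one.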
23 | ) 24 | logger.error(msg) 25 | raise InvalidSignatureError(msg) 26 | 27 | return True 28 | 29 | 30 | def verify_signature(signer_address, signature, original_msg, nonce): 31 | """ 32 | :return: True if signature is valid, throws InvalidSignatureError otherwise 33 | """ 34 | verify_nonce(signer_address, nonce) 35 | message = f"{original_msg}{str(nonce)}" 36 | 37 | try: 38 | signature_bytes = Web3.toBytes(hexstr=signature) 39 | if signature_bytes[64] == 27: 40 | new_signature = b"".join([signature_bytes[0:64], b"\x00"]) 41 | elif signature_bytes[64] == 28: 42 | new_signature = b"".join([signature_bytes[0:64], b"\x01"]) 43 | else: 44 | new_signature = signature_bytes 45 | 46 | signature = keys.Signature(signature_bytes=new_signature) 47 | 48 | message_hash = Web3.solidityKeccak( 49 | ["bytes"], 50 | [Web3.toBytes(text=message)], 51 | ) 52 | prefix = "\x19Ethereum Signed Message:\n32" 53 | signable_hash = Web3.solidityKeccak( 54 | ["bytes", "bytes"], [Web3.toBytes(text=prefix), Web3.toBytes(message_hash)] 55 | ) 56 | vkey = keys.ecdsa_recover(signable_hash, signature) 57 | except Exception as e: 58 | msg = ( 59 | f"Invalid signature {signature} for " 60 | f"ethereum address {signer_address}, message {original_msg} " 61 | f"and nonce {nonce}. Got {e}" 62 | ) 63 | logger.error(msg) 64 | raise InvalidSignatureError(msg) 65 | 66 | if Web3.toChecksumAddress(signer_address) != Web3.toChecksumAddress( 67 | vkey.to_address() 68 | ): 69 | msg = ( 70 | f"Invalid signature {signature} for " 71 | f"ethereum address {signer_address}, message {original_msg} " 72 | f"and nonce {nonce}. Got {vkey.to_address()}" 73 | ) 74 | logger.error(msg) 75 | raise InvalidSignatureError(msg) 76 | 77 | return True 78 | 79 | 80 | def get_private_key(wallet): 81 | """Returns the private key of the given wallet.""" 82 | pk = wallet.key 83 | if not isinstance(pk, bytes): 84 | pk = Web3.toBytes(hexstr=pk) 85 | return keys.PrivateKey(pk) 86 | 87 | 88 | def sign_message(message, wallet): 89 | """ 90 | Signs the message with the private key of the given Wallet 91 | 92 | :param message: str 93 | :param wallet: Wallet instance 94 | :return: signature 95 | """ 96 | keys_pk = keys.PrivateKey(wallet.key) 97 | hexable = Web3.toBytes(text=message) if isinstance(message, str) else message 98 | 99 | message_hash = Web3.solidityKeccak( 100 | ["bytes"], 101 | [Web3.toHex(hexable)], 102 | ) 103 | prefix = "\x19Ethereum Signed Message:\n32" 104 | signable_hash = Web3.solidityKeccak( 105 | ["bytes", "bytes"], [Web3.toBytes(text=prefix), Web3.toBytes(message_hash)] 106 | ) 107 | signed = keys.ecdsa_sign(message_hash=signable_hash, private_key=keys_pk) 108 | 109 | v = str(Web3.toHex(Web3.toBytes(signed.v))) 110 | r = str(Web3.toHex(Web3.toBytes(signed.r).rjust(32, b"\0"))) 111 | s = str(Web3.toHex(Web3.toBytes(signed.s).rjust(32, b"\0"))) 112 | 113 | signature = "0x" + r[2:] + s[2:] + v[2:] 114 | 115 | return signature 116 | -------------------------------------------------------------------------------- /ocean_provider/run.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import logging 6 | 7 | from flask import jsonify, request 8 | from flask_swagger import swagger 9 | from flask_swagger_ui import get_swaggerui_blueprint 10 | from ocean_provider.constants import BaseURLs, Metadata 11 | from ocean_provider.myapp import app 12 | from ocean_provider.routes import services 13 | from 
ocean_provider.utils.basics import get_configured_chains, get_provider_addresses 14 | from ocean_provider.utils.error_responses import strip_and_replace_urls 15 | from ocean_provider.utils.util import get_request_data 16 | from ocean_provider.version import get_version 17 | 18 | logger = logging.getLogger(__name__) 19 | 20 | 21 | @app.before_request 22 | def log_incoming_request(): 23 | logger.info( 24 | f"incoming request = {request.scheme}, {request.method}, {request.remote_addr}, {request.full_path}" 25 | ) 26 | 27 | 28 | @app.after_request 29 | def add_header(response): 30 | response.headers["Connection"] = "close" 31 | return response 32 | 33 | 34 | @app.errorhandler(Exception) 35 | def handle_error(error): 36 | code = getattr(error, "code", 503) 37 | 38 | error = strip_and_replace_urls(str(error)) 39 | 40 | response = jsonify(error=str(error), context=get_request_data(request)) 41 | response.status_code = code 42 | response.headers["Connection"] = "close" 43 | 44 | if code != 404: 45 | logger.error(f"error: {error}, payload: {request.data}", exc_info=1) 46 | else: 47 | logger.info(f"error: {str(error)}, payload: {request.data}") 48 | 49 | return response 50 | 51 | 52 | def get_services_endpoints(): 53 | services_endpoints = dict( 54 | map( 55 | lambda url: (url.endpoint.replace("services.", ""), url), 56 | filter( 57 | lambda url: url.endpoint.startswith("services."), 58 | app.url_map.iter_rules(), 59 | ), 60 | ) 61 | ) 62 | for key, value in services_endpoints.items(): 63 | services_endpoints[key] = ( 64 | list( 65 | map( 66 | str, 67 | filter( 68 | lambda method: str(method) not in ["OPTIONS", "HEAD"], 69 | value.methods, 70 | ), 71 | ) 72 | )[0], 73 | str(value), 74 | ) 75 | return services_endpoints 76 | 77 | 78 | @app.route("/") 79 | def version(): 80 | """ 81 | Returns the provider data for a user: 82 | - software; 83 | - version; 84 | - provider addresses (one per configured chain); 85 | - chain ids; 86 | - service endpoints, which lists all 87 | the existing endpoints from routes.py.
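
    Illustrative response shape (placeholder values only, matching the
    fields set below):

        {
            "software": "...", "version": "x.y.z",
            "providerAddresses": {"<chainId>": "<providerAddress>"},
            "chainIds": [<chainId>, ...],
            "serviceEndpoints": {"<name>": ["<HTTP method>", "<URL rule>"]}
        }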
88 | """ 89 | logger.info("root endpoint called") 90 | info = dict() 91 | info["software"] = Metadata.TITLE 92 | info["version"] = get_version() 93 | info["providerAddresses"] = get_provider_addresses() 94 | info["chainIds"] = get_configured_chains() 95 | info["serviceEndpoints"] = get_services_endpoints() 96 | response = jsonify(info) 97 | logger.info(f"root endpoint response = {response}") 98 | return response 99 | 100 | 101 | @app.route("/spec") 102 | def spec(): 103 | logger.info("spec endpoint called") 104 | swag = swagger(app) 105 | swag["info"]["version"] = get_version() 106 | swag["info"]["title"] = Metadata.TITLE 107 | swag["info"]["description"] = Metadata.DESCRIPTION 108 | response = jsonify(swag) 109 | logger.debug(f"spec endpoint response = {response}") 110 | return response 111 | 112 | 113 | # Call factory function to create our blueprint 114 | swaggerui_blueprint = get_swaggerui_blueprint( 115 | BaseURLs.SWAGGER_URL, 116 | "/spec", 117 | config={"app_name": "Test application"}, # Swagger UI config overrides 118 | ) 119 | 120 | # Register blueprint at URL 121 | app.register_blueprint(swaggerui_blueprint, url_prefix=BaseURLs.SWAGGER_URL) 122 | app.register_blueprint(services, url_prefix=BaseURLs.SERVICES_URL) 123 | 124 | if __name__ == "__main__": 125 | app.run(port=8030) 126 | -------------------------------------------------------------------------------- /ocean_provider/file_types/types/smartcontract.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | import json 3 | import logging 4 | from typing import Any, Optional, Tuple 5 | from uuid import uuid4 6 | 7 | from enforce_typing import enforce_types 8 | from flask import Response 9 | from ocean_provider.file_types.definitions import FilesType 10 | from ocean_provider.utils.basics import get_web3 11 | 12 | logger = logging.getLogger(__name__) 13 | 14 | 15 | class SmartContractCall(FilesType): 16 | @enforce_types 17 | def __init__( 18 | self, 19 | address: Optional[str] = None, 20 | chain_id: Optional[int] = None, 21 | abi: Optional[dict] = None, 22 | userdata=None, 23 | ) -> None: 24 | self.address = address 25 | self.chain_id = chain_id 26 | self.type = "smartcontract" 27 | self.abi = abi 28 | self.userdata = None 29 | if userdata: 30 | self.userdata = ( 31 | userdata if isinstance(userdata, dict) else json.loads(userdata) 32 | ) 33 | 34 | def get_download_url(self): 35 | return "" 36 | 37 | @enforce_types 38 | def validate_dict(self) -> Tuple[bool, Any]: 39 | if not self.address: 40 | return False, "malformed smartcontract type, missing contract address" 41 | # validate abi 42 | 43 | inputs = self.abi.get("inputs") 44 | t_type = self.abi.get("type") 45 | if inputs is None or t_type != "function": 46 | return False, "invalid abi" 47 | 48 | mutability = self.abi.get("stateMutability", None) 49 | if mutability not in ["view", "pure"]: 50 | return False, "only view or pure functions are allowed" 51 | 52 | if not self.abi.get("name"): 53 | return False, "missing name" 54 | 55 | # check that all inputs have a match in userdata 56 | if len(inputs) > 0 and self.userdata is None: 57 | return False, "Missing parameters" 58 | 59 | missing_inputs = [] 60 | for input_item in inputs: 61 | value = self.userdata.get(input_item.get("name")) 62 | if not value: 63 | missing_inputs.append(input_item.name) 64 | 65 | if missing_inputs: 66 | return False, "Missing userparams: " + ",".join(missing_inputs) 67 | 68 | return True, self 69 | 70 | @enforce_types 71 | def get_filename(self) -> 
str: 72 | return uuid4().hex 73 | 74 | def fetch_smartcontract_call(self): 75 | web3 = get_web3(self.chain_id) 76 | contract = web3.eth.contract( 77 | address=web3.toChecksumAddress(self.address), abi=[self.abi] 78 | ) 79 | function = contract.functions[self.abi.get("name")] 80 | args = dict() 81 | 82 | for input_item in self.abi.get("inputs"): 83 | name = input_item.get("name") 84 | args[name] = self.userdata.get(name) 85 | if input_item.get("type") == "address": 86 | args[name] = web3.toChecksumAddress(args[name]) 87 | 88 | result = function(**args).call() 89 | 90 | if not isinstance(result, str): 91 | return json.dumps(result), "application/json" 92 | 93 | return result, "application/text" 94 | 95 | def check_details(self, with_checksum=False): 96 | try: 97 | result, t_type = self.fetch_smartcontract_call() 98 | details = {"contentLength": len(result) or "", "contentType": t_type} 99 | 100 | if with_checksum: 101 | sha = hashlib.sha256() 102 | sha.update(result.encode("utf-8")) 103 | details["checksumType"] = "sha256" 104 | details["checksum"] = sha.hexdigest() 105 | 106 | return True, details 107 | except Exception: 108 | return False, {} 109 | 110 | def build_download_response( 111 | self, 112 | request, 113 | validate_url=True, 114 | ): 115 | try: 116 | result, t_type = self.fetch_smartcontract_call() 117 | return Response( 118 | result, 119 | 200, 120 | ) 121 | except Exception: 122 | raise ValueError("Failed to call contract") 123 | -------------------------------------------------------------------------------- /tests/ddo/ddo_sample_algorithm.json: -------------------------------------------------------------------------------- 1 | { 2 | "@context": "https://w3id.org/did/v1", 3 | "authentication": [ 4 | { 5 | "type": "RsaSignatureAuthentication2018", 6 | "publicKey": "did:op:0ebed8226ada17fde24b6bf2b95d27f8f05fcce09139ff5cec31f6d81a7cd2ea" 7 | } 8 | ], 9 | "created": "2019-02-08T08:13:49Z", 10 | "id": "did:op:0bc278fee025464f8012b811d1bce8e22094d0984e4e49139df5d5ff7a028bdf", 11 | "proof": { 12 | "created": "2019-02-08T08:13:41Z", 13 | "creator": "0x37BB53e3d293494DE59fBe1FF78500423dcFd43B", 14 | "signatureValue": "did:op:0bc278fee025464f8012b811d1bce8e22094d0984e4e49139df5d5ff7a028bdf", 15 | "type": "DDOIntegritySignature", 16 | "checksum": { 17 | "0": "0x52b5c93b82dd9e7ecc3d9fdf4755f7f69a54484941897dc517b4adfe3bbc3377", 18 | "1": "0x999999952b5c93b82dd9e7ecc3d9fdf4755f7f69a54484941897dc517b4adfe3" 19 | } 20 | }, 21 | "publicKey": [ 22 | { 23 | "id": "did:op:b6e2eb5eff1a093ced9826315d5a4ef6c5b5c8bd3c49890ee284231d7e1d0aaa#keys-1", 24 | "type": "RsaVerificationKey2018", 25 | "owner": "did:op:6027c1e7cbae06a91fce0557ee53195284825f56a7100be0c53cbf4391aa26cc", 26 | "publicKeyPem": "-----BEGIN PUBLIC KEY...END PUBLIC KEY-----\r\n" 27 | }, 28 | { 29 | "id": "did:op:b6e2eb5eff1a093ced9826315d5a4ef6c5b5c8bd3c49890ee284231d7e1d0aaa#keys-2", 30 | "type": "Ed25519VerificationKey2018", 31 | "owner": "did:op:4c27a254e607cdf91a1206480e7eb8c74856102316c1a462277d4f21c02373b6", 32 | "publicKeyBase58": "H3C2AVvLMv6gmMNam3uVAjZpfkcJCwDwnZn6z3wXmqPV" 33 | }, 34 | { 35 | "id": "did:op:b6e2eb5eff1a093ced9826315d5a4ef6c5b5c8bd3c49890ee284231d7e1d0aaa#keys-3", 36 | "type": "RsaPublicKeyExchangeKey2018", 37 | "owner": "did:op:5f6b885202ffb9643874be529302eb00d55e226959f1fbacaeda592c5b5c9484", 38 | "publicKeyPem": "-----BEGIN PUBLIC KEY...END PUBLIC KEY-----\r\n" 39 | } 40 | ], 41 | "verifiableCredential": [ 42 | { 43 | "@context": [ 44 | "https://www.w3.org/2018/credentials/v1", 45 |
"https://www.w3.org/2018/credentials/examples/v1" 46 | ], 47 | "id": "1872", 48 | "type": [ 49 | "read", 50 | "update", 51 | "deactivate" 52 | ], 53 | "issuer": "0x610D9314EDF2ced7681BA1633C33fdb8cF365a12", 54 | "issuanceDate": "2019-01-01T19:73:24Z", 55 | "credentialSubject": { 56 | "id": "0x89328493849328493284932" 57 | }, 58 | "proof": { 59 | "type": "RsaSignature2018", 60 | "created": "2019-01-01T19:73:24Z", 61 | "proofPurpose": "assertionMethod", 62 | "signatureValue": "ABCJSDAO23...1tzjn4w==" 63 | } 64 | } 65 | ], 66 | "service": [ 67 | { 68 | "index": 0, 69 | "serviceEndpoint": "http://localhost:5000/api/aquarius/assets/ddo/{did}", 70 | "type": "metadata", 71 | "attributes": { 72 | "main": { 73 | "author": "John Doe", 74 | "dateCreated": "2019-02-08T08:13:49Z", 75 | "license": "CC-BY", 76 | "name": "My super algorithm", 77 | "cost": "1", 78 | "type": "algorithm", 79 | "algorithm": { 80 | "language": "scala", 81 | "format": "docker-image", 82 | "version": "0.1", 83 | "container": { 84 | "entrypoint": "node $ALGO", 85 | "image": "node", 86 | "tag": "10" 87 | } 88 | }, 89 | "files": [ 90 | { 91 | "name": "build_model", 92 | "url": "https://raw.githubusercontent.com/oceanprotocol/test-algorithm/master/javascript/algo.js", 93 | "index": 0, 94 | "checksum": "efb2c764274b745f5fc37f97c6b0e761", 95 | "contentLength": "4535431", 96 | "contentType": "text/plain", 97 | "encoding": "UTF-8", 98 | "compression": "zip" 99 | } 100 | ] 101 | }, 102 | "additionalInformation": { 103 | "description": "Workflow to aggregate weather information", 104 | "tags": [ 105 | "weather", 106 | "uk", 107 | "2011", 108 | "workflow", 109 | "aggregation" 110 | ], 111 | "copyrightHolder": "John Doe" 112 | } 113 | } 114 | } 115 | ] 116 | } 117 | -------------------------------------------------------------------------------- /ocean_provider/routes/auth.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import logging 6 | 7 | import jwt 8 | from flask import jsonify, request 9 | from flask_sieve import validate 10 | from ocean_provider.user_nonce import ( 11 | force_expire_token, 12 | force_restore_token, 13 | is_token_valid, 14 | ) 15 | from ocean_provider.utils.basics import get_provider_private_key 16 | from ocean_provider.utils.util import get_request_data 17 | from ocean_provider.validation.provider_requests import ( 18 | CreateTokenRequest, 19 | DeleteTokenRequest, 20 | ) 21 | 22 | from . import services 23 | 24 | logger = logging.getLogger(__name__) 25 | 26 | 27 | @services.route("/createAuthToken", methods=["GET"]) 28 | @validate(CreateTokenRequest) 29 | def create_auth_token(): 30 | """Creates an AuthToken for the given address, that can replace signature in API calls. 31 | 32 | Accepts a user address and an expiration parameter (future UTC timestamp). 33 | If the token was previously deleted with the same parameters and they are still valid 34 | (expiration date is in the future), the same token is re-enabled. 35 | --- 36 | tags: 37 | - auth 38 | consumes: 39 | - application/json 40 | parameters: 41 | - name: address 42 | description: The address of the API caller 43 | required: true 44 | type: string 45 | - name: expiration 46 | description: A valid future UTC timestamp 47 | required: true 48 | type: string 49 | - name: signature 50 | in: query 51 | description: Signature to verify that the address requestor has rights to create the token. 
52 | responses: 53 | 200: 54 | description: the token was successfully created or restored 55 | 400: 56 | description: issue with the request parameters 57 | 503: 58 | description: Service Unavailable. 59 | 60 | return: created or restored token if successful, otherwise an error string 61 | """ 62 | data = get_request_data(request) 63 | address = data.get("address") 64 | expiration = int(data.get("expiration")) 65 | 66 | pk = get_provider_private_key(use_universal_key=True) 67 | token = jwt.encode({"exp": expiration, "address": address}, pk, algorithm="HS256") 68 | token = token.decode("utf-8") if isinstance(token, bytes) else token 69 | 70 | valid, message = is_token_valid(token, address) 71 | if not valid: 72 | if message == "Token is deleted.": 73 | force_restore_token(token) 74 | else: 75 | return jsonify(error=message), 400 76 | 77 | return jsonify(token=token) 78 | 79 | 80 | @services.route("/deleteAuthToken", methods=["DELETE"]) 81 | @validate(DeleteTokenRequest) 82 | def delete_auth_token(): 83 | """Revokes a given AuthToken if it is still valid. 84 | 85 | Accepts the token and signed request parameters to determine whether the user has 86 | rights to delete/revoke. If the token is already expired or deleted, returns an 87 | error string. If the token is still valid at the time of the request, it is blacklisted, 88 | disallowing API calls with that token. 89 | --- 90 | tags: 91 | - auth 92 | consumes: 93 | - application/json 94 | parameters: 95 | - name: address 96 | description: The address of the API caller 97 | required: true 98 | type: string 99 | - name: token 100 | description: The token string 101 | required: true 102 | type: string 103 | - name: signature 104 | in: query 105 | description: Signature to verify that the address requestor has rights to delete the token. 106 | responses: 107 | 200: 108 | description: the token was successfully deleted 109 | 400: 110 | description: issue with the request parameters 111 | 503: 112 | description: Service Unavailable.
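    # Note: deletion only blacklists the token (see force_expire_token); while
    # the token is still unexpired, createAuthToken with identical parameters
    # restores it, as the corresponding tests exercise.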
113 | 114 | return: success or error message 115 | """ 116 | data = get_request_data(request) 117 | address = data.get("address") 118 | token = data.get("token") 119 | 120 | valid, message = is_token_valid(token, address) 121 | if not valid: 122 | return jsonify(error=message), 400 123 | 124 | force_expire_token(token) 125 | 126 | return jsonify(success="Token has been deactivated.") 127 | -------------------------------------------------------------------------------- /ocean_provider/utils/asset.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import copy 6 | import logging 7 | from typing import Optional 8 | 9 | import requests 10 | from ocean_provider.utils.basics import get_web3 11 | from ocean_provider.utils.consumable import ConsumableCodes 12 | from ocean_provider.utils.credentials import AddressCredential 13 | from ocean_provider.utils.data_nft import get_data_nft_contract 14 | from ocean_provider.utils.services import Service 15 | 16 | logger = logging.getLogger(__name__) 17 | 18 | 19 | class Asset: 20 | def __init__(self, asset_dict: dict) -> None: 21 | ad = copy.deepcopy(asset_dict) 22 | self.did = ad.pop("id", None) 23 | self.version = ad.pop("version", None) 24 | self.nftAddress = ad.pop("nftAddress", None) 25 | self.chain_id = ad.pop("chainId", None) 26 | self.metadata = ad.pop("metadata", None) 27 | self.services = [ 28 | Service.from_json(index, service_dict) 29 | for index, service_dict in enumerate(ad.pop("services", [])) 30 | ] 31 | self.credentials = ad.pop("credentials", None) 32 | self.nft = ad.pop("nft", None) 33 | self.datatokens = ad.pop("datatokens", None) 34 | self.event = ad.pop("event", None) 35 | self.stats = ad.pop("stats", None) 36 | 37 | def get_service_by_index(self, index: int) -> Service: 38 | """Return the first Service with the given index""" 39 | return next((service for service in self.services if service.index == index)) 40 | 41 | def get_service_by_id(self, service_id: str) -> Service: 42 | """Return the Service with the matching id""" 43 | try: 44 | return next( 45 | (service for service in self.services if service.id == service_id) 46 | ) 47 | except StopIteration: 48 | return None 49 | 50 | @property 51 | def requires_address_credential(self) -> bool: 52 | """Checks if an address credential is required on this asset.""" 53 | manager = AddressCredential(self) 54 | return manager.requires_credential() 55 | 56 | @property 57 | def allowed_addresses(self) -> list: 58 | """Lists addresses that are explicitly allowed in credentials.""" 59 | manager = AddressCredential(self) 60 | return manager.get_addresses_of_class("allow") 61 | 62 | @property 63 | def denied_addresses(self) -> list: 64 | """Lists addresses that are explicitly denied in credentials.""" 65 | manager = AddressCredential(self) 66 | return manager.get_addresses_of_class("deny") 67 | 68 | @property 69 | def is_disabled(self) -> bool: 70 | return not self.metadata or (self.nft and self.nft["state"] not in [0, 5]) 71 | 72 | def is_consumable( 73 | self, 74 | credential: Optional[dict] = None, 75 | with_connectivity_check: bool = True, 76 | provider_uri: Optional[str] = None, 77 | ) -> ConsumableCodes: 78 | """Checks whether an asset is consumable and returns a ConsumableCode.""" 79 | if self.is_disabled: 80 | return ConsumableCodes.ASSET_DISABLED 81 | 82 | manager = AddressCredential(self) 83 | 84 | if manager.requires_credential(): 85 | return
manager.validate_access(credential) 86 | 87 | return ConsumableCodes.OK 88 | 89 | 90 | def get_asset_from_metadatastore(metadata_url, document_id) -> Optional[Asset]: 91 | """ 92 | :return: `Asset` instance or None 93 | """ 94 | url = f"{metadata_url}/api/aquarius/assets/ddo/{document_id}" 95 | response = requests.get(url) 96 | 97 | return Asset(response.json()) if response.status_code == 200 else None 98 | 99 | 100 | def check_asset_consumable(asset, consumer_address, logger, custom_url=None): 101 | if not asset.nft or "address" not in asset.nft or not asset.chain_id: 102 | return False, "Asset malformed or disabled." 103 | 104 | web3 = get_web3(asset.chain_id) 105 | nft_contract = get_data_nft_contract(web3, asset.nft["address"]) 106 | 107 | if nft_contract.functions.getMetaData().call()[2] not in [0, 5]: 108 | return False, "Asset is not consumable." 109 | 110 | code = asset.is_consumable({"type": "address", "value": consumer_address}) 111 | 112 | if code == ConsumableCodes.OK: # is consumable 113 | return True, "" 114 | 115 | message = f"Error: Access to asset {asset.did} was denied with code: {code}." 116 | logger.error(message, exc_info=1) 117 | 118 | return False, message 119 | -------------------------------------------------------------------------------- /ocean_provider/user_nonce.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import logging 6 | import os 7 | import sqlite3 8 | 9 | import jwt 10 | from flask_caching import Cache 11 | from ocean_provider import models 12 | from ocean_provider.myapp import app 13 | from ocean_provider.utils.basics import get_provider_private_key 14 | from web3.main import Web3 15 | 16 | logger = logging.getLogger(__name__) 17 | db = app.session 18 | 19 | cache = Cache( 20 | app, 21 | config={ 22 | "CACHE_TYPE": "redis", 23 | "CACHE_KEY_PREFIX": "ocean_provider", 24 | "CACHE_REDIS_URL": os.getenv("REDIS_CONNECTION"), 25 | }, 26 | ) 27 | 28 | 29 | def get_nonce(address): 30 | """ 31 | :return: `nonce` for the given address stored in the database 32 | """ 33 | if os.getenv("REDIS_CONNECTION"): 34 | result = cache.get(address) 35 | return result if result else None 36 | 37 | result = models.UserNonce.query.filter_by(address=address).first() 38 | 39 | return result.nonce if result else None 40 | 41 | 42 | def update_nonce(address, nonce_value): 43 | """ 44 | Updates the value of `nonce` in the database 45 | :param: address 46 | :param: nonce_value 47 | """ 48 | if nonce_value is None: 49 | logger.debug(f"Nonce value is not provided.") 50 | return 51 | 52 | logger.debug(f"Received nonce value: {nonce_value}") 53 | 54 | if os.getenv("REDIS_CONNECTION"): 55 | cache.set(address, nonce_value) 56 | 57 | return 58 | 59 | nonce_object = models.UserNonce.query.filter_by(address=address).first() 60 | if nonce_object is None: 61 | nonce_object = models.UserNonce(address=address, nonce=nonce_value) 62 | else: 63 | if nonce_object.nonce == nonce_value: 64 | msg = f"Cannot create duplicates in the database.\n Existing nonce: {nonce_object.nonce} vs. 
new nonce: {nonce_value}" 65 | logger.debug(msg) 66 | raise sqlite3.IntegrityError(msg) 67 | 68 | nonce_object.nonce = nonce_value 69 | 70 | logger.debug(f"Wallet address: {address}, new nonce {nonce_object.nonce}") 71 | 72 | try: 73 | db.add(nonce_object) 74 | db.commit() 75 | except Exception: 76 | db.rollback() 77 | logger.exception("Database update failed.") 78 | raise 79 | 80 | 81 | def force_expire_token(token): 82 | """ 83 | Creates the token in the database of Revoked Tokens. 84 | :param: token 85 | """ 86 | if os.getenv("REDIS_CONNECTION"): 87 | cache.set("token//" + token, True) 88 | 89 | return 90 | 91 | existing_token = models.RevokedToken.query.filter_by(token=token).first() 92 | if existing_token: 93 | return 94 | 95 | existing_token = models.RevokedToken(token=token) 96 | try: 97 | db.add(existing_token) 98 | db.commit() 99 | except Exception: 100 | db.rollback() 101 | logger.exception("Database update failed.") 102 | raise 103 | 104 | 105 | def force_restore_token(token): 106 | """ 107 | Removes the token from the database of Revoked Tokens. 108 | :param: token 109 | """ 110 | if os.getenv("REDIS_CONNECTION"): 111 | cache.delete("token//" + token) 112 | 113 | return 114 | 115 | existing_token = models.RevokedToken.query.filter_by(token=token).first() 116 | if not existing_token: 117 | return 118 | 119 | try: 120 | db.delete(existing_token) 121 | db.commit() 122 | except Exception: 123 | db.rollback() 124 | logger.exception("Database update failed.") 125 | raise 126 | 127 | 128 | def is_token_valid(token, address): 129 | """ 130 | Decodes the token, checks expiration, ownership and presence in the blacklist. 131 | 132 | Returns a tuple of boolean, message representing validity and issue (only if invalid). 133 | :param: token 134 | """ 135 | try: 136 | pk = get_provider_private_key(use_universal_key=True) 137 | decoded = jwt.decode(token, pk, algorithms=["HS256"]) 138 | if Web3.toChecksumAddress(decoded["address"]) != Web3.toChecksumAddress( 139 | address 140 | ): 141 | return False, "Token is invalid." 142 | except jwt.ExpiredSignatureError: 143 | return False, "Token is expired." 144 | except Exception: 145 | return False, "Token is invalid." 146 | 147 | if os.getenv("REDIS_CONNECTION"): 148 | valid = not cache.get("token//" + token) 149 | else: 150 | valid = not models.RevokedToken.query.filter_by(token=token).first() 151 | 152 | message = "" if valid else "Token is deleted." 153 | 154 | return valid, message 155 | -------------------------------------------------------------------------------- /ocean_provider/utils/url.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import ipaddress 6 | import logging 7 | from urllib.parse import urljoin, urlparse 8 | 9 | import dns.resolver 10 | import requests 11 | from ocean_provider.utils.basics import bool_value_of_env, get_provider_addresses 12 | 13 | logger = logging.getLogger(__name__) 14 | 15 | 16 | def get_redirect(url, redirect_count=0): 17 | if not is_url(url): 18 | return None 19 | 20 | if redirect_count > 5: 21 | logger.info(f"More than 5 redirects for url {url}. 
Aborting.") 22 | 23 | return None 24 | try: 25 | result = requests.head(url, allow_redirects=False) 26 | except Exception: 27 | return None 28 | if result.status_code == 405: 29 | # HEAD not allowed, so defaulting to get 30 | try: 31 | result = requests.get(url, allow_redirects=False) 32 | except Exception: 33 | return None 34 | 35 | if result.is_redirect: 36 | location = urljoin( 37 | url if url.endswith("/") else f"{url}/", result.headers["Location"] 38 | ) 39 | logger.info(f"Redirecting for url {url} to location {location}.") 40 | 41 | return get_redirect(location, redirect_count + 1) 42 | 43 | return url 44 | 45 | 46 | def is_safe_url(url): 47 | url = get_redirect(url) 48 | 49 | if not url: 50 | return False 51 | 52 | result = urlparse(url) 53 | 54 | return is_safe_domain(result.hostname) 55 | 56 | 57 | def is_url(url): 58 | try: 59 | result = urlparse(url) 60 | return all([result.scheme, result.netloc]) 61 | except: # noqa 62 | return False 63 | 64 | 65 | def is_ip(address): 66 | return address.replace(".", "").isnumeric() 67 | 68 | 69 | def is_this_same_provider(url, chain_id): 70 | result = urlparse(url) 71 | try: 72 | provider_info = requests.get(f"{result.scheme}://{result.netloc}/").json() 73 | chain_address = ( 74 | provider_info["providerAddresses"][str(chain_id)] 75 | if "providerAddresses" in provider_info 76 | else provider_info["providerAddress"] 77 | ) 78 | except (requests.exceptions.RequestException, KeyError): 79 | chain_address = None 80 | 81 | return chain_address == get_provider_addresses()[chain_id] 82 | 83 | 84 | def _get_records(domain, record_type): 85 | DNS_RESOLVER = dns.resolver.Resolver() 86 | try: 87 | return DNS_RESOLVER.resolve(domain, record_type, search=True) 88 | except Exception as e: 89 | logger.info(f"[i] Cannot get {record_type} record for domain {domain}: {e}\n") 90 | 91 | return None 92 | 93 | 94 | def is_safe_domain(domain): 95 | ip_v4_records = _get_records(domain, "A") 96 | ip_v6_records = _get_records(domain, "AAAA") 97 | 98 | result = validate_dns_records(domain, ip_v4_records, "A") and validate_dns_records( 99 | domain, ip_v6_records, "AAAA" 100 | ) 101 | 102 | if not is_ip(domain): 103 | return result 104 | 105 | return result and validate_dns_record(domain, domain, "") 106 | 107 | 108 | def validate_dns_records(domain, records, record_type): 109 | """ 110 | Verify if all DNS records resolve to public IP addresses. 111 | Return True if they do, False if any error has been detected. 112 | """ 113 | if records is None: 114 | return True 115 | 116 | for record in records: 117 | if not validate_dns_record(record, domain, record_type): 118 | return False 119 | 120 | return True 121 | 122 | 123 | def validate_dns_record(record, domain, record_type): 124 | value = record if isinstance(record, str) else record.to_text().strip() 125 | allow_non_public_ip = bool_value_of_env("ALLOW_NON_PUBLIC_IP") 126 | 127 | try: 128 | ip = ipaddress.ip_address(value) 129 | # noqa See https://docs.python.org/3/library/ipaddress.html#ipaddress.IPv4Address.is_global 130 | if ip.is_private or ip.is_reserved or ip.is_loopback: 131 | if allow_non_public_ip: 132 | logger.warning( 133 | f"[!] DNS record type {record_type} for domain name " 134 | f"{domain} resolves to a non public IP address {value}, " 135 | "but allowed by config!" 136 | ) 137 | return True 138 | else: 139 | logger.error( 140 | f"[!] DNS record type {record_type} for domain name " 141 | f"{domain} resolves to a non public IP address {value}. 
" 142 | ) 143 | 144 | return False 145 | except ValueError: 146 | logger.info("[!] '%s' is not valid IP address!" % value) 147 | return False 148 | 149 | return True 150 | -------------------------------------------------------------------------------- /ocean_provider/utils/credentials.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import json 6 | from typing import Optional 7 | 8 | from ocean_provider.utils.consumable import ConsumableCodes, MalformedCredential 9 | 10 | 11 | class AddressCredential: 12 | def __init__(self, asset) -> None: 13 | self.asset = asset 14 | 15 | def get_addresses_of_class(self, access_class: str = "allow") -> list: 16 | """Get a filtered list of addresses from credentials (use with allow/deny).""" 17 | address_entry = self.get_address_entry_of_class(access_class) 18 | if not address_entry: 19 | return [] 20 | 21 | if "values" not in address_entry: 22 | raise MalformedCredential("No values key in the address credential.") 23 | 24 | return [addr.lower() for addr in address_entry["values"]] 25 | 26 | def requires_credential(self) -> bool: 27 | """Checks whether the asset requires an address credential.""" 28 | allowed_addresses = self.get_addresses_of_class("allow") 29 | denied_addresses = self.get_addresses_of_class("deny") 30 | 31 | return bool(allowed_addresses or denied_addresses) 32 | 33 | def validate_access(self, credential: Optional[dict] = None) -> int: 34 | """Checks a credential dictionary against the address allow/deny lists.""" 35 | address = simplify_credential_to_address(credential) 36 | 37 | allowed_addresses = self.get_addresses_of_class("allow") 38 | denied_addresses = self.get_addresses_of_class("deny") 39 | 40 | if not address and not self.requires_credential(): 41 | return ConsumableCodes.OK 42 | 43 | if allowed_addresses and address.lower() not in allowed_addresses: 44 | return ConsumableCodes.CREDENTIAL_NOT_IN_ALLOW_LIST 45 | 46 | if not allowed_addresses and address.lower() in denied_addresses: 47 | return ConsumableCodes.CREDENTIAL_IN_DENY_LIST 48 | 49 | return ConsumableCodes.OK 50 | 51 | def add_address_to_access_class( 52 | self, address: str, access_class: str = "allow" 53 | ) -> None: 54 | """Adds an address to an address list (either allow or deny).""" 55 | address = address.lower() 56 | 57 | if not self.asset.credentials or access_class not in self.asset.credentials: 58 | self.asset.credentials[access_class] = [ 59 | {"type": "address", "values": [address]} 60 | ] 61 | return 62 | 63 | address_entry = self.get_address_entry_of_class(access_class) 64 | 65 | if not address_entry: 66 | self.asset.credentials[access_class].append( 67 | {"type": "address", "values": [address]} 68 | ) 69 | return 70 | 71 | lc_addresses = self.get_addresses_of_class(access_class) 72 | 73 | if address not in lc_addresses: 74 | lc_addresses.append(address) 75 | 76 | address_entry["values"] = lc_addresses 77 | 78 | def remove_address_from_access_class( 79 | self, address: str, access_class: str = "allow" 80 | ) -> None: 81 | """Removes an address from an address list (either allow or deny)i.""" 82 | address = address.lower() 83 | 84 | if not self.asset.credentials or access_class not in self.asset.credentials: 85 | return 86 | 87 | address_entry = self.get_address_entry_of_class(access_class) 88 | 89 | if not address_entry: 90 | return 91 | 92 | lc_addresses = self.get_addresses_of_class(access_class) 93 | 94 | if 
address not in lc_addresses: 95 | return 96 | 97 | lc_addresses.remove(address) 98 | address_entry["values"] = lc_addresses 99 | 100 | def get_address_entry_of_class(self, access_class: str = "allow") -> Optional[dict]: 101 | """Get address credentials entry of the specified access class. access_class = "allow" or "deny".""" 102 | if not self.asset.credentials: 103 | return None 104 | if isinstance(self.asset.credentials, str): 105 | credentials = json.loads(self.asset.credentials) 106 | else: 107 | credentials = self.asset.credentials 108 | 109 | entries = credentials.get(access_class, []) 110 | address_entries = [entry for entry in entries if entry.get("type") == "address"] 111 | return address_entries[0] if address_entries else None 112 | 113 | 114 | def simplify_credential_to_address(credential: Optional[dict]) -> Optional[str]: 115 | """Extracts address value from credential dictionary.""" 116 | if not credential: 117 | return None 118 | 119 | if not credential.get("value"): 120 | raise MalformedCredential("Received empty address.") 121 | 122 | return credential["value"] 123 | -------------------------------------------------------------------------------- /tests/test_auth.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import time 6 | from datetime import datetime, timedelta, timezone 7 | 8 | import pytest 9 | from ocean_provider.constants import BaseURLs 10 | from ocean_provider.user_nonce import is_token_valid 11 | from ocean_provider.utils.accounts import sign_message 12 | from tests.helpers.nonce import build_nonce 13 | 14 | 15 | def create_token(client, consumer_wallet, expiration=None): 16 | """Helper function to create a token using the API.""" 17 | address = consumer_wallet.address 18 | if expiration is None: 19 | expiration = int((datetime.now(timezone.utc) + timedelta(hours=1)).timestamp()) 20 | 21 | payload = {"address": address, "expiration": expiration} 22 | 23 | endpoint = BaseURLs.SERVICES_URL + "/createAuthToken" 24 | nonce = build_nonce(address) 25 | _msg = f"{address}{nonce}" 26 | payload["signature"] = sign_message(_msg, consumer_wallet) 27 | payload["nonce"] = nonce 28 | response = client.get(endpoint, query_string=payload) 29 | 30 | assert response.status_code == 200, f"{response.data}" 31 | assert "token" in response.json, "token is missing from response" 32 | 33 | return response.json["token"] 34 | 35 | 36 | @pytest.mark.unit 37 | def test_create_auth_token(client, consumer_wallet, provider_wallet): 38 | """Test that tokens can be created and they are only valid for their creators' addresses.""" 39 | consumer_token = create_token(client, consumer_wallet) 40 | provider_token = create_token(client, provider_wallet) 41 | assert is_token_valid(consumer_token, consumer_wallet.address)[0] 42 | assert not is_token_valid(provider_token, consumer_wallet.address)[0] 43 | 44 | 45 | @pytest.mark.unit 46 | def test_delete_auth_token_sqlite(client, consumer_wallet, monkeypatch): 47 | """Tests token deletion and recreation with the sqlite backend.""" 48 | monkeypatch.delenv("REDIS_CONNECTION") 49 | expiration = int((datetime.now(timezone.utc) + timedelta(hours=1)).timestamp()) 50 | address = consumer_wallet.address 51 | token = create_token(client, consumer_wallet, expiration) 52 | assert is_token_valid(token, address)[0] 53 | 54 | payload = {"address": address, "token": token} 55 | 56 | endpoint = BaseURLs.SERVICES_URL + 
"/deleteAuthToken" 57 | nonce = build_nonce(address) 58 | _msg = f"{address}{nonce}" 59 | payload["signature"] = sign_message(_msg, consumer_wallet) 60 | payload["nonce"] = nonce 61 | response = client.delete(endpoint, query_string=payload) 62 | 63 | assert response.status_code == 200, f"{response.data}" 64 | assert not is_token_valid(token, address)[0] 65 | 66 | # create with same parameters restores the token 67 | token2 = create_token(client, consumer_wallet, expiration) 68 | assert token == token2 69 | assert is_token_valid(token, address)[0] 70 | 71 | 72 | @pytest.mark.unit 73 | def test_delete_auth_token_redis(client, consumer_wallet): 74 | """Tests token deletion and recreation with the redis backend.""" 75 | address = consumer_wallet.address 76 | expiration = int((datetime.now(timezone.utc) + timedelta(hours=1)).timestamp()) 77 | token = create_token(client, consumer_wallet, expiration) 78 | assert is_token_valid(token, address)[0] 79 | 80 | payload = {"address": address, "token": token} 81 | 82 | endpoint = BaseURLs.SERVICES_URL + "/deleteAuthToken" 83 | nonce = build_nonce(address) 84 | _msg = f"{address}{nonce}" 85 | payload["signature"] = sign_message(_msg, consumer_wallet) 86 | payload["nonce"] = nonce 87 | response = client.delete(endpoint, query_string=payload) 88 | 89 | assert response.status_code == 200, f"{response.data}" 90 | assert response.json["success"] == "Token has been deactivated." 91 | assert not is_token_valid(token, address)[0] 92 | assert is_token_valid(token, address)[1] == "Token is deleted." 93 | 94 | # can not delete again 95 | nonce = build_nonce(address) 96 | _msg = f"{address}{nonce}" 97 | payload["signature"] = sign_message(_msg, consumer_wallet) 98 | payload["nonce"] = nonce 99 | response = client.delete(endpoint, query_string=payload) 100 | assert response.status_code == 400 101 | assert response.json["error"] == "Token is deleted." 102 | 103 | # create with same parameters restores the token 104 | token2 = create_token(client, consumer_wallet, expiration) 105 | assert token == token2 106 | assert is_token_valid(token, address)[0] 107 | 108 | 109 | @pytest.mark.unit 110 | def test_expiration(client, consumer_wallet): 111 | """Tests token expiration.""" 112 | address = consumer_wallet.address 113 | expiration = int((datetime.now(timezone.utc) + timedelta(seconds=5)).timestamp()) 114 | token = create_token(client, consumer_wallet, expiration) 115 | time.sleep(6) 116 | valid, message = is_token_valid(token, address) 117 | assert not valid 118 | assert message == "Token is expired." 
119 | -------------------------------------------------------------------------------- /tests/test_graphql.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import json 6 | 7 | import pytest 8 | from ocean_provider.constants import BaseURLs 9 | from ocean_provider.utils.accounts import sign_message 10 | from ocean_provider.utils.provider_fees import get_provider_fees 11 | from ocean_provider.utils.services import ServiceType 12 | from tests.helpers.nonce import build_nonce 13 | from tests.test_helpers import ( 14 | get_first_service_by_type, 15 | get_registered_asset, 16 | mint_100_datatokens, 17 | start_order, 18 | ) 19 | 20 | 21 | @pytest.mark.integration 22 | def test_download_graphql_asset(client, publisher_wallet, consumer_wallet, web3): 23 | unencrypted_files_list = [ 24 | { 25 | "type": "graphql", 26 | "url": "http://172.15.0.15:8030/graphql", 27 | "query": """ 28 | query { 29 | indexingStatuses { 30 | subgraph 31 | chains 32 | node 33 | } 34 | } 35 | """, 36 | } 37 | ] 38 | asset = get_registered_asset( 39 | publisher_wallet, unencrypted_files_list=unencrypted_files_list 40 | ) 41 | service = get_first_service_by_type(asset, ServiceType.ACCESS) 42 | mint_100_datatokens( 43 | web3, service.datatoken_address, consumer_wallet.address, publisher_wallet 44 | ) 45 | tx_id, _ = start_order( 46 | web3, 47 | service.datatoken_address, 48 | consumer_wallet.address, 49 | service.index, 50 | get_provider_fees(asset, service, consumer_wallet.address, 0), 51 | consumer_wallet, 52 | ) 53 | 54 | payload = { 55 | "documentId": asset.did, 56 | "serviceId": service.id, 57 | "consumerAddress": consumer_wallet.address, 58 | "transferTxId": tx_id, 59 | "fileIndex": 0, 60 | } 61 | 62 | download_endpoint = BaseURLs.SERVICES_URL + "/download" 63 | 64 | # Consume using url index and signature (with nonce) 65 | nonce = build_nonce(consumer_wallet.address) 66 | _msg = f"{asset.did}{nonce}" 67 | payload["signature"] = sign_message(_msg, consumer_wallet) 68 | payload["nonce"] = nonce 69 | response = client.get( 70 | service.service_endpoint + download_endpoint, query_string=payload 71 | ) 72 | assert response.status_code == 200, f"{response.data}" 73 | 74 | 75 | @pytest.mark.integration 76 | def test_download_graphql_asset_with_userdata( 77 | client, publisher_wallet, consumer_wallet, web3 78 | ): 79 | unencrypted_files_list = [ 80 | { 81 | "type": "graphql", 82 | "url": "http://172.15.0.15:8030/graphql", 83 | "query": """ 84 | query GetSubgraph($name: [String!]){ 85 | indexingStatuses(subgraphs: $name) { 86 | subgraph 87 | chains 88 | node 89 | } 90 | } 91 | """, 92 | } 93 | ] 94 | asset = get_registered_asset( 95 | publisher_wallet, 96 | unencrypted_files_list=unencrypted_files_list, 97 | custom_userdata=[ 98 | { 99 | "name": "name", 100 | "type": "text", 101 | "label": "name", 102 | "required": True, 103 | "description": "Subgraph indexing status", 104 | "default": ["subgraph"], 105 | } 106 | ], 107 | ) 108 | service = get_first_service_by_type(asset, ServiceType.ACCESS) 109 | mint_100_datatokens( 110 | web3, service.datatoken_address, consumer_wallet.address, publisher_wallet 111 | ) 112 | tx_id, _ = start_order( 113 | web3, 114 | service.datatoken_address, 115 | consumer_wallet.address, 116 | service.index, 117 | get_provider_fees(asset, service, consumer_wallet.address, 0), 118 | consumer_wallet, 119 | ) 120 | 121 | payload = { 122 | "documentId": asset.did, 123 | 
"serviceId": service.id, 124 | "consumerAddress": consumer_wallet.address, 125 | "transferTxId": tx_id, 126 | "fileIndex": 0, 127 | "userdata": json.dumps({"name": ["subgraph"]}), 128 | } 129 | 130 | download_endpoint = BaseURLs.SERVICES_URL + "/download" 131 | # Consume using url index and signature (with nonce) 132 | nonce = build_nonce(consumer_wallet.address) 133 | _msg = f"{asset.did}{nonce}" 134 | payload["signature"] = sign_message(_msg, consumer_wallet) 135 | payload["nonce"] = nonce 136 | response = client.get( 137 | service.service_endpoint + download_endpoint, query_string=payload 138 | ) 139 | assert response.status_code == 200, f"{response.data}" 140 | reply = json.loads(response.data) 141 | assert len(reply["data"]) == 1 142 | assert "indexingStatuses" in reply["data"].keys() 143 | -------------------------------------------------------------------------------- /ocean_provider/utils/util.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import hashlib 6 | import json 7 | import logging 8 | from typing import Tuple 9 | 10 | import werkzeug 11 | from eth_account.signers.local import LocalAccount 12 | from eth_keys import KeyAPI 13 | from eth_keys.backends import NativeECCBackend 14 | from eth_typing.encoding import HexStr 15 | from ocean_provider.utils.asset import Asset 16 | from ocean_provider.utils.basics import get_network_name 17 | from ocean_provider.utils.encryption import do_decrypt 18 | from ocean_provider.utils.services import Service 19 | from web3 import Web3 20 | from web3.types import TxParams, TxReceipt 21 | 22 | logger = logging.getLogger(__name__) 23 | keys = KeyAPI(NativeECCBackend) 24 | 25 | 26 | def get_request_data(request): 27 | try: 28 | return request.args if request.args else request.json 29 | except werkzeug.exceptions.BadRequest: 30 | return {} 31 | 32 | 33 | def msg_hash(message: str): 34 | return hashlib.sha256(message.encode("utf-8")).hexdigest() 35 | 36 | 37 | def get_service_files_list( 38 | service: Service, provider_wallet: LocalAccount, asset: Asset = None 39 | ) -> list: 40 | version = asset.version if asset is not None and asset.version else "4.0.0" 41 | if asset is None or version == "4.0.0": 42 | return get_service_files_list_old_structure(service, provider_wallet) 43 | 44 | network_name = get_network_name(asset.chain_id) 45 | try: 46 | files_str = do_decrypt(service.encrypted_files, provider_wallet) 47 | if not files_str: 48 | return None 49 | 50 | files_json = json.loads(files_str) 51 | 52 | for key in ["datatokenAddress", "nftAddress", "files"]: 53 | if key not in files_json: 54 | raise Exception( 55 | f"Provider {network_name}: Key {key} not found in files." 56 | ) 57 | 58 | if Web3.toChecksumAddress( 59 | files_json["datatokenAddress"] 60 | ) != Web3.toChecksumAddress(service.datatoken_address): 61 | raise Exception( 62 | f"Provider {network_name}: Mismatch of datatoken. Got {files_json['datatokenAddress']} vs expected {service.datatoken_address}" 63 | ) 64 | 65 | if Web3.toChecksumAddress(files_json["nftAddress"]) != Web3.toChecksumAddress( 66 | asset.nftAddress 67 | ): 68 | raise Exception( 69 | f"Provider {network_name}: Mismatch of dataNft. Got {files_json['nftAddress']} vs expected {asset.nftAddress}" 70 | ) 71 | 72 | files_list = files_json["files"] 73 | if not isinstance(files_list, list): 74 | raise TypeError( 75 | f"Provider {network_name}: Expected a files list, got {type(files_list)}." 
76 | ) 77 | 78 | return files_list 79 | except Exception as e: 80 | logger.error( 81 | f"Provider {network_name}: Error decrypting service files {service}: {str(e)}" 82 | ) 83 | return None 84 | 85 | 86 | def get_service_files_list_old_structure( 87 | service: Service, provider_wallet: LocalAccount 88 | ) -> list: 89 | try: 90 | files_str = do_decrypt(service.encrypted_files, provider_wallet) 91 | if not files_str: 92 | return None 93 | logger.debug(f"Got decrypted files str {files_str}") 94 | files_list = json.loads(files_str) 95 | if not isinstance(files_list, list): 96 | raise TypeError(f"Expected a files list, got {type(files_list)}.") 97 | 98 | return files_list 99 | except Exception as e: 100 | logger.error(f"Error decrypting service files {service}: {str(e)}") 101 | return None 102 | 103 | 104 | def sign_tx(web3, tx, private_key): 105 | """ 106 | :param web3: Web3 object instance 107 | :param tx: transaction 108 | :param private_key: Private key of the account 109 | :return: rawTransaction (str) 110 | """ 111 | account = web3.eth.account.from_key(private_key) 112 | nonce = web3.eth.get_transaction_count(account.address) 113 | tx["nonce"] = nonce 114 | signed_tx = web3.eth.account.sign_transaction(tx, private_key) 115 | 116 | return signed_tx.rawTransaction 117 | 118 | 119 | def sign_and_send( 120 | web3: Web3, transaction: TxParams, from_account: LocalAccount 121 | ) -> Tuple[HexStr, TxReceipt]: 122 | """Returns the transaction hash and transaction id.""" 123 | transaction_signed = sign_tx(web3, transaction, from_account.key) 124 | transaction_hash = web3.eth.send_raw_transaction(transaction_signed) 125 | transaction_id = Web3.toHex(transaction_hash) 126 | 127 | return transaction_hash, transaction_id 128 | 129 | 130 | def sign_send_and_wait_for_receipt( 131 | web3: Web3, transaction: TxParams, from_account: LocalAccount 132 | ) -> Tuple[HexStr, TxReceipt]: 133 | """Returns the transaction id and transaction receipt.""" 134 | transaction_hash, transaction_id = sign_and_send(web3, transaction, from_account) 135 | 136 | return (transaction_id, web3.eth.wait_for_transaction_receipt(transaction_hash)) 137 | -------------------------------------------------------------------------------- /ocean_provider/validation/RBAC.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | 6 | import json 7 | import os 8 | 9 | import requests 10 | from ocean_provider.exceptions import RequestNotFound 11 | from ocean_provider.utils.accounts import sign_message 12 | from ocean_provider.utils.basics import get_provider_wallet 13 | 14 | 15 | class RBACValidator: 16 | def __init__( 17 | self, 18 | request_name, 19 | request, 20 | ): 21 | self.request = request 22 | action_mapping = self.get_action_mapping() 23 | if request_name not in action_mapping.keys(): 24 | raise RequestNotFound("Request name is not valid!") 25 | self.action = action_mapping[request_name] 26 | self.provider_address = get_provider_wallet(use_universal_key=True).address 27 | address = self.request.get( 28 | "consumerAddress", self.request.get("publisherAddress") 29 | ) 30 | self.credentials = {"type": "address", "value": address} 31 | self.component = "provider" 32 | 33 | @staticmethod 34 | def get_action_mapping(): 35 | return { 36 | "EncryptRequest": "encryptUrl", 37 | "InitializeRequest": "initialize", 38 | "DownloadRequest": "access", 39 | "ComputeRequest": "compute", 40 | "ComputeStartRequest":
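            # both compute request variants authorize against the same action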
"compute", 41 | } 42 | 43 | def messages(self): 44 | return [{"RBAC": "RBAC Validation failed!"}] 45 | 46 | def fails(self): 47 | payload = self.build_payload() 48 | response = requests.post(os.getenv("RBAC_SERVER_URL"), json=payload) 49 | return not response.json() 50 | 51 | def get_dids(self): 52 | main_key = ( 53 | self.request["dataset"] if "dataset" in self.request else self.request 54 | ) 55 | 56 | return [{"did": main_key["documentId"], "serviceId": main_key["serviceId"]}] 57 | 58 | def get_algos(self): 59 | return [ 60 | { 61 | "did": self.request["algorithm"]["documentId"], 62 | "serviceId": self.request["algorithm"]["serviceId"], 63 | } 64 | ] 65 | 66 | def get_additional_dids(self): 67 | if "additionalDatasets" not in self.request.keys(): 68 | return [] 69 | 70 | additional_inputs = self.request["additionalDatasets"] 71 | return [ 72 | { 73 | "did": additional_input["documentId"], 74 | "serviceId": additional_input["serviceId"], 75 | } 76 | for additional_input in additional_inputs 77 | ] 78 | 79 | def get_data(self): 80 | if "data" not in self.request.keys(): 81 | raise Exception("Data to encrypt is empty.") 82 | return self.request["data"] 83 | 84 | def build_payload(self): 85 | provider_access = ( 86 | "private" if os.getenv("PRIVATE_PROVIDER", False) else "public" 87 | ) 88 | payload = { 89 | "eventType": self.action, 90 | "component": self.component, 91 | "providerAddress": self.provider_address, 92 | "providerAccess": provider_access, 93 | "credentials": self.credentials, 94 | } 95 | # builds actions like build_encrtyptUrl_payload to update the dictionary 96 | # with request - specific key-values. 97 | payload.update(getattr(self, f"build_{self.action}_payload")()) 98 | return payload 99 | 100 | def build_encryptUrl_payload(self): 101 | message = "encryptUrl" + json.dumps(self.credentials) 102 | signature = sign_message(message, get_provider_wallet(use_universal_key=True)) 103 | 104 | return {"signature": signature, "data": self.get_data()} 105 | 106 | def build_initialize_payload(self): 107 | message = "initialize" + json.dumps(self.credentials) 108 | signature = sign_message(message, get_provider_wallet(use_universal_key=True)) 109 | return { 110 | "signature": signature, 111 | "dids": self.get_dids(), 112 | } 113 | 114 | def build_access_payload(self): 115 | message = "access" + json.dumps(self.credentials) 116 | signature = sign_message(message, get_provider_wallet(use_universal_key=True)) 117 | return {"signature": signature, "dids": self.get_dids()} 118 | 119 | def build_compute_payload(self): 120 | dids = self.get_dids() 121 | algos = self.get_algos() 122 | algos_text = json.dumps(algos) if algos else "" 123 | additional_dids = self.get_additional_dids() 124 | additional_dids_text = json.dumps(additional_dids) if additional_dids else "" 125 | message = ( 126 | "compute" 127 | + json.dumps(self.credentials) 128 | + json.dumps(dids) 129 | + algos_text 130 | + additional_dids_text 131 | ) 132 | signature = sign_message(message, get_provider_wallet(use_universal_key=True)) 133 | compute_payload = {"signature": signature, "dids": dids} 134 | if algos: 135 | compute_payload["algos"] = algos 136 | if additional_dids: 137 | compute_payload["additionalDids"] = additional_dids 138 | return compute_payload 139 | -------------------------------------------------------------------------------- /tests/ddo/ddo_sa_sample_with_credentials.json: -------------------------------------------------------------------------------- 1 | { 2 | "@context": "https://w3id.org/did/v1", 3 | 
"created": "2019-02-08T08:13:49Z", 4 | "updated": "2019-03-08T08:13:49Z", 5 | "id": "did:op:8d1b4d73e7af4634958f071ab8dfe7ab0df14019", 6 | "proof": { 7 | "created": "2019-02-08T08:13:41Z", 8 | "creator": "0x37BB53e3d293494DE59fBe1FF78500423dcFd43B", 9 | "signatureValue": "did:op:0bc278fee025464f8012b811d1bce8e22094d0984e4e49139df5d5ff7a028bdf", 10 | "type": "DDOIntegritySignature", 11 | "checksum": { 12 | "0": "0x52b5c93b82dd9e7ecc3d9fdf4755f7f69a54484941897dc517b4adfe3bbc3377", 13 | "1": "0x999999952b5c93b82dd9e7ecc3d9fdf4755f7f69a54484941897dc517b4adfe3" 14 | } 15 | }, 16 | "verifiableCredential": [ 17 | { 18 | "@context": [ 19 | "https://www.w3.org/2018/credentials/v1", 20 | "https://www.w3.org/2018/credentials/examples/v1" 21 | ], 22 | "id": "1872", 23 | "type": [ 24 | "read", 25 | "update", 26 | "deactivate" 27 | ], 28 | "issuer": "0x610D9314EDF2ced7681BA1633C33fdb8cF365a12", 29 | "issuanceDate": "2019-01-01T19:73:24Z", 30 | "credentialSubject": { 31 | "id": "0x89328493849328493284932" 32 | }, 33 | "proof": { 34 | "type": "RsaSignature2018", 35 | "created": "2019-01-01T19:73:24Z", 36 | "proofPurpose": "assertionMethod", 37 | "signatureValue": "ABCJSDAO23...1tzjn4w==" 38 | } 39 | } 40 | ], 41 | "service": [ 42 | { 43 | "index": 0, 44 | "serviceEndpoint": "http://localhost:5000/api/aquarius/assets/ddo/{did}", 45 | "type": "metadata", 46 | "attributes": { 47 | "encryptedFiles": "0x2e48ceefcca7abb024f90c87c676fce8f7913f889605a349c08c0c4a822c69ad651e122cc81db4fbb52938ac627786491514f37a2ebfd04fd98ec726f1d9061ed52f13fde132222af34d9af8ec358429cf45fc669f81a607185cb9a8150df3cbb2b4e3e382fb16429be228ddd920f061b78dd54701025fac8aab976239fb31a5b60a57393e96a338324c5ac8a5600a1247339c4835533cecdb5b53caf6b6f9d6478b579b7426f650a4154a20d18a9d49f509770af62647a57fc174741b47af3c8beeaaa76bee276cce8fba1f3fec0e1c", 48 | "main": { 49 | "author": "Met Office", 50 | "dateCreated": "2019-02-08T08:13:49Z", 51 | "files": [ 52 | { 53 | "url": "https://raw.githubusercontent.com/tbertinmahieux/MSongsDB/master/Tasks_Demos/CoverSongs/shs_dataset_test.txt", 54 | "index": 0, 55 | "checksum": "efb2c764274b745f5fc37f97c6b0e761", 56 | "contentLength": "4535431", 57 | "contentType": "text/csv", 58 | "encoding": "UTF-8", 59 | "compression": "zip" 60 | } 61 | ], 62 | "license": "CC-BY", 63 | "name": "UK Weather information 2011", 64 | "type": "dataset" 65 | }, 66 | "additionalInformation": { 67 | "description": "Weather information of UK including temperature and humidity", 68 | "tags": [ 69 | "weather", 70 | "uk", 71 | "2011", 72 | "temperature", 73 | "humidity" 74 | ], 75 | "workExample": "423432fsd,51.509865,-0.118092,2011-01-01T10:55:11+00:00,7.2,68", 76 | "copyrightHolder": "Met Office", 77 | "links": [ 78 | { 79 | "name": "Sample of Asset Data", 80 | "type": "sample", 81 | "url": "https://foo.com/sample.csv" 82 | }, 83 | { 84 | "name": "Data Format Definition", 85 | "type": "format", 86 | "url": "https://foo.com/sampl2.csv" 87 | } 88 | ], 89 | "inLanguage": "en", 90 | "updateFrecuency": "yearly", 91 | "structuredMarkup": [ 92 | { 93 | "uri": "http://skos.um.es/unescothes/C01194/jsonld", 94 | "mediaType": "application/ld+json" 95 | }, 96 | { 97 | "uri": "http://skos.um.es/unescothes/C01194/turtle", 98 | "mediaType": "text/turtle" 99 | } 100 | ] 101 | }, 102 | "curation": { 103 | "numVotes": 123, 104 | "rating": 0.0, 105 | "schema": "Binary Votting", 106 | "isListed": true 107 | } 108 | } 109 | }, 110 | { 111 | "type": "access", 112 | "index": 1, 113 | "serviceEndpoint": "http://localhost:8030", 114 | "attributes": { 115 | 
"main": { 116 | "name": "dataAssetAccessServiceAgreement", 117 | "creator": "", 118 | "datePublished": "2019-02-08T08:13:49Z", 119 | "cost": "1.0", 120 | "timeout": 36000 121 | }, 122 | "additionalInformation": { 123 | "description": "" 124 | } 125 | } 126 | } 127 | ], 128 | "credentials": { 129 | "allow": [ 130 | {"type": "address", "values": ["0x123", "0x456A"]} 131 | ], 132 | "deny": [ 133 | {"type": "address", "values": ["0x2222", "0x333"]} 134 | ] 135 | } 136 | } 137 | -------------------------------------------------------------------------------- /tests/ddo/ddo_sample_invalid_url.json: -------------------------------------------------------------------------------- 1 | { 2 | "@context": "https://w3id.org/did/v1", 3 | "authentication": [{ 4 | "type": "RsaSignatureAuthentication2018", 5 | "publicKey": "did:op:0ebed8226ada17fde24b6bf2b95d27f8f05fcce09139ff5cec31f6d81a7cd2ea" 6 | }], 7 | "created": "2019-02-08T08:13:49Z", 8 | "updated": "2019-03-08T08:13:49Z", 9 | "id": "did:op:0bc278fee025464f8012b811d1bce8e22094d0984e4e49139df5d5ff7a028bdf", 10 | "proof": { 11 | "created": "2019-02-08T08:13:41Z", 12 | "creator": "0x37BB53e3d293494DE59fBe1FF78500423dcFd43B", 13 | "signatureValue": "did:op:0bc278fee025464f8012b811d1bce8e22094d0984e4e49139df5d5ff7a028bdf", 14 | "type": "DDOIntegritySignature", 15 | "checksum": { 16 | "0": "0x52b5c93b82dd9e7ecc3d9fdf4755f7f69a54484941897dc517b4adfe3bbc3377", 17 | "1": "0x999999952b5c93b82dd9e7ecc3d9fdf4755f7f69a54484941897dc517b4adfe3" 18 | } 19 | }, 20 | "publicKey": [{ 21 | "id": "did:op:b6e2eb5eff1a093ced9826315d5a4ef6c5b5c8bd3c49890ee284231d7e1d0aaa#keys-1", 22 | "type": "RsaVerificationKey2018", 23 | "owner": "did:op:6027c1e7cbae06a91fce0557ee53195284825f56a7100be0c53cbf4391aa26cc", 24 | "publicKeyPem": "-----BEGIN PUBLIC KEY...END PUBLIC KEY-----\r\n" 25 | }, { 26 | "id": "did:op:b6e2eb5eff1a093ced9826315d5a4ef6c5b5c8bd3c49890ee284231d7e1d0aaa#keys-2", 27 | "type": "Ed25519VerificationKey2018", 28 | "owner": "did:op:4c27a254e607cdf91a1206480e7eb8c74856102316c1a462277d4f21c02373b6", 29 | "publicKeyBase58": "H3C2AVvLMv6gmMNam3uVAjZpfkcJCwDwnZn6z3wXmqPV" 30 | }, { 31 | "id": "did:op:b6e2eb5eff1a093ced9826315d5a4ef6c5b5c8bd3c49890ee284231d7e1d0aaa#keys-3", 32 | "type": "RsaPublicKeyExchangeKey2018", 33 | "owner": "did:op:5f6b885202ffb9643874be529302eb00d55e226959f1fbacaeda592c5b5c9484", 34 | "publicKeyPem": "-----BEGIN PUBLIC KEY...END PUBLIC KEY-----\r\n" 35 | }], 36 | "verifiableCredential": [{ 37 | "@context": [ 38 | "https://www.w3.org/2018/credentials/v1", 39 | "https://www.w3.org/2018/credentials/examples/v1" 40 | ], 41 | "id": "1872", 42 | "type": ["read", "update", "deactivate"], 43 | "issuer": "0x610D9314EDF2ced7681BA1633C33fdb8cF365a12", 44 | "issuanceDate": "2019-01-01T19:73:24Z", 45 | "credentialSubject": { 46 | "id": "0x89328493849328493284932" 47 | }, 48 | "proof": { 49 | "type": "RsaSignature2018", 50 | "created": "2019-01-01T19:73:24Z", 51 | "proofPurpose": "assertionMethod", 52 | "signatureValue": "ABCJSDAO23...1tzjn4w==" 53 | } 54 | }], 55 | "service": [ 56 | { 57 | "index": 0, 58 | "serviceEndpoint": "http://localhost:5000/api/aquarius/assets/ddo/{did}", 59 | "type": "metadata", 60 | "attributes": { 61 | "encryptedFiles": 
"0x2e48ceefcca7abb024f90c87c676fce8f7913f889605a349c08c0c4a822c69ad651e122cc81db4fbb52938ac627786491514f37a2ebfd04fd98ec726f1d9061ed52f13fde132222af34d9af8ec358429cf45fc669f81a607185cb9a8150df3cbb2b4e3e382fb16429be228ddd920f061b78dd54701025fac8aab976239fb31a5b60a57393e96a338324c5ac8a5600a1247339c4835533cecdb5b53caf6b6f9d6478b579b7426f650a4154a20d18a9d49f509770af62647a57fc174741b47af3c8beeaaa76bee276cce8fba1f3fec0e1c", 62 | "main": { 63 | "author": "Met Office", 64 | "dateCreated": "2019-02-08T08:13:49Z", 65 | "files": [ 66 | { 67 | "url": "http://localhost/not_valid_url", 68 | "index": 0, 69 | "checksum": "efb2c764274b745f5fc37f97c6b0e764", 70 | "contentLength": "4535431", 71 | "contentType": "text/csv", 72 | "encoding": "UTF-8", 73 | "compression": "zip" 74 | } 75 | ], 76 | "license": "CC-BY", 77 | "name": "UK Weather information 2011", 78 | "cost": "1.0", 79 | "type": "dataset" 80 | }, 81 | 82 | "additionalInformation": { 83 | 84 | "description": "Weather information of UK including temperature and humidity", 85 | "tags": ["weather", "uk", "2011", "temperature", "humidity"], 86 | "workExample": "423432fsd,51.509865,-0.118092,2011-01-01T10:55:11+00:00,7.2,68", 87 | "copyrightHolder": "Met Office", 88 | "links": [ 89 | { 90 | "name": "Sample of Asset Data", 91 | "type": "sample", 92 | "url": "https://foo.com/sample.csv" 93 | }, 94 | { 95 | "name": "Data Format Definition", 96 | "type": "format", 97 | "url": "https://foo.com/sampl2.csv" 98 | } 99 | ], 100 | "inLanguage": "en", 101 | 102 | "updateFrequency": "yearly", 103 | "structuredMarkup": [ 104 | { 105 | "uri": "http://skos.um.es/unescothes/C01194/jsonld", 106 | "mediaType": "application/ld+json" 107 | }, 108 | { 109 | "uri": "http://skos.um.es/unescothes/C01194/turtle", 110 | "mediaType": "text/turtle" 111 | } 112 | ] 113 | }, 114 | 115 | "curation": { 116 | "numVotes": 123, 117 | "rating": 0.0, 118 | "schema": "Binary Votting", 119 | "isListed": true 120 | } 121 | } 122 | }, 123 | { 124 | "type": "access", 125 | "index": 1, 126 | "serviceEndpoint": "http://localhost:8030/api/services/consume", 127 | "templateId": "0x1234", 128 | "attributes": { 129 | "main": { 130 | "name": "dataAssetAccessServiceAgreement", 131 | "creator": "", 132 | "datePublished": "2019-02-08T08:13:49Z", 133 | "cost": "1.0", 134 | "timeout": 36000 135 | }, 136 | "additionalInformation": { 137 | "description": "" 138 | } 139 | } 140 | 141 | } 142 | ] 143 | } 144 | -------------------------------------------------------------------------------- /tests/ddo/ddo_sample_ipfs_url.json: -------------------------------------------------------------------------------- 1 | { 2 | "@context": "https://w3id.org/did/v1", 3 | "authentication": [{ 4 | "type": "RsaSignatureAuthentication2018", 5 | "publicKey": "did:op:0ebed8226ada17fde24b6bf2b95d27f8f05fcce09139ff5cec31f6d81a7cd2ea" 6 | }], 7 | "created": "2019-02-08T08:13:49Z", 8 | "updated": "2019-03-08T08:13:49Z", 9 | "id": "did:op:0bc278fee025464f8012b811d1bce8e22094d0984e4e49139df5d5ff7a028bdf", 10 | "proof": { 11 | "created": "2019-02-08T08:13:41Z", 12 | "creator": "0x37BB53e3d293494DE59fBe1FF78500423dcFd43B", 13 | "signatureValue": "did:op:0bc278fee025464f8012b811d1bce8e22094d0984e4e49139df5d5ff7a028bdf", 14 | "type": "DDOIntegritySignature", 15 | "checksum": { 16 | "0": "0x52b5c93b82dd9e7ecc3d9fdf4755f7f69a54484941897dc517b4adfe3bbc3377", 17 | "1": "0x999999952b5c93b82dd9e7ecc3d9fdf4755f7f69a54484941897dc517b4adfe3" 18 | } 19 | }, 20 | "publicKey": [{ 21 | "id": 
"did:op:b6e2eb5eff1a093ced9826315d5a4ef6c5b5c8bd3c49890ee284231d7e1d0aaa#keys-1", 22 | "type": "RsaVerificationKey2018", 23 | "owner": "did:op:6027c1e7cbae06a91fce0557ee53195284825f56a7100be0c53cbf4391aa26cc", 24 | "publicKeyPem": "-----BEGIN PUBLIC KEY...END PUBLIC KEY-----\r\n" 25 | }, { 26 | "id": "did:op:b6e2eb5eff1a093ced9826315d5a4ef6c5b5c8bd3c49890ee284231d7e1d0aaa#keys-2", 27 | "type": "Ed25519VerificationKey2018", 28 | "owner": "did:op:4c27a254e607cdf91a1206480e7eb8c74856102316c1a462277d4f21c02373b6", 29 | "publicKeyBase58": "H3C2AVvLMv6gmMNam3uVAjZpfkcJCwDwnZn6z3wXmqPV" 30 | }, { 31 | "id": "did:op:b6e2eb5eff1a093ced9826315d5a4ef6c5b5c8bd3c49890ee284231d7e1d0aaa#keys-3", 32 | "type": "RsaPublicKeyExchangeKey2018", 33 | "owner": "did:op:5f6b885202ffb9643874be529302eb00d55e226959f1fbacaeda592c5b5c9484", 34 | "publicKeyPem": "-----BEGIN PUBLIC KEY...END PUBLIC KEY-----\r\n" 35 | }], 36 | "verifiableCredential": [{ 37 | "@context": [ 38 | "https://www.w3.org/2018/credentials/v1", 39 | "https://www.w3.org/2018/credentials/examples/v1" 40 | ], 41 | "id": "1872", 42 | "type": ["read", "update", "deactivate"], 43 | "issuer": "0x610D9314EDF2ced7681BA1633C33fdb8cF365a12", 44 | "issuanceDate": "2019-01-01T19:73:24Z", 45 | "credentialSubject": { 46 | "id": "0x89328493849328493284932" 47 | }, 48 | "proof": { 49 | "type": "RsaSignature2018", 50 | "created": "2019-01-01T19:73:24Z", 51 | "proofPurpose": "assertionMethod", 52 | "signatureValue": "ABCJSDAO23...1tzjn4w==" 53 | } 54 | }], 55 | "service": [ 56 | { 57 | "index": 0, 58 | "serviceEndpoint": "http://localhost:5000/api/aquarius/assets/ddo/{did}", 59 | "type": "metadata", 60 | "attributes": { 61 | "encryptedFiles": "0x2e48ceefcca7abb024f90c87c676fce8f7913f889605a349c08c0c4a822c69ad651e122cc81db4fbb52938ac627786491514f37a2ebfd04fd98ec726f1d9061ed52f13fde132222af34d9af8ec358429cf45fc669f81a607185cb9a8150df3cbb2b4e3e382fb16429be228ddd920f061b78dd54701025fac8aab976239fb31a5b60a57393e96a338324c5ac8a5600a1247339c4835533cecdb5b53caf6b6f9d6478b579b7426f650a4154a20d18a9d49f509770af62647a57fc174741b47af3c8beeaaa76bee276cce8fba1f3fec0e1c", 62 | "main": { 63 | "author": "Met Office", 64 | "dateCreated": "2019-02-08T08:13:49Z", 65 | "files": [ 66 | { 67 | "url": "ipfs://QmXtkGkWCG47tVpiBr8f5FdHuCMPq8h2jhck4jgjSXKiWZ", 68 | "index": 0, 69 | "checksum": "efb2c764274b745f5fc37f97c6b0e764", 70 | "contentLength": "4535431", 71 | "contentType": "text/csv", 72 | "encoding": "UTF-8", 73 | "compression": "zip" 74 | } 75 | ], 76 | "license": "CC-BY", 77 | "name": "UK Weather information 2011", 78 | "cost": "0", 79 | "type": "dataset" 80 | }, 81 | 82 | "additionalInformation": { 83 | 84 | "description": "Weather information of UK including temperature and humidity", 85 | "tags": ["weather", "uk", "2011", "temperature", "humidity"], 86 | "workExample": "423432fsd,51.509865,-0.118092,2011-01-01T10:55:11+00:00,7.2,68", 87 | "copyrightHolder": "Met Office", 88 | "links": [ 89 | { 90 | "name": "Sample of Asset Data", 91 | "type": "sample", 92 | "url": "https://foo.com/sample.csv" 93 | }, 94 | { 95 | "name": "Data Format Definition", 96 | "type": "format", 97 | "url": "https://foo.com/sampl2.csv" 98 | } 99 | ], 100 | "inLanguage": "en", 101 | 102 | "updateFrequency": "yearly", 103 | "structuredMarkup": [ 104 | { 105 | "uri": "http://skos.um.es/unescothes/C01194/jsonld", 106 | "mediaType": "application/ld+json" 107 | }, 108 | { 109 | "uri": "http://skos.um.es/unescothes/C01194/turtle", 110 | "mediaType": "text/turtle" 111 | } 112 | ] 113 | }, 114 | 115 | "curation": { 
116 | "numVotes": 123, 117 | "rating": 0.0, 118 | "schema": "Binary Votting", 119 | "isListed": true 120 | } 121 | } 122 | }, 123 | { 124 | "type": "access", 125 | "index": 1, 126 | "serviceEndpoint": "http://localhost:8030/api/services/consume", 127 | "templateId": "0x1234", 128 | "attributes": { 129 | "main": { 130 | "name": "dataAssetAccessServiceAgreement", 131 | "creator": "", 132 | "datePublished": "2019-02-08T08:13:49Z", 133 | "cost": "0", 134 | "timeout": 36000 135 | }, 136 | "additionalInformation": { 137 | "description": "" 138 | } 139 | } 140 | 141 | } 142 | ] 143 | } 144 | -------------------------------------------------------------------------------- /ocean_provider/utils/test/test_basics.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2023 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | from datetime import datetime, timedelta, timezone 6 | 7 | import pytest 8 | from ocean_provider.utils.basics import ( 9 | decode_keyed, 10 | get_configured_chains, 11 | get_provider_addresses, 12 | get_provider_private_key, 13 | get_value_from_decoded_env, 14 | get_web3, 15 | get_web3_connection_provider, 16 | send_ether, 17 | validate_timestamp, 18 | ) 19 | from ocean_provider.utils.currency import to_wei 20 | 21 | 22 | @pytest.mark.unit 23 | def test_get_web3_connection_provider(monkeypatch): 24 | # typical http uri "http://foo.com" 25 | provider = get_web3_connection_provider("http://foo.com") 26 | assert provider.endpoint_uri == "http://foo.com" 27 | 28 | # typical https uri "https://bar.com" 29 | provider = get_web3_connection_provider("https://bar.com") 30 | assert provider.endpoint_uri == "https://bar.com" 31 | 32 | # non-supported name 33 | with pytest.raises(AssertionError): 34 | get_web3_connection_provider("not_network_name") 35 | 36 | # typical websockets uri "wss://foo.com" 37 | provider = get_web3_connection_provider("wss://bah.com") 38 | assert provider.endpoint_uri == "wss://bah.com" 39 | 40 | 41 | @pytest.mark.unit 42 | def test_send_ether(publisher_wallet, consumer_address): 43 | assert send_ether( 44 | get_web3(8996), publisher_wallet, consumer_address, to_wei(1) 45 | ), "Send ether was unsuccessful." 
46 | 47 | 48 | @pytest.mark.unit 49 | def test_validate_timestamp(): 50 | timestamp_future = int( 51 | (datetime.now(timezone.utc) + timedelta(hours=1)).timestamp() 52 | ) 53 | assert validate_timestamp(timestamp_future) 54 | assert validate_timestamp(1644831664000) is False 55 | assert validate_timestamp(str(timestamp_future)) 56 | 57 | timestamp_past = (datetime.now(timezone.utc) - timedelta(hours=1)).timestamp() 58 | assert validate_timestamp(timestamp_past) is False 59 | 60 | 61 | @pytest.mark.unit 62 | def test_decode_keyed(monkeypatch): 63 | monkeypatch.setenv("TEST_ENV", '{"valid": "json"}') 64 | assert decode_keyed("TEST_ENV") == {"valid": "json"} 65 | monkeypatch.setenv("TEST_ENV", '{"invalid json"}') 66 | assert not decode_keyed("TEST_ENV") 67 | monkeypatch.setenv("TEST_ENV", "simple string") 68 | assert not decode_keyed("TEST_ENV") 69 | 70 | 71 | @pytest.mark.unit 72 | def test_get_configured_chains(monkeypatch): 73 | monkeypatch.setenv("NETWORK_URL", '{"3": "http://127.0.0.1:8545", "15": "fifteen"}') 74 | assert get_configured_chains() == [3, 15] 75 | 76 | monkeypatch.setenv("NETWORK_URL", "http://127.0.0.1:8545") 77 | assert get_configured_chains() == [8996] 78 | 79 | monkeypatch.delenv("NETWORK_URL") 80 | with pytest.raises(Exception, match="No chains configured"): 81 | get_configured_chains() 82 | 83 | 84 | @pytest.mark.unit 85 | def test_get_value_from_decoded_env(monkeypatch): 86 | monkeypatch.setenv("SOME_ENV", '{"3": "three", "15": "fifteen"}') 87 | assert get_value_from_decoded_env(3, "SOME_ENV") == "three" 88 | 89 | with pytest.raises(Exception, match="Unconfigured chain_id"): 90 | get_value_from_decoded_env(7, "SOME_ENV") 91 | 92 | with pytest.raises(Exception, match="Unconfigured chain_id"): 93 | get_value_from_decoded_env(None, "SOME_ENV") 94 | 95 | monkeypatch.setenv("SOME_ENV", "simple string") 96 | assert get_value_from_decoded_env(3, "SOME_ENV") == "simple string" 97 | 98 | 99 | @pytest.mark.unit 100 | def test_get_provider_addresses(monkeypatch): 101 | monkeypatch.setenv("NETWORK_URL", '{"3": "http://127.0.0.1:8545"}') 102 | monkeypatch.setenv( 103 | "PROVIDER_PRIVATE_KEY", 104 | '{"3": "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215"}', 105 | ) 106 | assert 3 in get_provider_addresses() 107 | 108 | monkeypatch.setenv("NETWORK_URL", "http://127.0.0.1:8545") 109 | monkeypatch.setenv( 110 | "PROVIDER_PRIVATE_KEY", 111 | "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215", 112 | ) 113 | assert 8996 in get_provider_addresses() 114 | 115 | monkeypatch.setenv("NETWORK_URL", '{"3": "http://127.0.0.1:8545"}') 116 | monkeypatch.setenv( 117 | "PROVIDER_PRIVATE_KEY", 118 | "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215", 119 | ) 120 | with pytest.raises(Exception, match="must both be single or both json encoded"): 121 | get_provider_addresses() 122 | 123 | monkeypatch.setenv( 124 | "PROVIDER_PRIVATE_KEY", 125 | '{"3": "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215"}', 126 | ) 127 | monkeypatch.setenv("NETWORK_URL", "http://127.0.0.1:8545") 128 | with pytest.raises(Exception, match="must both be single or both json encoded"): 129 | get_provider_addresses() 130 | 131 | 132 | @pytest.mark.unit 133 | def test_get_provider_private_key(monkeypatch): 134 | monkeypatch.delenv("UNIVERSAL_PRIVATE_KEY") 135 | monkeypatch.setenv( 136 | "PROVIDER_PRIVATE_KEY", 137 | '{"3": "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215"}', 138 | ) 139 | assert 
get_provider_private_key(3).startswith("0xfd5c1") 140 | 141 | with pytest.raises( 142 | Exception, 143 | match="Must define UNIVERSAL_PRIVATE_KEY or a single PROVIDER_PRIVATE_KEY.", 144 | ): 145 | get_provider_private_key(None, use_universal_key=True) 146 | 147 | monkeypatch.setenv( 148 | "PROVIDER_PRIVATE_KEY", 149 | "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215", 150 | ) 151 | assert get_provider_private_key(8996).startswith("0xfd5c1") 152 | 153 | monkeypatch.delenv("PROVIDER_PRIVATE_KEY") 154 | monkeypatch.setenv( 155 | "UNIVERSAL_PRIVATE_KEY", 156 | "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215", 157 | ) 158 | assert get_provider_private_key(None, use_universal_key=True).startswith("0xfd5c1") 159 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 5 | [![banner](https://raw.githubusercontent.com/oceanprotocol/art/master/github/repo-banner%402x.png)](https://oceanprotocol.com) 6 | 7 | [![Maintainability](https://api.codeclimate.com/v1/badges/6f5987cfdd2fd265047b/maintainability)](https://codeclimate.com/github/oceanprotocol/provider/maintainability) 8 | [![Test Coverage](https://api.codeclimate.com/v1/badges/6f5987cfdd2fd265047b/test_coverage)](https://codeclimate.com/github/oceanprotocol/provider/test_coverage) 9 | [![GitHub contributors](https://img.shields.io/github/contributors/oceanprotocol/provider.svg)](https://github.com/oceanprotocol/provider/graphs/contributors) 10 | 11 | # Provider 12 | REST API for provider of data services 13 | 14 | This is part of the Ocean Protocol stack. 15 | 16 | This is feature complete and is a BETA version. 17 | 18 | ## What Provider does: 19 | 20 | - The only component that can access your data 21 | - Performs checks on chain for buyer permissions and payments 22 | - Encrypts the URL and metadata during publish 23 | - Decrypts the URL when the dataset is downloaded or a compute job is started 24 | - Provides access to data assets by streaming data (and never the URL) 25 | - Provides compute services (connects to C2D environment) 26 | - Typically run by the Data Provider 27 | 28 | ### Technology used: 29 | 30 | Python Flask HTTP server 31 | 32 | ## Starting the server locally 33 | 34 | ### Quick start 35 | Uses the Rinkeby network with a remote metadata store instance running at https://aquarius.marketplace.dev-ocean.com 36 | 37 | ```bash 38 | git clone git@github.com:oceanprotocol/provider.git 39 | cd provider/ 40 | 41 | python3 -m venv venv 42 | source venv/bin/activate 43 | 44 | pip install -r requirements_dev.txt 45 | cp .env.example .env 46 | 47 | flask run --port=8030 48 | 49 | ``` 50 | 51 | ### Detailed steps 52 | 53 | #### 1. Clone the repo 54 | ```bash 55 | git clone git@github.com:oceanprotocol/provider.git 56 | cd provider/ 57 | ``` 58 | 59 | #### 2. Virtual env (optional) 60 | Before running it locally, we recommend setting up a virtual environment: 61 | 62 | ```bash 63 | virtualenv venv -p python3.8 64 | # OR: python -m venv venv 65 | source venv/bin/activate 66 | ``` 67 | 68 | #### 3. Requirements 69 | 70 | Install all the requirements: 71 | 72 | ``` 73 | pip install -r requirements_dev.txt 74 | ``` 75 | 76 | #### 4. Dependencies 77 | 78 | *Metadata store (Aquarius).* Do one of the following: 79 | * Run Aquarius locally, see https://github.com/oceanprotocol/aquarius 80 | * Point to a remote instance such as `https://aquarius.marketplace.dev-ocean.com`.
81 | In this case replace the `aquarius.url` option in the `config.ini` file with the appropriate URL. 82 | 83 | 84 | *Ethereum network.* Do one of the following: 85 | * Run ganache-cli 86 | * Point to the Rinkeby testnet or any other Ethereum network 87 | 88 | Make sure that ocean contracts (https://github.com/oceanprotocol/contracts) are deployed to your network of choice. 89 | Update the `network` option in the `config.ini` file with the proper network URL. For now it must be a URL; support for simple network names (e.g. mainnet) is planned for the future. 90 | 91 | #### 5. Start the provider server 92 | Add the corresponding environment variables in your `.env` file. Here is an example: 93 | 94 | ``` 95 | FLASK_APP=ocean_provider/run.py 96 | PROVIDER_ADDRESS=your ethereum address goes here 97 | PROVIDER_PRIVATE_KEY= the private key or string containing a dict of chain_id to private key pairs 98 | PROVIDER_FEE_TOKEN = the address of the ERC20 token used to collect fees, or string containing a dict of chain_id to token address pairs 99 | ``` 100 | 101 | You might also want to set `FLASK_ENV=development`. Then run ```flask run --port=8030```. A concrete multi-network `.env` sketch is shown after the contributing notes below. 102 | 103 | Refer to the [API.md](API.md) file for endpoints and payloads. 104 | 105 | ##### Environment variables 106 | * `REQUEST_RETRIES` defines the number of times file downloads are attempted, accounting for network glitches and connectivity issues. Defaults to 1 (one attempt, meaning no retries). 107 | * `RBAC_SERVER_URL` defines the URL of the RBAC permissions server. Defaults to None (no special permissions). 108 | * `PRIVATE_PROVIDER`, if set, adds a `"providerAccess": "private"` key-value pair to RBAC requests. 109 | * `REDIS_CONNECTION` defines a connection URL to Redis. Defaults to None (no Redis connection; the SQLite database is used instead). 110 | * `TEST_PRIVATE_KEY1` and `TEST_PRIVATE_KEY2` are private wallet keys for publisher and consumer tests. 111 | * `OPERATOR_SERVICE_URL` defines the connection to the C2D environment. 112 | * `LOG_CFG` and `LOG_LEVEL` define the location of the log file and the logging level, respectively. 113 | * `IPFS_GATEWAY` defines the IPFS gateway used to resolve `ipfs://` URLs. 114 | * `ARWEAVE_GATEWAY` defines the Arweave gateway used to resolve Arweave transaction IDs. 115 | * `AUTHORIZED_DECRYPTERS` is a list of addresses that are authorized to decrypt chain data. Use it to restrict access to certain callers only (e.g. a custom Aquarius instance). Empty by default, meaning all decrypters are authorized. 116 | * `USE_CHAIN_PROOF` or `USE_HTTP_PROOF` select the mechanism for saving proof-of-download information. If `USE_CHAIN_PROOF` is set to any truthy value, the proof is sent on-chain. If `USE_HTTP_PROOF` is defined, it must contain an HTTP endpoint that accepts POST requests. 117 | * `MAX_CHECKSUM_LENGTH` defines the maximum file size for which a checksum is computed; for larger files the checksum is skipped. 118 | 119 | 120 | #### Before you commit 121 | If you are a contributor, make sure you install the pre-commit hooks using the command `pre-commit install`. This will make sure your imports are sorted and your code is properly formatted before committing. We use `black`, `isort` and `flake8` to keep code clean. 122 | 123 | Licensing your commits is also available: use the command `licenseheaders -t .copyright.tmpl -x venv` (or replace "venv" with your local virtual environment path). This option is not available as a pre-commit hook since it takes longer.
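For reference, here is a minimal multi-network `.env` sketch. It mirrors the single-value vs. JSON-encoded-dict formats exercised in `ocean_provider/utils/test/test_basics.py`; the chain IDs, node URLs and keys below are placeholders only, not recommended values:

```
# Single network: plain values
NETWORK_URL=http://127.0.0.1:8545
PROVIDER_PRIVATE_KEY=0x<private key for that network>

# Multiple networks: JSON-encoded dicts keyed by chain id. NETWORK_URL and
# PROVIDER_PRIVATE_KEY must both be plain or both JSON-encoded; mixing the
# two raises "must both be single or both json encoded".
NETWORK_URL={"3": "https://<node url for chain 3>", "137": "https://<node url for chain 137>"}
PROVIDER_PRIVATE_KEY={"3": "0x<key for chain 3>", "137": "0x<key for chain 137>"}
```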
124 | 125 | #### Versioning and releases 126 | To release a new version of Provider, please follow the steps decribed in [the release process](release-process.md) 127 | -------------------------------------------------------------------------------- /tests/resources/branin.arff: -------------------------------------------------------------------------------- 1 | % 1. Title: Branin Function 2 | % 3. Number of instances: 225 3 | % 6. Number of attributes: 2 4 | 5 | @relation branin 6 | 7 | @attribute 'x0' numeric 8 | @attribute 'x1' numeric 9 | @attribute 'y' numeric 10 | 11 | @data 12 | -5.0000,0.0000,308.1291 13 | -3.9286,0.0000,206.1783 14 | -2.8571,0.0000,135.3867 15 | -1.7857,0.0000,93.5886 16 | -0.7143,0.0000,69.1343 17 | 0.3571,0.0000,48.6776 18 | 1.4286,0.0000,27.2812 19 | 2.5000,0.0000,10.3079 20 | 3.5714,0.0000,5.1273 21 | 4.6429,0.0000,11.2800 22 | 5.7143,0.0000,19.3524 23 | 6.7857,0.0000,19.7343 24 | 7.8571,0.0000,12.1309 25 | 8.9286,0.0000,5.9167 26 | 10.0000,0.0000,10.9609 27 | -5.0000,1.0714,272.4470 28 | -3.9286,1.0714,176.7984 29 | -2.8571,1.0714,111.6735 30 | -1.7857,1.0714,74.9065 31 | -0.7143,1.0714,54.8479 32 | 0.3571,1.0714,38.1512 33 | 1.4286,1.0714,19.8791 34 | 2.5000,1.0714,5.3947 35 | 3.5714,1.0714,2.0674 36 | 4.6429,1.0714,9.4378 37 | 5.7143,1.0714,18.0925 38 | 6.7857,1.0714,18.4209 39 | 7.8571,1.0714,10.1286 40 | 8.9286,1.0714,2.5899 41 | 10.0000,1.0714,5.6739 42 | -5.0000,2.1429,239.0608 43 | -3.9286,2.1429,149.7145 44 | -2.8571,2.1429,90.2563 45 | -1.7857,2.1429,58.5204 46 | -0.7143,2.1429,42.8573 47 | 0.3571,2.1429,29.9206 48 | 1.4286,2.1429,14.7730 49 | 2.5000,2.1429,2.7775 50 | 3.5714,2.1429,1.3035 51 | 4.6429,2.1429,9.8916 52 | 5.7143,2.1429,19.1284 53 | 6.7857,2.1429,19.4035 54 | 7.8571,2.1429,10.4222 55 | 8.9286,2.1429,1.5589 56 | 10.0000,2.1429,2.6829 57 | -5.0000,3.2143,207.9706 58 | -3.9286,3.2143,124.9265 59 | -2.8571,3.2143,71.1350 60 | -1.7857,3.2143,44.4302 61 | -0.7143,3.2143,33.1627 62 | 0.3571,3.2143,23.9860 63 | 1.4286,3.2143,11.9628 64 | 2.5000,3.2143,2.4561 65 | 3.5714,3.2143,2.8354 66 | 4.6429,3.2143,12.6413 67 | 5.7143,3.2143,22.4603 68 | 6.7857,3.2143,22.6820 69 | 7.8571,3.2143,13.0117 70 | 8.9286,3.2143,2.8239 71 | 10.0000,3.2143,1.9878 72 | -5.0000,4.2857,179.1762 73 | -3.9286,4.2857,102.4344 74 | -2.8571,4.2857,54.3096 75 | -1.7857,4.2857,32.6360 76 | -0.7143,4.2857,25.7640 77 | 0.3571,4.2857,20.3473 78 | 1.4286,4.2857,11.4485 79 | 2.5000,4.2857,4.4307 80 | 3.5714,4.2857,6.6633 81 | 4.6429,4.2857,17.6869 82 | 5.7143,4.2857,28.0880 83 | 6.7857,4.2857,28.2563 84 | 7.8571,4.2857,17.8971 85 | 8.9286,4.2857,6.3848 86 | 10.0000,4.2857,3.5886 87 | -5.0000,5.3571,152.6778 88 | -3.9286,5.3571,82.2383 89 | -2.8571,5.3571,39.7801 90 | -1.7857,5.3571,23.1376 91 | -0.7143,5.3571,20.6612 92 | 0.3571,5.3571,19.0045 93 | 1.4286,5.3571,13.2301 94 | 2.5000,5.3571,8.7012 95 | 3.5714,5.3571,12.7871 96 | 4.6429,5.3571,25.0284 97 | 5.7143,5.3571,36.0118 98 | 6.7857,5.3571,36.1266 99 | 7.8571,5.3571,25.0785 100 | 8.9286,5.3571,12.2416 101 | 10.0000,5.3571,7.4853 102 | -5.0000,6.4286,128.4753 103 | -3.9286,6.4286,64.3380 104 | -2.8571,6.4286,27.5466 105 | -1.7857,6.4286,15.9352 106 | -0.7143,6.4286,17.8543 107 | 0.3571,6.4286,19.9576 108 | 1.4286,6.4286,17.3077 109 | 2.5000,6.4286,15.2676 110 | 3.5714,6.4286,21.2068 111 | 4.6429,6.4286,34.6659 112 | 5.7143,6.4286,46.2314 113 | 6.7857,6.4286,46.2929 114 | 7.8571,6.4286,34.5557 115 | 8.9286,6.4286,20.3944 116 | 10.0000,6.4286,13.6780 117 | -5.0000,7.5000,106.5687 118 | -3.9286,7.5000,48.7337 119 | 
-2.8571,7.5000,17.6089 120 | -1.7857,7.5000,11.0287 121 | -0.7143,7.5000,17.3434 122 | 0.3571,7.5000,23.2066 123 | 1.4286,7.5000,23.6812 124 | 2.5000,7.5000,24.1300 125 | 3.5714,7.5000,31.9225 126 | 4.6429,7.5000,46.5993 127 | 5.7143,7.5000,58.7469 128 | 6.7857,7.5000,58.7550 129 | 7.8571,7.5000,46.3289 130 | 8.9286,7.5000,30.8430 131 | 10.0000,7.5000,22.1665 132 | -5.0000,8.5714,86.9580 133 | -3.9286,8.5714,35.4253 134 | -2.8571,8.5714,9.9672 135 | -1.7857,8.5714,8.4181 136 | -0.7143,8.5714,19.1283 137 | 0.3571,8.5714,28.7516 138 | 1.4286,8.5714,32.3505 139 | 2.5000,8.5714,35.2882 140 | 3.5714,8.5714,44.9340 141 | 4.6429,8.5714,60.8286 142 | 5.7143,8.5714,73.5584 143 | 6.7857,8.5714,73.5131 144 | 7.8571,8.5714,60.3980 145 | 8.9286,8.5714,43.5876 146 | 10.0000,8.5714,32.9510 147 | -5.0000,9.6429,69.6433 148 | -3.9286,9.6429,24.4128 149 | -2.8571,9.6429,4.6214 150 | -1.7857,9.6429,8.1034 151 | -0.7143,9.6429,23.2092 152 | 0.3571,9.6429,36.5925 153 | 1.4286,9.6429,43.3159 154 | 2.5000,9.6429,48.7424 155 | 3.5714,9.6429,60.2415 156 | 4.6429,9.6429,77.3538 157 | 5.7143,9.6429,90.6657 158 | 6.7857,9.6429,90.5670 159 | 7.8571,9.6429,76.7630 160 | 8.9286,9.6429,58.6281 161 | 10.0000,9.6429,46.0314 162 | -5.0000,10.7143,54.6244 163 | -3.9286,10.7143,15.6962 164 | -2.8571,10.7143,1.5715 165 | -1.7857,10.7143,10.0846 166 | -0.7143,10.7143,29.5860 167 | 0.3571,10.7143,46.7293 168 | 1.4286,10.7143,56.5771 169 | 2.5000,10.7143,64.4925 170 | 3.5714,10.7143,77.8449 171 | 4.6429,10.7143,96.1749 172 | 5.7143,10.7143,110.0690 173 | 6.7857,10.7143,109.9169 174 | 7.8571,10.7143,95.4240 175 | 8.9286,10.7143,75.9645 176 | 10.0000,10.7143,61.4077 177 | -5.0000,11.7857,41.9015 178 | -3.9286,11.7857,9.2755 179 | -2.8571,11.7857,0.8175 180 | -1.7857,11.7857,14.3618 181 | -0.7143,11.7857,38.2587 182 | 0.3571,11.7857,59.1620 183 | 1.4286,11.7857,72.1342 184 | 2.5000,11.7857,82.5385 185 | 3.5714,11.7857,97.7442 186 | 4.6429,11.7857,117.2919 187 | 5.7143,11.7857,131.7682 188 | 6.7857,11.7857,131.5628 189 | 7.8571,11.7857,116.3808 190 | 8.9286,11.7857,95.5968 191 | 10.0000,11.7857,79.0800 192 | -5.0000,12.8571,31.4745 193 | -3.9286,12.8571,5.1508 194 | -2.8571,12.8571,2.3595 195 | -1.7857,12.8571,20.9349 196 | -0.7143,12.8571,49.2274 197 | 0.3571,12.8571,73.8906 198 | 1.4286,12.8571,89.9873 199 | 2.5000,12.8571,102.8804 200 | 3.5714,12.8571,119.9394 201 | 4.6429,12.8571,140.7049 202 | 5.7143,12.8571,155.7634 203 | 6.7857,12.8571,155.5045 204 | 7.8571,12.8571,139.6336 205 | 8.9286,12.8571,117.5251 206 | 10.0000,12.8571,99.0481 207 | -5.0000,13.9286,23.3435 208 | -3.9286,13.9286,3.3220 209 | -2.8571,13.9286,6.1974 210 | -1.7857,13.9286,29.8039 211 | -0.7143,13.9286,62.4919 212 | 0.3571,13.9286,90.9152 213 | 1.4286,13.9286,110.1363 214 | 2.5000,13.9286,125.5183 215 | 3.5714,13.9286,144.4305 216 | 4.6429,13.9286,166.4138 217 | 5.7143,13.9286,182.0544 218 | 6.7857,13.9286,181.7421 219 | 7.8571,13.9286,165.1823 220 | 8.9286,13.9286,141.7492 221 | 10.0000,13.9286,121.3122 222 | -5.0000,15.0000,17.5083 223 | -3.9286,15.0000,3.7891 224 | -2.8571,15.0000,12.3311 225 | -1.7857,15.0000,40.9688 226 | -0.7143,15.0000,78.0524 227 | 0.3571,15.0000,110.2356 228 | 1.4286,15.0000,132.5812 229 | 2.5000,15.0000,150.4520 230 | 3.5714,15.0000,171.2176 231 | 4.6429,15.0000,194.4186 232 | 5.7143,15.0000,210.6414 233 | 6.7857,15.0000,210.2757 234 | 7.8571,15.0000,193.0269 235 | 8.9286,15.0000,168.2693 236 | 10.0000,15.0000,145.8722 -------------------------------------------------------------------------------- 
/ocean_provider/utils/provider_fees.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | from datetime import datetime, timezone 4 | from typing import Any, Dict 5 | 6 | import requests 7 | from eth_keys import KeyAPI 8 | from eth_keys.backends import NativeECCBackend 9 | from ocean_provider.requests_session import get_requests_session 10 | from ocean_provider.utils.address import get_provider_fee_token 11 | from ocean_provider.utils.asset import get_asset_from_metadatastore 12 | from ocean_provider.utils.basics import ( 13 | get_metadata_url, 14 | get_provider_wallet, 15 | get_web3, 16 | get_network_name, 17 | ) 18 | from ocean_provider.utils.compute_environments import ( 19 | get_c2d_environments, 20 | get_environment, 21 | ) 22 | from ocean_provider.utils.currency import parse_units 23 | from ocean_provider.utils.datatoken import get_datatoken_contract, validate_order 24 | from ocean_provider.utils.services import Service 25 | from ocean_provider.utils.url import is_this_same_provider 26 | from web3.main import Web3 27 | 28 | logger = logging.getLogger(__name__) 29 | keys = KeyAPI(NativeECCBackend) 30 | requests_session = get_requests_session() 31 | 32 | 33 | def get_provider_fees( 34 | asset, 35 | service: Service, 36 | consumer_address: str, 37 | valid_until: int, 38 | compute_env: str = None, 39 | force_zero: bool = False, 40 | ) -> Dict[str, Any]: 41 | provider_wallet = get_provider_wallet(asset.chain_id) 42 | provider_fee_address = provider_wallet.address 43 | provider_fee_token = get_provider_fee_token(asset.chain_id) 44 | 45 | web3 = get_web3(asset.chain_id) 46 | 47 | if compute_env and not force_zero: 48 | provider_fee_amount = get_provider_fee_amount( 49 | valid_until, compute_env, web3, provider_fee_token 50 | ) 51 | else: 52 | provider_fee_amount = 0 53 | 54 | provider_data = json.dumps( 55 | { 56 | "environment": compute_env, 57 | "timestamp": datetime.now(timezone.utc).timestamp(), 58 | "dt": service.datatoken_address, 59 | "id": service.id, 60 | }, 61 | separators=(",", ":"), 62 | ) 63 | message_hash = Web3.solidityKeccak( 64 | ["bytes", "address", "address", "uint256", "uint256"], 65 | [ 66 | Web3.toHex(Web3.toBytes(text=provider_data)), 67 | Web3.toChecksumAddress(provider_fee_address), 68 | Web3.toChecksumAddress(provider_fee_token), 69 | provider_fee_amount, 70 | valid_until, 71 | ], 72 | ) 73 | 74 | pk = keys.PrivateKey(provider_wallet.key) 75 | prefix = "\x19Ethereum Signed Message:\n32" 76 | signable_hash = Web3.solidityKeccak( 77 | ["bytes", "bytes"], [Web3.toBytes(text=prefix), Web3.toBytes(message_hash)] 78 | ) 79 | signed = keys.ecdsa_sign(message_hash=signable_hash, private_key=pk) 80 | 81 | provider_fee = { 82 | "providerFeeAddress": provider_fee_address, 83 | "providerFeeToken": provider_fee_token, 84 | "providerFeeAmount": provider_fee_amount, 85 | "providerData": Web3.toHex(Web3.toBytes(text=provider_data)), 86 | # make it compatible with the latest OpenZeppelin, see https://github.com/OpenZeppelin/openzeppelin-contracts/pull/1622 87 | "v": (signed.v + 27) if signed.v <= 1 else signed.v, 88 | "r": Web3.toHex(Web3.toBytes(signed.r).rjust(32, b"\0")), 89 | "s": Web3.toHex(Web3.toBytes(signed.s).rjust(32, b"\0")), 90 | "validUntil": valid_until, 91 | } 92 | network_name = get_network_name(asset.chain_id) 93 | logger.debug(f"Provider {network_name}: Returning provider_fees: {provider_fee}") 94 | return provider_fee 95 | 96 | 97 | def comb_for_valid_transfer_and_fees(all_datasets, compute_env): 98 | for
i, dataset in enumerate(all_datasets): 99 | if "transferTxId" not in dataset: 100 | continue 101 | 102 | asset = get_asset_from_metadatastore( 103 | get_metadata_url(), dataset.get("documentId") 104 | ) 105 | service = asset.get_service_by_id(dataset["serviceId"]) 106 | web3 = get_web3(asset.chain_id) 107 | 108 | try: 109 | _tx, _order_log, _provider_fees_log, start_order_tx_id = validate_order( 110 | web3, 111 | dataset["consumerAddress"], 112 | dataset["transferTxId"], 113 | asset, 114 | service, 115 | {"environment": compute_env}, 116 | allow_expired_provider_fees=False, 117 | ) 118 | except Exception: 119 | # order does not exist or is expired, so we need new provider fees 120 | continue 121 | 122 | return i 123 | 124 | return 0 125 | 126 | 127 | def get_provider_fees_or_remote( 128 | asset, service, consumer_address, valid_until, compute_env, force_zero, dataset 129 | ): 130 | valid_order = None 131 | if "transferTxId" in dataset: 132 | web3 = get_web3(asset.chain_id) 133 | try: 134 | _tx, _order_log, _provider_fees_log, start_order_tx_id = validate_order( 135 | web3, 136 | consumer_address, 137 | dataset["transferTxId"], 138 | asset, 139 | service, 140 | {"environment": compute_env}, 141 | allow_expired_provider_fees=True, 142 | ) 143 | log_valid_until = _provider_fees_log.args.validUntil 144 | if valid_until <= log_valid_until: 145 | # already paid provider fees and both order and provider fees are still valid 146 | return {"validOrder": start_order_tx_id.hex()} 147 | else: 148 | valid_order = start_order_tx_id.hex() 149 | except Exception: 150 | # order does not exist or is expired, so we need new provider fees 151 | pass 152 | if is_this_same_provider(service.service_endpoint, asset.chain_id): 153 | provider_fee = get_provider_fees( 154 | asset, 155 | service, 156 | consumer_address, 157 | valid_until, 158 | compute_env, 159 | force_zero=force_zero, 160 | ) 161 | if provider_fee: 162 | provider_fee["providerFeeAmount"] = str(provider_fee["providerFeeAmount"]) 163 | result = {"datatoken": service.datatoken_address, "providerFee": provider_fee} 164 | else: 165 | # delegate to different provider 166 | response = requests.get( 167 | service.service_endpoint + "/api/services/initialize", params=dataset 168 | ) 169 | 170 | result = response.json() 171 | 172 | if valid_order: 173 | result["validOrder"] = valid_order 174 | 175 | return result 176 | 177 | 178 | def get_provider_fee_amount(valid_until, compute_env, web3, provider_fee_token): 179 | seconds = ( 180 | datetime.fromtimestamp(valid_until, timezone.utc) - datetime.now(timezone.utc) 181 | ).total_seconds()  # timedelta.seconds would drop whole days 182 | env = get_environment(get_c2d_environments(flat=True), compute_env) 183 | 184 | if provider_fee_token == "0x0000000000000000000000000000000000000000": 185 | return 0 186 | 187 | provider_fee_amount = float(seconds * float(env["priceMin"]) / 60) 188 | 189 | dt = get_datatoken_contract(web3, provider_fee_token) 190 | decimals = dt.caller.decimals() 191 | 192 | return parse_units(str(provider_fee_amount), decimals) 193 | --------------------------------------------------------------------------------
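As a closing illustration of the provider-fee signing scheme implemented in `get_provider_fees` above, the following is a minimal, hedged sketch (not part of the repository) of how a consumer could verify the returned `providerFee` payload off-chain. `recover_provider_fee_signer` is a hypothetical helper name; it relies only on `web3` and `eth_keys` calls already used in `provider_fees.py`:

```python
# Hypothetical helper, not part of the repository: verifies a providerFee
# payload as returned by get_provider_fees.
from eth_keys import KeyAPI
from eth_keys.backends import NativeECCBackend
from web3 import Web3

keys = KeyAPI(NativeECCBackend)


def recover_provider_fee_signer(provider_fee: dict) -> str:
    """Recover the address that signed a providerFee dict.

    Mirrors the hashing in get_provider_fees: solidityKeccak over
    (providerData, providerFeeAddress, providerFeeToken, providerFeeAmount,
    validUntil), then the EIP-191 personal-message prefix over the
    resulting 32-byte hash.
    """
    message_hash = Web3.solidityKeccak(
        ["bytes", "address", "address", "uint256", "uint256"],
        [
            provider_fee["providerData"],  # hex string, as built via Web3.toHex
            Web3.toChecksumAddress(provider_fee["providerFeeAddress"]),
            Web3.toChecksumAddress(provider_fee["providerFeeToken"]),
            int(provider_fee["providerFeeAmount"]),
            int(provider_fee["validUntil"]),
        ],
    )
    prefix = "\x19Ethereum Signed Message:\n32"
    signable_hash = Web3.solidityKeccak(
        ["bytes", "bytes"], [Web3.toBytes(text=prefix), Web3.toBytes(message_hash)]
    )
    # get_provider_fees shifts v to 27/28 for Solidity; eth_keys expects 0/1
    v = provider_fee["v"] - 27 if provider_fee["v"] >= 27 else provider_fee["v"]
    signature = keys.Signature(
        vrs=(v, int(provider_fee["r"], 16), int(provider_fee["s"], 16))
    )
    public_key = signature.recover_public_key_from_msg_hash(signable_hash)
    return public_key.to_checksum_address()


# Usage sketch: the recovered address should match providerFeeAddress.
# assert recover_provider_fee_signer(fee) == Web3.toChecksumAddress(
#     fee["providerFeeAddress"]
# )
```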