├── .DS_Store
├── .gitignore
├── .gitmodules
├── 0.to-update
├── IntegrationTests
│ ├── BigUpload
│ │ ├── Dockerfile
│ │ ├── build.sh
│ │ └── generateBigFile.sh
│ ├── DoNothingOneMinute
│ │ ├── Dockerfile
│ │ ├── build.sh
│ │ └── doNothingForOneMinute.sh
│ ├── NonDeterminist
│ │ ├── Dockerfile
│ │ ├── build.sh
│ │ └── generateNonDeterminist.sh
│ └── SgxApp
│ │ ├── build.sh
│ │ ├── dataset.enc.zip
│ │ ├── dataset.secret
│ │ └── dataset.txt
├── MemeGenerator
│ ├── Dockerfile
│ ├── README.md
│ ├── build.sh
│ ├── entrypoint.sh
│ ├── ressources
│ │ └── impact.ttf
│ └── src
│ │ ├── base.css
│ │ ├── base.html
│ │ └── memegenerator.py
├── R-Clifford-Attractors
│ ├── Dockerfile
│ └── build.sh
├── SudokuCLI
│ ├── Dockerfile
│ ├── build.sh
│ └── sudokuCLI.py
├── WebNotary
│ ├── Dockerfile
│ ├── build.sh
│ └── entrypoint.sh
├── Wordcloud
│ └── Dockerfile
├── anemometer
│ ├── .DS_Store
│ ├── app
│ │ ├── .DS_Store
│ │ ├── Dockerfile
│ │ ├── anemometer.js
│ │ └── entrypoint.sh
│ ├── chain.json
│ ├── deployed.json
│ ├── iexec.json
│ ├── orders.json
│ └── smart-contract
│ │ ├── .DS_Store
│ │ ├── .gitignore
│ │ ├── README.md
│ │ ├── contracts
│ │ ├── AnemometerOracle.sol
│ │ └── Migrations.sol
│ │ ├── daemon
│ │ ├── Dockerfile
│ │ ├── daemon.ts
│ │ ├── docker-compose.yml
│ │ ├── entrypoint.sh
│ │ ├── launch.ts
│ │ ├── package.json
│ │ └── utils
│ │ │ ├── addrToKey.ts
│ │ │ ├── index.ts
│ │ │ └── require.ts
│ │ ├── migrations
│ │ ├── 1_initial_migration.js
│ │ └── 2_deploy_contracts.js
│ │ ├── package.json
│ │ └── truffle.js
├── blender-images
│ ├── Dockerfile
│ ├── README.md
│ ├── build.sh
│ └── script.sh
├── blur-face
│ ├── Dockerfile
│ ├── README.md
│ ├── blurFace.py
│ ├── build.sh
│ └── script.sh
├── curl
│ └── Dockerfile
├── erazhu31
│ └── SoccerLiveScores
│ │ ├── Dockerfile
│ │ ├── smart-contract
│ │ ├── .gitignore
│ │ ├── README.md
│ │ ├── contracts
│ │ │ ├── LiveScoreOracle.sol
│ │ │ └── Migrations.sol
│ │ ├── daemon
│ │ │ ├── Dockerfile
│ │ │ ├── daemon.ts
│ │ │ ├── docker-compose.yml
│ │ │ ├── entrypoint.sh
│ │ │ ├── launch.ts
│ │ │ ├── package.json
│ │ │ └── utils
│ │ │ │ ├── addrToKey.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── require.ts
│ │ ├── migrations
│ │ │ ├── 1_initial_migration.js
│ │ │ └── 2_deploy_contracts.js
│ │ ├── package.json
│ │ └── truffle-config.js
│ │ └── src
│ │ └── oracle.js
├── face-swap
│ ├── Dockerfile
│ └── entrypoint.sh
├── factorial
│ ├── Dockerfile
│ └── factorial.py
├── find-face
│ ├── Dockerfile
│ ├── README.md
│ ├── build.sh
│ ├── findFace.py
│ └── script.sh
├── j48-bitcoin-tx-doracle
│ ├── app
│ │ ├── Dockerfile
│ │ ├── README.md
│ │ ├── build.sh
│ │ └── src
│ │ │ └── doracle.js
│ └── smart-contract
│ │ ├── contracts
│ │ ├── BitcoinTxDoracle.sol
│ │ └── Migrations.sol
│ │ ├── migrations
│ │ ├── 1_initial_migration.js
│ │ └── 2_deploy_contracts.js
│ │ ├── package.json
│ │ └── truffle.js
├── ndmg
│ ├── Dockerfile
│ └── demo-ndmg-on-iexec.sh
├── option-pricing
│ ├── Dockerfile
│ └── option-pricing.py
├── oyente
│ ├── Dockerfile
│ └── customScript.sh
├── package-tracker-oracle
│ ├── app
│ │ ├── .gitignore
│ │ ├── Dockerfile
│ │ ├── entrypoint.sh
│ │ ├── package-lock.json
│ │ ├── package-tracker.js
│ │ └── package.json
│ ├── chain.json
│ ├── deployed.json
│ ├── iexec.json
│ ├── orders.json
│ └── smart-contract
│ │ ├── .gitignore
│ │ ├── build
│ │ └── contracts
│ │ │ ├── IERC1271.json
│ │ │ ├── IERC20.json
│ │ │ ├── IERC734.json
│ │ │ ├── IOracle.json
│ │ │ ├── IOracleConsumer.json
│ │ │ ├── IexecClerkInterface.json
│ │ │ ├── IexecDoracle.json
│ │ │ ├── IexecHubInterface.json
│ │ │ ├── IexecInterface.json
│ │ │ ├── IexecODBLibCore.json
│ │ │ ├── IexecODBLibOrders.json
│ │ │ ├── LaPosteOracle.json
│ │ │ ├── Migrations.json
│ │ │ ├── Ownable.json
│ │ │ └── SignatureVerifier.json
│ │ ├── contracts
│ │ ├── LaPosteOracle.sol
│ │ └── Migrations.sol
│ │ ├── migrations
│ │ ├── 1_initial_migration.js
│ │ └── 2_deploy_contracts.js
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ └── truffle.js
├── param-checker
│ ├── Dockerfile
│ └── test.js
├── pengiundev-soccerapp
│ ├── contract
│ │ └── PriceOracle.sol
│ └── offchain
│ │ ├── Dockerfile
│ │ └── src
│ │ └── index.js
├── randomGenerator
│ ├── Dockerfile
│ ├── Dockerfile-unsafe
│ ├── build.sh
│ ├── metadata.txt
│ ├── signer
│ │ └── signer.py
│ └── src
│ │ └── app.py
├── scinumpy
│ ├── Dockerfile
│ └── build.sh
├── snap
│ ├── Dockerfile
│ ├── docker-run-test.sh
│ └── processDataset.sh
├── vanitygen
│ ├── Dockerfile
│ └── vanity-with-consensus.sh
├── windy-feed
│ ├── .gitignore
│ ├── README.md
│ ├── app
│ │ ├── Dockerfile
│ │ ├── entrypoint.sh
│ │ └── wind-feed.js
│ ├── chain.json
│ ├── deployed.json
│ ├── iexec.json
│ ├── orders.json
│ └── smart-contract
│ │ ├── contracts
│ │ ├── Migrations.sol
│ │ └── WindOracle.sol
│ │ ├── daemon
│ │ ├── Dockerfile
│ │ ├── daemon.ts
│ │ ├── docker-compose.yml
│ │ ├── entrypoint.sh
│ │ ├── launch.ts
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ └── utils
│ │ │ ├── addrToKey.ts
│ │ │ ├── index.ts
│ │ │ └── require.ts
│ │ ├── migrations
│ │ ├── 1_initial_migration.js
│ │ └── 2_deploy_contracts.js
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ └── truffle-config.js
└── xmrig
│ ├── Dockerfile
│ └── entrypoint.sh
├── README.md
├── cloud-computing
├── ffmpeg
│ ├── Dockerfile
│ ├── README.md
│ └── ffmpegPoCoReady.sh
├── gnuplot
│ ├── Dockerfile
│ ├── README.md
│ ├── build.sh
│ ├── gnuplotPoCoReady.sh
│ └── gnuplot_sin_and_log.gp
├── nodejs-hello-world
│ ├── .gitignore
│ ├── Readme.md
│ ├── src
│ │ └── app.js
│ ├── standard
│ │ ├── Dockerfile
│ │ ├── build
│ │ └── run
│ └── tee
│ │ ├── Dockerfile
│ │ ├── build
│ │ ├── confidential-assets
│ │ └── confidential-asset.txt
│ │ ├── protect-fs.sh
│ │ └── run
├── python-hello-world
│ ├── Readme.md
│ ├── src
│ │ └── app.py
│ ├── standard
│ │ ├── Dockerfile
│ │ ├── build
│ │ └── run
│ └── tee
│ │ ├── Dockerfile
│ │ ├── build
│ │ ├── confidential-assets
│ │ └── confidential-asset.txt
│ │ ├── protect-fs.sh
│ │ └── run
└── vanityeth
│ ├── Dockerfile
│ └── vanityeth-with-consensus.sh
└── offchain-computing
├── offchain-python-hello-world
├── Readme.md
├── src
│ └── app.py
├── standard
│ ├── Dockerfile
│ ├── build
│ └── run
└── tee
│ ├── Dockerfile
│ ├── build
│ ├── confidential-assets
│ └── confidential-asset.txt
│ ├── protect-fs.sh
│ └── run
└── offchain-tee-kaiko-pricefeed
├── README.md
├── src
└── app.py
└── tee
├── Dockerfile
├── README.md
├── build
├── datasets
└── encrypted
│ └── dataset_key.txt.zip
├── protect-fs.sh
└── run
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iExecBlockchainComputing/iexec-apps/afb2843ca746eb5c1d537cd247b67bf73ba32e0d/.DS_Store
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | **/iexec_in
2 | **/iexec_out
3 |
4 | .idea
5 |
6 | /offchain-computing/offchain-tee-kaiko-pricefeed/tee/confidential-assets/
7 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "cloud-computing/iexec-face-swap"]
2 | path = cloud-computing/iexec-face-swap
3 | url = https://github.com/iExecBlockchainComputing/iexec-face-swap
4 |
--------------------------------------------------------------------------------
/0.to-update/IntegrationTests/BigUpload/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04
2 | COPY generateBigFile.sh /generateBigFile.sh
3 | RUN chmod +x /generateBigFile.sh
4 | ENTRYPOINT ["/generateBigFile.sh"]
5 |
--------------------------------------------------------------------------------
/0.to-update/IntegrationTests/BigUpload/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | docker image build -f Dockerfile -t iexechub/it-bigupload:1.0.0 .
4 |
--------------------------------------------------------------------------------
/0.to-update/IntegrationTests/BigUpload/generateBigFile.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | #vanityDefaultResultFile=Result-log-*.txt
4 | resultFile=/iexec_out/result.txt
5 | consensusFile=/iexec_out/determinism.iexec
6 |
7 | mkdir /iexec_out
8 | rm -f $resultFile $consensusFile
9 |
10 | touch $resultFile
11 | # count = size of the output file in MiB (bs is 1 MiB)
12 | dd if=/dev/urandom of=$resultFile bs=1048576 count=500
13 |
14 | echo "Determinist">> $consensusFile
15 |
--------------------------------------------------------------------------------
/0.to-update/IntegrationTests/DoNothingOneMinute/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04
2 | COPY doNothingForOneMinute.sh /doNothingForOneMinute.sh
3 | RUN chmod +x /doNothingForOneMinute.sh
4 | ENTRYPOINT ["/doNothingForOneMinute.sh"]
5 |
--------------------------------------------------------------------------------
/0.to-update/IntegrationTests/DoNothingOneMinute/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | docker image build -f Dockerfile -t iexechub/it-donothingoneminute .
4 |
--------------------------------------------------------------------------------
/0.to-update/IntegrationTests/DoNothingOneMinute/doNothingForOneMinute.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 |
4 | resultFile=/iexec/result.txt
5 | consensusFile=/iexec/consensus.iexec
6 |
7 | mkdir /iexec
8 | rm -f $resultFile $consensusFile
9 |
10 | touch $resultFile
11 | echo "result" >> $resultFile
12 |
13 | touch $consensusFile
14 | echo "consensus" >> $consensusFile
15 |
16 | sleep 60
17 |
--------------------------------------------------------------------------------
/0.to-update/IntegrationTests/NonDeterminist/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04
2 | COPY generateNonDeterminist.sh /generateNonDeterminist.sh
3 | RUN chmod +x /generateNonDeterminist.sh
4 | ENTRYPOINT ["/generateNonDeterminist.sh"]
5 |
--------------------------------------------------------------------------------
/0.to-update/IntegrationTests/NonDeterminist/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | docker image build -f Dockerfile -t iexechub/it-nondeterminist .
4 |
--------------------------------------------------------------------------------
/0.to-update/IntegrationTests/NonDeterminist/generateNonDeterminist.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | resultFile=/iexec_out/result.txt
4 | consensusFile=/iexec_out/determinism.iexec
5 |
6 | echo "creating folder"
7 | mkdir /iexec_out
8 | rm -f $resultFile $consensusFile
9 |
10 | touch $resultFile
11 | echo $((1 + RANDOM)) >> $resultFile
12 |
13 | touch $consensusFile
14 | echo $((1 + RANDOM)) >> $consensusFile
15 |
16 | echo "done"
17 |
--------------------------------------------------------------------------------
/0.to-update/IntegrationTests/SgxApp/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | docker image build -f Dockerfile -t iexechub/sgx-app:it .
4 |
--------------------------------------------------------------------------------
/0.to-update/IntegrationTests/SgxApp/dataset.enc.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iExecBlockchainComputing/iexec-apps/afb2843ca746eb5c1d537cd247b67bf73ba32e0d/0.to-update/IntegrationTests/SgxApp/dataset.enc.zip
--------------------------------------------------------------------------------
/0.to-update/IntegrationTests/SgxApp/dataset.secret:
--------------------------------------------------------------------------------
1 | f58a3f36cd7095acc90dace3045bbd23c5c5a8f26623d7a0e7016f985f593da8|7a637210b156ba842e6f0b32aa904431
2 |
--------------------------------------------------------------------------------
/0.to-update/IntegrationTests/SgxApp/dataset.txt:
--------------------------------------------------------------------------------
1 | Hello, if you can read this, well done!
2 |
--------------------------------------------------------------------------------
/0.to-update/MemeGenerator/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:18.04
2 |
3 | RUN apt-get update -y
4 | RUN DEBIAN_FRONTEND=noninteractive apt-get install -y \
5 | python3 \
6 | python3-pip \
7 | ttf-mscorefonts-installer \
8 | xvfb \
9 | wkhtmltopdf \
10 | zip
11 | RUN pip3 install imgkit
12 |
13 | COPY src /src
14 | COPY entrypoint.sh /entrypoint.sh
15 | COPY ressources/*.ttf /usr/share/fonts/truetype/.
16 |
17 | RUN chmod +x /entrypoint.sh
18 | ENTRYPOINT ["/entrypoint.sh"]
19 |
--------------------------------------------------------------------------------
/0.to-update/MemeGenerator/README.md:
--------------------------------------------------------------------------------
1 | docker run \
2 | -v $PWD/iexec_in:/iexec_in \
3 | -v $PWD/iexec_out:/iexec_out \
4 | -e IEXEC_DATASET_FILENAME='BoardroomMeetingSuggestion.zip' \
5 | iexechub/meme-generator:0.0.1 \
6 | '["What is great about iExec V3?","E2E Encryption","Doracles","MemeGenerator"]'
7 |
8 | mv iexec_out/result.jpg 1.jpg
9 |
10 | docker run \
11 | -v $PWD/iexec_in:/iexec_in \
12 | -v $PWD/iexec_out:/iexec_out \
13 | -e IEXEC_DATASET_FILENAME='DistractedBoyfriend.zip' \
14 | iexechub/meme-generator:0.0.1 \
15 | '["Me using iExec V3 Dataset monetization","Valuable medical datasets","Meme templates"]'
16 |
17 | mv iexec_out/result.jpg 2.jpg
18 |
19 | docker run \
20 | -v $PWD/iexec_in:/iexec_in \
21 | -v $PWD/iexec_out:/iexec_out \
22 | -e IEXEC_DATASET_FILENAME='TrumpBillSigning.zip' \
23 | iexechub/meme-generator:0.0.1 \
24 | "[\"iExec V3 is amazing. I know them well. It's a great team with a great product.\",\"All in RLC\"]"
25 |
26 | mv iexec_out/result.jpg 3.jpg
27 |
--------------------------------------------------------------------------------
/0.to-update/MemeGenerator/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | docker image build -f Dockerfile -t iexechub/meme-generator:0.0.1 .
4 |
--------------------------------------------------------------------------------
/0.to-update/MemeGenerator/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | mkdir $PWD/dataset
4 | unzip $PWD/iexec_in/$IEXEC_DATASET_FILENAME -d $PWD/dataset
5 | python3 $PWD/src/memegenerator.py $@
6 | rm -r $PWD/dataset
7 |
--------------------------------------------------------------------------------
/0.to-update/MemeGenerator/ressources/impact.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iExecBlockchainComputing/iexec-apps/afb2843ca746eb5c1d537cd247b67bf73ba32e0d/0.to-update/MemeGenerator/ressources/impact.ttf
--------------------------------------------------------------------------------
/0.to-update/MemeGenerator/src/base.css:
--------------------------------------------------------------------------------
1 | *
2 | {
3 | margin: 0;
4 | padding: 0;
5 | }
6 | p
7 | {
8 | position: absolute;
9 | text-align: center;
10 | transform: translate(-50%,-50%);
11 | }
12 |
--------------------------------------------------------------------------------
/0.to-update/MemeGenerator/src/base.html:
--------------------------------------------------------------------------------
1 |
2 | {text[0]}
3 | {text[1]}
4 | {text[2]}
5 | {text[3]}
6 | {text[4]}
7 | {text[5]}
8 | {text[6]}
9 | {text[7]}
10 | {text[8]}
11 | {text[9]}
12 | {text[10]}
13 | {text[11]}
14 | {text[12]}
15 | {text[13]}
16 | {text[14]}
17 | {text[15]}
18 | {text[16]}
19 | {text[17]}
20 | {text[18]}
21 | {text[19]}
22 | {text[20]}
23 | {text[21]}
24 | {text[22]}
25 | {text[23]}
26 | {text[24]}
27 | {text[25]}
28 | {text[26]}
29 | {text[27]}
30 | {text[28]}
31 | {text[29]}
32 | {text[30]}
33 | {text[31]}
34 |
--------------------------------------------------------------------------------
/0.to-update/MemeGenerator/src/memegenerator.py:
--------------------------------------------------------------------------------
1 | #!/bin/python
2 |
3 | import html
4 | import imgkit
5 | import json
6 | import os
7 | import sys
8 |
9 | PWD = os.getcwd()
10 | formatPath = lambda path: path if PWD == "/" else PWD + path
11 |
12 | options = json.load(open(formatPath('/dataset/options.json'), 'r'))
13 | args = json.loads(' '.join(sys.argv[1:]))
14 | text = [ '' for i in range(32) ]
15 | text[0:len(args)] = [ html.escape(x).replace('\n', '<br/>') for x in args ]
16 |
17 | with open(formatPath('/src/base.html'), 'r') as infile:
18 | body = infile.read().format(img=formatPath('/dataset/template.jpg'), text=text)
19 | imgkit.from_string(body, formatPath('/iexec_out/result.jpg'), css=[formatPath('/src/base.css'), formatPath('/dataset/template.css')], options={"xvfb": "", **options})
20 |
--------------------------------------------------------------------------------
/0.to-update/R-Clifford-Attractors/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM r-base
2 | RUN apt-get update
3 | RUN apt-get install -y libcurl4-openssl-dev
4 | RUN apt-get install -y libcairo2-dev
5 | RUN apt-get install -y libssl-dev
6 | RUN apt-get install -y libpq-dev
7 | RUN apt-get install -y libmariadbclient-dev
8 |
9 | RUN R -e 'install.packages("Rcpp", dependencies = TRUE)'
10 | RUN R -e 'install.packages("ggplot2", dependencies = TRUE)'
11 | RUN R -e 'install.packages("dplyr", dependencies = TRUE)'
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/0.to-update/R-Clifford-Attractors/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | docker image build -f Dockerfile -t iexechub/r-clifford-attractors .
4 |
--------------------------------------------------------------------------------
/0.to-update/SudokuCLI/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3-alpine
2 |
3 | COPY sudokuCLI.py /sudokuCLI.py
4 |
5 | RUN chmod +x /sudokuCLI.py
6 |
7 | ENTRYPOINT ["./sudokuCLI.py"]
8 |
--------------------------------------------------------------------------------
/0.to-update/SudokuCLI/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | docker image build -f Dockerfile -t iexechub/sudoku-solver-cli:0.0.1 .
4 |
--------------------------------------------------------------------------------
/0.to-update/SudokuCLI/sudokuCLI.py:
--------------------------------------------------------------------------------
1 | #!/usr/local/bin/python
2 |
3 | import argparse
4 | import sys
5 |
6 | # CLASS SUDOKU SOLVER
7 | class Sudoku:
8 | def same_row(i,j): return i//9 == j//9
9 | def same_col(i,j): return i%9 == j%9
10 | def same_blk(i,j): return i//27 == j//27 and i%9//3 == j%9//3
11 | def dependent(i,j): return Sudoku.same_row(i,j) or Sudoku.same_col(i,j) or Sudoku.same_blk(i,j)
12 | def solve(grid):
13 | try:
14 | idx = next(i for i,v in enumerate(grid) if v not in "123456789")
15 | except:
16 | return grid
17 |
18 | exclude = set()
19 | for pos in range(81):
20 | if grid[pos] in "123456789":
21 | if Sudoku.dependent(pos,idx):
22 | exclude.add(grid[pos])
23 | for value in "123456789":
24 | if value not in exclude:
25 | ngrid = Sudoku.solve(grid[:idx]+value+grid[idx+1:])
26 | if ngrid: return ngrid
27 | return None
28 | def check(grid):
29 | if len(grid) != 81:
30 | raise argparse.ArgumentTypeError('Argument must be a valid sudoku grid (size must be 81)')
31 | for idx in range(81):
32 | if grid[idx] in "123456789":
33 | for pos in range(81):
34 | if idx != pos and grid[idx] == grid[pos]:
35 | if Sudoku.dependent(pos,idx):
36 | raise argparse.ArgumentTypeError('Argument must be a valid sudoku grid (positions %d and %d should not have the same value)' % (idx, pos))
37 | return grid
38 |
39 | if __name__ == '__main__':
40 |
41 | parser = argparse.ArgumentParser(description='Sudoku solver')
42 | parser.add_argument('grid', type=Sudoku.check, help='A (linearized) sudoku grid')
43 | args = parser.parse_args()
44 |
45 | solution = Sudoku.solve(args.grid)
46 | if solution:
47 | sys.stdout.write(solution)
48 | else:
49 | sys.stderr.write("No valid solution for this grid")
50 | exit(1)
--------------------------------------------------------------------------------
/0.to-update/WebNotary/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM alpine:3.7
2 |
3 | COPY entrypoint.sh /entrypoint.sh
4 |
5 | RUN chmod +x /entrypoint.sh
6 |
7 | ENTRYPOINT ["/entrypoint.sh"]
8 |
--------------------------------------------------------------------------------
/0.to-update/WebNotary/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | docker image build -f Dockerfile -t iexechub/app-web-notary:0.0.1 .
4 |
--------------------------------------------------------------------------------
/0.to-update/WebNotary/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | BASE=$PWD/iexec_out
4 | RAW=$BASE/raw
5 | PARAMS=$@
6 | DETERMINISM=$BASE/determinism.iexec
7 | ERROR=$BASE/error.txt
8 |
9 | function pullTo
10 | {
11 | mkdir $1
12 | cd $1
13 | wget $PARAMS
14 | }
15 |
16 | # cleanup
17 | rm -rf $BASE/*
18 |
19 | # get two copies to test determinism
20 | pullTo $RAW
21 | pullTo $RAW.copy
22 |
23 | # worker protection against non deterministic queries
24 | if diff $RAW $RAW.copy > /dev/null;
25 | then
26 | # configure determinism (stdout is not deterministic)
27 | ( echo -n "0x"; find $RAW -type f -exec sha256sum {} \; | sha256sum | cut -c1-64 ) > $DETERMINISM
28 | # rm copy
29 | rm -rf $RAW.copy
30 | else
31 | # error
32 | echo "ERROR: result is not deterministic" > $ERROR
33 | # deterministic value
34 | ( echo -n "0x"; cat $ERROR | sha256sum | cut -c1-64 ) >> $DETERMINISM
35 | # rm all data
36 | rm -rf $RAW $RAW.copy
37 | fi
38 |
--------------------------------------------------------------------------------
/0.to-update/Wordcloud/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM r-base
2 | RUN apt-get update
3 | RUN apt-get install -y libapparmor-dev
4 | RUN apt-get install -y libpoppler-cpp-dev
5 | RUN apt-get install -y libpoppler-glib-dev
6 | RUN apt-get install -y libxml2-dev
7 | RUN apt-get install -y libgeos-dev
8 | RUN apt-get install -y libwebp-dev
9 | RUN apt-get install -y libcurl4-openssl-dev
10 | RUN apt-get install -y curl
11 | RUN apt-get install -y libmagic-dev
12 | RUN apt-get install -y libssl-dev
13 |
14 |
15 | RUN R -e 'install.packages("tm", dependencies = TRUE)'
16 | RUN R -e 'install.packages("pdftools", dependencies = TRUE)'
17 | RUN R -e 'install.packages("wordcloud", dependencies = TRUE)'
18 | RUN R -e 'install.packages("RColorBrewer", dependencies = TRUE)'
19 | RUN R -e 'install.packages("RCurl", dependencies = TRUE)'
20 | RUN R -e 'install.packages("drat", dependencies = TRUE)'
21 | RUN R -e 'drat::addRepo("RInstitute")' -e 'install.packages("dqmagic", dependencies = TRUE)'
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/0.to-update/anemometer/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iExecBlockchainComputing/iexec-apps/afb2843ca746eb5c1d537cd247b67bf73ba32e0d/0.to-update/anemometer/.DS_Store
--------------------------------------------------------------------------------
/0.to-update/anemometer/app/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iExecBlockchainComputing/iexec-apps/afb2843ca746eb5c1d537cd247b67bf73ba32e0d/0.to-update/anemometer/app/.DS_Store
--------------------------------------------------------------------------------
/0.to-update/anemometer/app/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:11-alpine
2 | COPY anemometer.js /src/anemometer.js
3 | COPY entrypoint.sh /entrypoint.sh
4 | RUN npm i axios ethers fs
5 | RUN mkdir iexec_out
6 | RUN chmod +x /entrypoint.sh
7 | ENTRYPOINT ["/entrypoint.sh"]
--------------------------------------------------------------------------------
/0.to-update/anemometer/app/anemometer.js:
--------------------------------------------------------------------------------
1 | const ethers = require('ethers');
2 | const fs = require('fs');
3 | const axios = require('axios');
4 |
5 |
6 | const root = 'iexec_out';
7 | const determinismFilePath = `${root}/determinism.iexec`;
8 | const callbackFilePath = `${root}/callback.iexec`;
9 | const errorFilePath = `${root}/error.iexec`;
10 |
11 | /*****************************************************************************
12 | * CONFIG *
13 | *****************************************************************************/
14 | const DARKSKYAPIKEY = '513d51692e7207ee17fe6bdf1d1d6414';
15 |
16 | /*****************************************************************************
17 | * ARGUMENTS *
18 | *****************************************************************************/
19 |
20 | var [lati , longi, timestamp] = process.argv.slice(2).map(s => s);
21 |
22 | // var timestampInMili = new Date().getTime();
23 | // var timestamp = parseInt(timestampInMili/1000)
24 | // console.log(longi);
25 | // console.log(lati);
26 | // console.log(timestamp);
27 | /*****************************************************************************
28 | * HTTP QUERY *
29 | *****************************************************************************/
30 |
31 | let url = `https://api.darksky.net/forecast/${DARKSKYAPIKEY}/${lati},${longi},${timestamp}?exclude=hourly,daily,flags`;
32 |
33 | /*****************************************************************************
34 | * EXECUTE *
35 | *****************************************************************************/
36 | axios.get(url)
37 | .then(res=>{
38 | // console.log(res);
39 | var result = res.data;
40 | var windSpeed = parseInt(result.currently.windSpeed * 1000); //accuracy to 3 decimal points
41 | var weather = result.currently.summary;
42 | console.log(windSpeed);
43 | console.log(weather);
44 | var iexeccallback = ethers.utils.defaultAbiCoder.encode(['uint256', 'string', 'string', 'uint256' , 'string'], [timestamp, longi, lati, windSpeed, weather]);
45 | var iexecconsensus = ethers.utils.keccak256(iexeccallback);
46 | fs.writeFile(callbackFilePath, iexeccallback , (err) => {});
47 | fs.writeFile(determinismFilePath, iexecconsensus, (err) => {});
48 | })
49 | .catch(err=>{
50 | fs.writeFile(
51 | errorFilePath,
52 | err.toString(),
53 | (error) => {}
54 | );
55 | fs.writeFile(
56 | determinismFilePath,
57 | ethers.utils.solidityKeccak256(['string'],[err.toString()]),
58 | (error) => {}
59 | );
60 | throw new Error(err);
61 | })
62 |
63 |
--------------------------------------------------------------------------------
/0.to-update/anemometer/app/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | node src/anemometer.js $@
--------------------------------------------------------------------------------
/0.to-update/anemometer/chain.json:
--------------------------------------------------------------------------------
1 | {
2 | "default": "kovan",
3 | "chains": {
4 | "dev": {
5 | "host": "http://localhost:8545",
6 | "sms": "http://localhost:5000",
7 | "id": "17",
8 | "hub": "0x60E25C038D70A15364DAc11A042DB1dD7A2cccBC"
9 | },
10 | "ropsten": {
11 | "host": "https://ropsten.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7",
12 | "id": "3"
13 | },
14 | "rinkeby": {
15 | "host": "https://rinkeby.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7",
16 | "id": "4"
17 | },
18 | "kovan": {
19 | "host": "https://kovan.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7",
20 | "id": "42",
21 | "sms": "https://sms-kovan.iex.ec"
22 | },
23 | "mainnet": {
24 | "host": "https://mainnet.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7",
25 | "id": "1",
26 | "sms": "https://sms-mainnet.iex.ec"
27 | }
28 | }
29 | }
--------------------------------------------------------------------------------
/0.to-update/anemometer/deployed.json:
--------------------------------------------------------------------------------
1 | {
2 | "app": {
3 | "42": "0xc3DF017c8240e3A1535b197061511B155Fd97200"
4 | }
5 | }
--------------------------------------------------------------------------------
/0.to-update/anemometer/iexec.json:
--------------------------------------------------------------------------------
1 | {
2 | "description": "My iExec ressource description, must be at least 150 chars long in order to pass the validation checks. Describe your application, dataset or workerpool to your users",
3 | "license": "MIT",
4 | "author": "?",
5 | "social": {
6 | "website": "?",
7 | "github": "?"
8 | },
9 | "logo": "logo.png",
10 | "buyConf": {
11 | "params": "",
12 | "tag": "0x0000000000000000000000000000000000000000000000000000000000000000",
13 | "trust": "0",
14 | "callback": "0x0000000000000000000000000000000000000000"
15 | },
16 | "app": {
17 | "owner": "0xb4fab05554127F7fB033b5bc8C1c69f958f485b1",
18 | "name": "Anemometer",
19 | "type": "DOCKER",
20 | "multiaddr": "registry.hub.docker.com/nikhil3000/anemometer:1.0.0",
21 | "checksum": "0x77cf8b21ad2885e796a8fb5e237eaf34034918582e1625ab4c6dfe3d5c409f2f",
22 | "mrenclave": ""
23 | },
24 | "order": {
25 | "apporder": {
26 | "app": "0xc3DF017c8240e3A1535b197061511B155Fd97200",
27 | "appprice": "0",
28 | "volume": "1000000",
29 | "tag": "0x0000000000000000000000000000000000000000000000000000000000000000",
30 | "datasetrestrict": "0x0000000000000000000000000000000000000000",
31 | "workerpoolrestrict": "0x0000000000000000000000000000000000000000",
32 | "requesterrestrict": "0x0000000000000000000000000000000000000000"
33 | }
34 | }
35 | }
--------------------------------------------------------------------------------
/0.to-update/anemometer/orders.json:
--------------------------------------------------------------------------------
1 | {
2 | "42": {
3 | "apporder": {
4 | "app": "0xc3DF017c8240e3A1535b197061511B155Fd97200",
5 | "appprice": "0",
6 | "volume": "1000000",
7 | "tag": "0x0000000000000000000000000000000000000000000000000000000000000000",
8 | "datasetrestrict": "0x0000000000000000000000000000000000000000",
9 | "workerpoolrestrict": "0x0000000000000000000000000000000000000000",
10 | "requesterrestrict": "0x0000000000000000000000000000000000000000",
11 | "salt": "0xaf9ad34ba17916f232f2452a66b838f748c6076669f0d696b2f74c8e1452ab21",
12 | "sign": "0x2a3ed85deff805d3d0203228d75de75fb73d7f330dafb5203ddd8257c38e578e006825d0973f7118a81d49ca1e9674ff3f5a372ae1316754bd31a4a97e24910a1b"
13 | }
14 | }
15 | }
--------------------------------------------------------------------------------
/0.to-update/anemometer/smart-contract/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iExecBlockchainComputing/iexec-apps/afb2843ca746eb5c1d537cd247b67bf73ba32e0d/0.to-update/anemometer/smart-contract/.DS_Store
--------------------------------------------------------------------------------
/0.to-update/anemometer/smart-contract/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | build
3 |
4 | package-lock.json
5 | yarn.lock
6 |
7 |
--------------------------------------------------------------------------------
/0.to-update/anemometer/smart-contract/README.md:
--------------------------------------------------------------------------------
1 | iExec Decentralized Oracle System
2 | ==================================
3 |
4 | About iExec
5 | -----------
6 |
7 | Thanks to iExec, it is possible to achieve onchain consensus about the result of an offchain application. Applications are represented by smart contracts and execution results can be made available onchain with all the necessary proof.
8 |
9 | Building an oracle application
10 | ------------------------------
11 |
12 | iExec applications produce two kinds of output.
13 | * The consensus is achieved based on a deterministic value describing the application output. By default this is the hash of the result archive, but can be overridden by the content of `/iexec_out/determinism.iexec`. Upon successful verification, this is stored onchain in the `task.resultDigest` field.
14 | * The actual result. By default this is the IPFS address of a (potentially encrypted) archive containing the outputs, but can be overridden by the content of `/iexec_out/callback.iexec`. Upon successful verification, this is stored onchain in the `task.results` field.
15 |
16 | An iExec oracle application, such as the one used in the price-oracle example, uses these two elements to deliver verified results to the blockchain.
17 |
18 | Given a set of parameters, the application produces a self-describing result, encodes it in a way that can be interpreted onchain, stores it in `/iexec_out/callback.iexec` so that it can be accessed onchain, and stores the hash of this encoded value to perform the consensus.
19 |
20 | For example, given the parameters "BTC USD 9 2019-04-11T13:08:32.605Z" the price-oracle application will:
21 |
22 | 1. Retrieve the price of BTC in USD at 2019-04-11T13:08:32.605Z
23 | 2. Multiply this value by 10^9 (to capture the price more accurately, as it will be represented by an integer onchain)
24 | 3. Encode the date, the description ("btc-usd-9") and the value using `abi.encode`
25 | 4. Store this result in `/iexec_out/callback.iexec`
26 | 5. Hash the result and store it in `/iexec_out/determinism.iexec`
27 |
28 | iExec will then achieve PoCo consensus on the `/iexec_out/determinism.iexec` value, and will store both the `/iexec_out/determinism.iexec` and the `/iexec_out/callback.iexec` onchain.
29 |
30 | Given a taskID, it is possible to retrieve all the details of the computation as described above. The oracle smart contract just needs to retrieve the information, verify the validity of the execution, and process the encoded result. Thanks to the PoCo consensus, anyone can request a computation and ask the oracle to update itself in a trustless manner.
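
A minimal sketch of this output pattern, assuming the ethers v4-style API already used in `anemometer.js` above; the date, description and value below are illustrative placeholders, not real price data:

```js
// Sketch only: write the two files PoCo relies on, as described above.
const fs = require('fs');
const ethers = require('ethers');

// Placeholder result of the off-chain computation.
const date = 1554988112;          // unix timestamp of the quote
const description = 'btc-usd-9';  // self-describing id of the value
const value = 5123450000000;      // price multiplied by 10^9, as an integer

// Encode the result so an oracle contract can abi.decode it onchain.
const callback = ethers.utils.defaultAbiCoder.encode(
  ['uint256', 'string', 'uint256'],
  [date, description, value]
);
// The consensus value is the hash of that encoded payload.
const determinism = ethers.utils.keccak256(callback);

fs.writeFileSync('/iexec_out/callback.iexec', callback);
fs.writeFileSync('/iexec_out/determinism.iexec', determinism);
```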
31 |
32 | How to setup an oracle contract
33 | -------------------------------
34 |
35 | 1. Record the address of the iExec Hub and Clerk contracts
36 |
37 | 2. Register the requirements needed for a result to be processed (see the sketch after this list)
38 | * Which application (single, any, whitelist?)
39 | * Which dataset (single, any, whitelist?)
40 | * Which workerpool (single, any, whitelist?)
41 | * Minimum level of trust
42 | * Mandatory tag
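
A hedged sketch of that configuration step, reusing the `updateEnv` signature from `AnemometerOracle.sol` above with the ethers v4-style API; the `PROVIDER`, `MNEMONIC` and `DORACLE_ADDR` variables follow the daemon's docker-compose file, while `AUTHORIZED_APP` is an assumed placeholder:

```js
// Sketch only: the oracle owner registers which executions will be accepted.
const ethers = require('ethers');

const oracleAbi = ['function updateEnv(address,address,address,bytes32,uint256)'];
const provider = ethers.getDefaultProvider(process.env.PROVIDER);
const owner = new ethers.Wallet(process.env.MNEMONIC, provider);
const oracle = new ethers.Contract(process.env.DORACLE_ADDR, oracleAbi, owner);

async function setup() {
  const tx = await oracle.updateEnv(
    process.env.AUTHORIZED_APP,   // a single app, or a whitelist contract
    ethers.constants.AddressZero, // any dataset
    ethers.constants.AddressZero, // any workerpool
    ethers.constants.HashZero,    // no mandatory tag
    0                             // minimum trust level
  );
  await tx.wait();
}

setup();
```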
43 |
44 | How to update an oracle contract
45 | --------------------------------
46 |
47 | 1. Send the taskID of a valid execution to the oracle smart contract (see the sketch after this list).
48 | 2. The oracle smart contract retrieves details about this task from the iexec's smart contracts.
49 | 3. The oracle smart contract verifies the execution is valid (authorized app, dataset, workerpool, trust level and tags).
50 | 4. The oracle smart contract verifies that the hash of the results corresponds to the resultDigest that achieved consensus, thus proving the validity of the results field.
51 | 5. The oracle smart contract decodes the results using `abi.decode`.
52 | 6. The oracle smart contract processes the results. In the case of the price oracle, this means storing the value if it is more recent than the one currently recorded.
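
An illustrative sketch of step 1, under the same assumptions as above; `processResult` is the entry point exposed by `AnemometerOracle.sol`, and `TASKID` is an assumed placeholder for the id of a finalized iExec task:

```js
// Sketch only: push a finalized taskID to the oracle; steps 2-6 then run onchain.
const ethers = require('ethers');

const oracleAbi = ['function processResult(bytes32)'];
const provider = ethers.getDefaultProvider(process.env.PROVIDER);
const wallet = new ethers.Wallet(process.env.MNEMONIC, provider);
const oracle = new ethers.Contract(process.env.DORACLE_ADDR, oracleAbi, wallet);

async function update() {
  const tx = await oracle.processResult(process.env.TASKID);
  await tx.wait();
}

update();
```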
53 |
54 | How to read price from the iExec price oracle
55 | ---------------------------------------------
56 |
57 | Just query the oracle's `values` mapping with the id of the requested entry. For example, to get the most recent price of BTC in USD with 9-decimal precision (as described above), query `values(keccak256(bytes("BTC-USD-9")))`; this returns a structure containing the value, the associated date, and the details of the request.
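
An illustrative sketch of such a query with the ethers v4-style API, using the Kovan price-oracle address listed under "Deployed addresses" below; the exact field layout returned by `values` is an assumption based on the description above:

```js
// Sketch only: read the latest BTC-USD entry from the Kovan price oracle.
const ethers = require('ethers');

// Assumed return layout: (oracleCallID, date, details, value).
const oracleAbi = [
  'function values(bytes32) view returns (bytes32 oracleCallID, uint256 date, string details, uint256 value)'
];
const provider = ethers.getDefaultProvider('kovan');
const oracle = new ethers.Contract(
  '0x3b9f1a9aecb1991f3818f45bd4cc735f4bee93ac', // see "Deployed addresses"
  oracleAbi,
  provider
);

async function readPrice() {
  const id = ethers.utils.keccak256(ethers.utils.toUtf8Bytes('BTC-USD-9'));
  const entry = await oracle.values(id);
  console.log(entry.value.toString(), new Date(entry.date.toNumber() * 1000));
}

readPrice();
```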
58 |
59 | Deployed addresses
60 | ------------------
61 |
62 | 1. **Kovan:**
63 |
64 | price oracle: `https://kovan.etherscan.io/address/0x3b9f1a9aecb1991f3818f45bd4cc735f4bee93ac`
65 |
66 | app whitelist: `https://kovan.etherscan.io/address/0x651a09cdff5a6669ea8bf05be11eff4aa9cbfdaf`
67 |
68 | whitelist contains:
69 |
70 | * `0xf92f39545340ce2fd6f4248a689fca4f660ae42f`
71 | * `0xe01bccbcab54c42f999b6ce88d63d3a5e96cfdb7`
72 |
73 | Whitelist is administered by:
74 |
75 | * `0x7bd4783FDCAD405A28052a0d1f11236A741da593`
76 |
--------------------------------------------------------------------------------
/0.to-update/anemometer/smart-contract/contracts/AnemometerOracle.sol:
--------------------------------------------------------------------------------
1 | pragma solidity ^0.5.8;
2 | pragma experimental ABIEncoderV2;
3 |
4 | import "openzeppelin-solidity/contracts/ownership/Ownable.sol";
5 | import "iexec-doracle-base/contracts/IexecDoracle.sol";
6 |
7 | contract AnemometerOracle is Ownable, IexecDoracle
8 | {
9 | struct TimedValue
10 | {
11 | bytes32 oracleCallID;
12 | uint256 date;
13 | string longi;
14 | string lati;
15 | uint256 windSpeed; //correct to 3 decimal places
16 | string weather;
17 | }
18 |
19 | mapping(bytes32 => TimedValue) public values;
20 |
21 | event ValueUpdated(
22 | bytes32 indexed id,
23 | bytes32 indexed oracleCallID,
24 | uint256 oldDate,
25 | uint256 oldWindSpeed,
26 | string oldWeather,
27 | uint256 newDate,
28 | uint256 newWindSpeed,
29 | string newWeather
30 | );
31 |
32 | // Use _iexecHubAddr to force use of custom iexechub, leave 0x0 for autodetect
33 | constructor(address _iexecHubAddr)
34 | public IexecDoracle(_iexecHubAddr)
35 | {}
36 |
37 | function updateEnv(
38 | address _authorizedApp
39 | , address _authorizedDataset
40 | , address _authorizedWorkerpool
41 | , bytes32 _requiredtag
42 | , uint256 _requiredtrust
43 | )
44 | public onlyOwner
45 | {
46 | _iexecDoracleUpdateSettings(_authorizedApp, _authorizedDataset, _authorizedWorkerpool, _requiredtag, _requiredtrust);
47 | }
48 |
49 | function decodeResults(bytes memory results)
50 | public pure returns(uint256, string memory, string memory, uint256 , string memory)
51 | { return abi.decode(results, (uint256, string , string , uint256 , string )); }
52 |
53 | function processResult(bytes32 _oracleCallID)
54 | public
55 | {
56 | uint256 date;
57 | string memory longi;
58 | string memory lati;
59 | uint256 windSpeed; //correct to 3 decimal places
60 | string memory weather;
61 |
62 | // Parse results
63 | (date,longi, lati, windSpeed, weather) = decodeResults(_iexecDoracleGetVerifiedResult(_oracleCallID));
64 |
65 | // Process results
66 | bytes32 id = keccak256(abi.encode(longi,lati));
67 | require(values[id].date < date, "new-value-is-too-old");
68 | emit ValueUpdated(id, _oracleCallID, values[id].date, values[id].windSpeed,values[id].weather, date, windSpeed, weather);
69 | values[id].oracleCallID = _oracleCallID;
70 | values[id].date = date;
71 | values[id].longi = longi;
72 | values[id].lati = lati;
73 | values[id].windSpeed = windSpeed;
74 | values[id].weather = weather;
75 | }
76 | }
77 |
--------------------------------------------------------------------------------
/0.to-update/anemometer/smart-contract/contracts/Migrations.sol:
--------------------------------------------------------------------------------
1 | pragma solidity ^0.5.8;
2 |
3 | import "openzeppelin-solidity/contracts/ownership/Ownable.sol";
4 |
5 | contract Migrations is Ownable
6 | {
7 | uint256 public lastCompletedMigration;
8 |
9 | constructor()
10 | public
11 | {
12 | }
13 |
14 | function setCompleted(uint completed) public onlyOwner
15 | {
16 | lastCompletedMigration = completed;
17 | }
18 |
19 | function upgrade(address newAddress) public onlyOwner
20 | {
21 | Migrations upgraded = Migrations(newAddress);
22 | upgraded.setCompleted(lastCompletedMigration);
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/0.to-update/anemometer/smart-contract/daemon/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:8-alpine
2 |
3 | # changing user
4 | USER root
5 |
6 | # add necessary packages
7 | RUN apk add --no-cache git python make g++
8 |
9 | # create a work directory inside the container
10 | RUN mkdir /app
11 | WORKDIR /app
12 |
13 | # copy project files
14 | COPY . .
15 |
16 | # install utilities
17 | RUN npm install -g yarn ts-node typescript
18 |
19 | # install dependencies
20 | RUN yarn
21 |
22 | # making entrypoint executable
23 | RUN chmod +x entrypoint.sh
24 |
25 | ENTRYPOINT ["./entrypoint.sh"]
26 |
--------------------------------------------------------------------------------
/0.to-update/anemometer/smart-contract/daemon/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2'
2 | services:
3 | doracle-daemon:
4 | image: iexechub/iexec-doracle-daemon:latest
5 | environment:
6 | - DORACLE_ADDR=xxx
7 | - MNEMONIC=xxxx
8 | - PROVIDER=xxx
9 | - REQUESTER=xxxx
10 | restart: unless-stopped
11 |
--------------------------------------------------------------------------------
/0.to-update/anemometer/smart-contract/daemon/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | echo "[INFO] Launching DOracle Daemon"
3 | /usr/local/bin/ts-node launch.ts
4 |
--------------------------------------------------------------------------------
/0.to-update/anemometer/smart-contract/daemon/launch.ts:
--------------------------------------------------------------------------------
1 | import { ethers } from 'ethers';
2 | import Daemon from './daemon';
3 |
4 | // mainnet 0xed4a0189511859427c33dcc7c85fdd36575ae946
5 | // kovan 0x3b9F1a9aeCb1991f3818f45bd4CC735f4BEE93Ac
6 |
7 | let doracle_addr: string = process.env.DORACLE_ADDR;
8 | let private_key: string = process.env.MNEMONIC;
9 | let provider: ethers.providers.Provider = ethers.getDefaultProvider(process.env.PROVIDER);
10 |
11 | let wallet: ethers.Wallet = new ethers.Wallet(private_key, provider);
12 | let daemon: Daemon = new Daemon(doracle_addr, wallet, process.env.REQUESTER);
13 |
14 | daemon.start();
15 |
--------------------------------------------------------------------------------
/0.to-update/anemometer/smart-contract/daemon/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "price-feed-doracle",
3 | "version": "0.0.1",
4 | "description": "Contracts and tools for the iExec based price feed DOracle",
5 | "repository": {
6 | "type": "git",
7 | "url": "git+https://github.com/iExecBlockchainComputing/price-feed-doracle.git"
8 | },
9 | "bugs": {
10 | "url": "https://github.com/iExecBlockchainComputing/price-feed-doracle.git/issues"
11 | },
12 | "files": [
13 | "/build",
14 | "/contracts",
15 | "daemon"
16 | ],
17 | "author": "iExec",
18 | "license": "ISC",
19 | "homepage": "https://github.com/iExecBlockchainComputing/price-feed-doracle.git#readme",
20 | "dependencies": {
21 | "chai": "^4.2.0",
22 | "ethereumjs-util": "^5.2.0",
23 | "iexec-doracle-base": "^0.0.4",
24 | "iexec-poco": "^3.0.35",
25 | "iexec-solidity": "^0.0.7",
26 | "multiaddr": "^6.0.6",
27 | "openzeppelin-solidity": "^2.2.0",
28 | "openzeppelin-test-helpers": "^0.1.5",
29 | "rlc-faucet-contract": "^2.0.0"
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/0.to-update/anemometer/smart-contract/daemon/utils/addrToKey.ts:
--------------------------------------------------------------------------------
1 | import { ethers } from 'ethers';
2 |
3 | export default function(addr: string) : string
4 | {
5 | return ethers.utils.hexZeroPad(addr, 32).toString().toLowerCase();
6 | }
7 |
--------------------------------------------------------------------------------
/0.to-update/anemometer/smart-contract/daemon/utils/index.ts:
--------------------------------------------------------------------------------
1 | export { default as require } from "./require";
2 | export { default as addrToKey } from "./addrToKey";
3 |
--------------------------------------------------------------------------------
/0.to-update/anemometer/smart-contract/daemon/utils/require.ts:
--------------------------------------------------------------------------------
1 | export default function(value: boolean, reason: string = "") : void
2 | {
3 | if (!value) throw Error(reason);
4 | }
5 |
--------------------------------------------------------------------------------
/0.to-update/anemometer/smart-contract/migrations/1_initial_migration.js:
--------------------------------------------------------------------------------
1 | var Migrations = artifacts.require("./tools/Migrations.sol");
2 |
3 | module.exports = function(deployer) {
4 | deployer.deploy(Migrations, {gas: 500000});
5 | };
6 |
--------------------------------------------------------------------------------
/0.to-update/anemometer/smart-contract/migrations/2_deploy_contracts.js:
--------------------------------------------------------------------------------
1 | var AnemometerOracle = artifacts.require("AnemometerOracle");
2 |
3 | module.exports = async function(deployer, network, accounts)
4 | {
5 | await deployer.deploy(AnemometerOracle, "0x0000000000000000000000000000000000000000", { gas: 2500000 });
6 | AnemometerOracleInstance = await AnemometerOracle.deployed();
7 | console.log("AnemometerOracle deployed at address: " + AnemometerOracleInstance.address);
8 | };
9 |
--------------------------------------------------------------------------------
/0.to-update/anemometer/smart-contract/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "price-feed-doracle",
3 | "version": "0.0.1",
4 | "description": "Contracts and tools for the iExec based price feed DOracle",
5 | "repository": {
6 | "type": "git",
7 | "url": "git+https://github.com/iExecBlockchainComputing/price-feed-doracle.git"
8 | },
9 | "bugs": {
10 | "url": "https://github.com/iExecBlockchainComputing/price-feed-doracle.git/issues"
11 | },
12 | "files": [
13 | "/build",
14 | "/contracts",
15 | "daemon"
16 | ],
17 | "author": "iExec",
18 | "license": "ISC",
19 | "homepage": "https://github.com/iExecBlockchainComputing/price-feed-doracle.git#readme",
20 | "dependencies": {
21 | "iexec-doracle-base": "^0.0.6",
22 | "iexec-poco": "^3.0.35",
23 | "iexec-solidity": "^0.0.7",
24 | "openzeppelin-solidity": "^2.2.0",
25 | "truffle": "^5.0.25",
26 | "truffle-hdwallet-provider": "^1.0.12"
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/0.to-update/anemometer/smart-contract/truffle.js:
--------------------------------------------------------------------------------
1 | var HDWalletProvider = require("truffle-hdwallet-provider");
2 |
3 | module.exports =
4 | {
5 | networks:
6 | {
7 | docker:
8 | {
9 | host: "iexec-geth-local",
10 | port: 8545,
11 | network_id: "*", // Match any network id,
12 | gasPrice: 22000000000, //22Gwei
13 | },
14 | development:
15 | {
16 | host: "localhost",
17 | port: 8545,
18 | network_id: "*", // Match any network id,
19 | gasPrice: 22000000000, //22Gwei
20 | },
21 | coverage:
22 | {
23 | host: "localhost",
24 | port: 8555, // <-- If you change this, also set the port option in .solcover.js.
25 | network_id: "*",
26 | gas: 0xFFFFFFFFFFF, // <-- Use this high gas value
27 | gasPrice: 0x01 // <-- Use this low gas price
28 | },
29 | mainnet:
30 | {
31 | provider: () => new HDWalletProvider(process.env.MNEMONIC, "https://mainnet.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7"),
32 | network_id: '1',
33 | gasPrice: 22000000000, //22Gwei
34 | },
35 | ropsten:
36 | {
37 | provider: () => new HDWalletProvider(process.env.MNEMONIC, "https://ropsten.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7"),
38 | network_id: '3',
39 | gasPrice: 22000000000, //22Gwei
40 | },
41 | kovan: {
42 | provider: () => new HDWalletProvider(process.env.MNEMONIC, "https://kovan.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7"),
43 | network_id: '42',
44 | gasPrice: 1000000000, //1Gwei
45 | }
46 | },
47 | compilers: {
48 | solc: {
49 | version: "0.5.10",
50 | settings: {
51 | optimizer: {
52 | enabled: true,
53 | runs: 200
54 | }
55 | }
56 | }
57 | },
58 | mocha:
59 | {
60 | enableTimeouts: false
61 | }
62 | };
63 |
--------------------------------------------------------------------------------
/0.to-update/blender-images/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:xenial
2 |
3 | RUN apt-get update && \
4 | apt-get install -y \
5 | curl \
6 | bzip2 \
7 | imagemagick \
8 | libfreetype6 \
9 | libgl1-mesa-dev \
10 | libglu1-mesa \
11 | libxi6 \
12 | libxrender1 && \
13 | apt-get -y autoremove && \
14 | rm -rf /var/lib/apt/lists/*
15 |
16 | ENV BLENDER_MAJOR 2.79
17 | ENV BLENDER_VERSION 2.79
18 | ENV BLENDER_BZ2_URL https://mirror.clarkson.edu/blender/release/Blender$BLENDER_MAJOR/blender-$BLENDER_VERSION-linux-glibc219-x86_64.tar.bz2
19 |
20 | RUN mkdir /usr/local/blender && \
21 | curl -SL "$BLENDER_BZ2_URL" -o blender.tar.bz2 && \
22 | tar -jxvf blender.tar.bz2 -C /usr/local/blender --strip-components=1 && \
23 | rm blender.tar.bz2
24 |
25 | VOLUME /media
26 |
27 | RUN cd /
28 | COPY script.sh script.sh
29 |
30 | ENTRYPOINT ["/script.sh"]
31 |
--------------------------------------------------------------------------------
/0.to-update/blender-images/README.md:
--------------------------------------------------------------------------------
1 | # blender-images
2 |
3 | The Dockerfile is largely inspired by the one from ikester: https://github.com/ikester/blender-docker/blob/2.79/Dockerfile
4 |
5 | Only imagemagick has been added, so that the rendered output can be converted to generate the consensus.iexec file.
6 |
--------------------------------------------------------------------------------
/0.to-update/blender-images/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | docker image build -f Dockerfile -t iexechub/blender-images .
4 |
--------------------------------------------------------------------------------
/0.to-update/blender-images/script.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 |
4 | # delete the consensus file if it already exists
5 | rm /iexec/consensus.iexec 2> /dev/null
6 |
7 | # run the rendering with iexec
8 | /usr/local/blender/blender -b $1 -o iexec/output -f 1
9 |
10 | # create the consensus file
11 | cd iexec
12 | echo "** Creation of the consensus file **"
13 | convert output*.* -define png:include-chunk=none -set colorspace Gray -separate -average -depth 4 consensus.iexec
14 | echo "Done!"
15 |
--------------------------------------------------------------------------------
/0.to-update/blur-face/Dockerfile:
--------------------------------------------------------------------------------
1 | # This is a sample Dockerfile you can modify to deploy your own app based on face_recognition
2 |
3 | FROM python:3.6-slim-stretch
4 |
5 | RUN apt-get -y update
6 | RUN apt-get install -y --fix-missing \
7 | build-essential \
8 | cmake \
9 | gfortran \
10 | git \
11 | wget \
12 | curl \
13 | graphicsmagick \
14 | libgraphicsmagick1-dev \
15 | libatlas-dev \
16 | libavcodec-dev \
17 | libavformat-dev \
18 | libgtk2.0-dev \
19 | libjpeg-dev \
20 | liblapack-dev \
21 | libswscale-dev \
22 | pkg-config \
23 | python3-dev \
24 | python3-numpy \
25 | software-properties-common \
26 | zip \
27 | python-opencv \
28 | libopencv-dev \
29 | ffmpeg \
30 | bc \
31 | && apt-get clean && rm -rf /tmp/* /var/tmp/*
32 |
33 | RUN cd ~ && \
34 | mkdir -p dlib && \
35 | git clone -b 'v19.9' --single-branch https://github.com/davisking/dlib.git dlib/ && \
36 | cd dlib/ && \
37 | python3 setup.py install --yes USE_AVX_INSTRUCTIONS
38 |
39 |
40 | # The rest of this file just runs an example script.
41 |
42 | # If you wanted to use this Dockerfile to run your own app instead, maybe you would do this:
43 | # COPY . /root/your_app_or_whatever
44 | # RUN cd /root/your_app_or_whatever && \
45 | # pip3 install -r requirements.txt
46 | # RUN whatever_command_you_run_to_start_your_app
47 |
48 | RUN cd ~ && \
49 | git clone https://github.com/ageitgey/face_recognition && \
50 | cd face_recognition/ && \
51 | pip3 install -r requirements.txt && \
52 | pip3 install opencv-python && \
53 | python3 setup.py install
54 |
55 | RUN cd /
56 |
57 | COPY blurFace.py blurFace.py
58 | COPY script.sh script.sh
59 |
60 | ENTRYPOINT ["/script.sh"]
61 |
--------------------------------------------------------------------------------
/0.to-update/blur-face/README.md:
--------------------------------------------------------------------------------
1 | # blur-face
2 | This Dockerfile is from the project [ageitgey/face_recognition](https://github.com/ageitgey/face_recognition). A script has been added to blur the faces that the algorithm finds.
3 |
--------------------------------------------------------------------------------
/0.to-update/blur-face/blurFace.py:
--------------------------------------------------------------------------------
1 | from PIL import Image
2 | import face_recognition
3 | import sys
4 | import cv2
5 |
6 | image_file=sys.argv[1]
7 | image_width=sys.argv[2]
8 | image_height=sys.argv[3]
9 | frame_rate=sys.argv[4]
10 |
11 | input_movie = cv2.VideoCapture(image_file)
12 | length = int(input_movie.get(cv2.CAP_PROP_FRAME_COUNT))
13 |
14 | fourcc = cv2.VideoWriter_fourcc(*'XVID')
15 | output_movie = cv2.VideoWriter('iexec/output.avi', fourcc, float(frame_rate), (int(image_width), int(image_height)))
16 |
17 | # Load the jpg file into a numpy array
18 | #image = face_recognition. o(image_file)
19 |
20 | # This method is fairly accurate, but not as accurate as the CNN model and not GPU accelerated.
21 | # See also: find_faces_in_picture_cnn.py
22 | #face_locations = face_recognition.face_locations(image)
23 | frame_number = 0
24 |
25 | while True:
26 | # Grab a single frame of video
27 | ret, frame = input_movie.read()
28 | frame_number += 1
29 |
30 | # Quit when the input video file ends
31 | if not ret:
32 | break
33 |
34 | # Convert the image from BGR color (which OpenCV uses) to RGB color (which face_recognition uses)
35 | rgb_frame = frame[:, :, ::-1]
36 |
37 | face_locations = face_recognition.face_locations(rgb_frame)
38 |
39 | print("I found {} face(s) to blur in this photograph.".format(len(face_locations)))
40 | count = 0;
41 | for top, right, bottom, left in face_locations:
42 |
43 | # Extract the region of the image that contains the face
44 | face_image = frame[top:bottom, left:right]
45 | face_image = cv2.GaussianBlur(face_image, (99, 99), 30)
46 | frame[top:bottom, left:right] = face_image
47 |
48 | count = count + 1;
49 | # Print the location of each face in this image
50 | #top, right, bottom, left = face_location
51 | print("A face to blur is located at pixel location Top: {}, Left: {}, Bottom: {}, Right: {}".format(top, left, bottom, right))
52 |
53 | # You can access the actual face itself like this:
54 | print("Writing frame {} / {}".format(frame_number, length))
55 | output_movie.write(frame)
56 | #face_image = image[top:bottom, left:right]
57 | #pil_image = Image.fromarray(face_image)
58 |
59 | # All done!
60 | input_movie.release()
61 | cv2.destroyAllWindows()
62 |
--------------------------------------------------------------------------------
/0.to-update/blur-face/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | docker image build -f Dockerfile -t iexechub/blur-face .
4 |
--------------------------------------------------------------------------------
/0.to-update/blur-face/script.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 |
4 | # delete the consensus file if it already exists
5 | rm /iexec/consensus.iexec 2> /dev/null
6 |
7 | #get the width and height of the video
8 | width=`ffprobe -v error -select_streams v:0 -show_entries stream=width,height -of csv=s=x:p=0 $1 | tr x ' ' | awk -F ' ' '{print $1}'`
9 | height=`ffprobe -v error -select_streams v:0 -show_entries stream=width,height -of csv=s=x:p=0 $1 | tr x ' ' | awk -F ' ' '{print $2}'`
10 |
11 | frame_rate=`ffprobe -v 0 -of csv=p=0 -select_streams 0 -show_entries stream=r_frame_rate $1 | bc -l`
12 | # print the output in the consensus.iexec file
13 | python blurFace.py $1 $width $height $frame_rate 2>&1 | tee -a /iexec/consensus.iexec
14 |
--------------------------------------------------------------------------------
/0.to-update/curl/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM byrnedo/alpine-curl
2 |
3 |
4 | ENTRYPOINT ["tail", "-f", "/dev/null"]
--------------------------------------------------------------------------------
/0.to-update/erazhu31/SoccerLiveScores/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:11-alpine
2 | COPY src/oracle.js /src/oracle.js
3 | RUN npm i https ethers fs
4 | ENTRYPOINT ["node", "src/oracle.js"]
5 |
6 |
7 |
--------------------------------------------------------------------------------
/0.to-update/erazhu31/SoccerLiveScores/smart-contract/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | build
3 |
4 | package-lock.json
5 | yarn.lock
6 |
7 |
--------------------------------------------------------------------------------
/0.to-update/erazhu31/SoccerLiveScores/smart-contract/README.md:
--------------------------------------------------------------------------------
1 | iExec Decentralized Oracle System
2 | ==================================
3 |
4 | About iExec
5 | -----------
6 |
7 | Thanks to iExec, it is possible to achieve onchain consensus about the result of an offchain application. Applications are represented by smart contracts and execution results can be made available onchain with all the necessary proof.
8 |
9 | Building an oracle application
10 | ------------------------------
11 |
12 | iExec applications produce two kinds of outputs:
13 | * The value on which consensus is achieved: a deterministic value describing the application output. By default this is the hash of the result archive, but it can be overridden by the content of `/iexec_out/determinism.iexec`. Upon successful verification, this is stored onchain in the `task.resultDigest` field.
14 | * The actual result. By default this is the IPFS address of a (potentially encrypted) archive containing the outputs, but it can be overridden by the content of `/iexec_out/callback.iexec`. Upon successful verification, this is stored onchain in the `task.results` field.
15 | 
16 | An iExec oracle application, such as the one used in the price-oracle example, uses these two elements to deliver verified results to the blockchain.
17 | 
18 | Given a set of parameters, the application produces a self-describing result, encodes it in a way that can be interpreted onchain, stores it in `/iexec_out/callback.iexec` so that it can be accessed onchain, and stores the hash of this encoded value to perform the consensus.
19 |
20 | For example, given the parameters "BTC USD 9 2019-04-11T13:08:32.605Z" the price-oracle application will:
21 | 
22 | 1. Retrieve the price of BTC in USD at 2019-04-11T13:08:32.605Z
23 | 2. Multiply this value by 10^9 (to capture the price more accurately, as it will be represented by an integer onchain)
24 | 3. Encode the date, the description ("btc-usd-9") and the value using `abi.encode`
25 | 4. Store this result in `/iexec_out/callback.iexec`
26 | 5. Hash the result and store it in `/iexec_out/determinism.iexec` (a sketch of steps 3-5 follows at the end of this section)
27 |
28 | iExec will then achieve PoCo consensus on the `/iexec_out/determinism.iexec` value, and will store both the `/iexec_out/determinism.iexec` and the `/iexec_out/callback.iexec` onchain.
29 |
30 | Given a taskID, it is possible to retrieve all the details of the computation as described above. The oracle smart contract just needs to retrieve the information, verify the validity of the execution and process the encoded result. Thanks to the PoCo consensus, anyone can require a computation and ask the oracle to update itself in a trustless manner.
31 |
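As referenced in the list above, here is a minimal sketch of steps 3-5, assuming ethers v4 (the version used by the apps in this repository); the price value is hypothetical and the exact parameter types of the real price-oracle app may differ:

```typescript
import { ethers } from 'ethers';
import * as fs from 'fs';

// Hypothetical values for the "BTC USD 9 2019-04-11T13:08:32.605Z" request described above.
const date  = '2019-04-11T13:08:32.605Z';
const id    = 'btc-usd-9';
const value = Math.round(5200.123456789 * 1e9);   // price scaled by 10^9 to an integer

// 3. Encode the date, the description and the value.
const callback = ethers.utils.defaultAbiCoder.encode(
  ['string', 'string', 'uint256'],
  [date, id, value]
);

// 4. Store the encoded result so it can be pushed onchain.
fs.writeFileSync('/iexec_out/callback.iexec', callback);

// 5. Hash it; PoCo consensus is reached on this digest.
fs.writeFileSync('/iexec_out/determinism.iexec', ethers.utils.keccak256(callback));
```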
32 | How to setup an oracle contract
33 | -------------------------------
34 |
35 | 1. Record the address of the iExec Hub and Clerk contracts
36 |
37 | 2. Register the requirements needed for a result to be processed
38 | * Which application (single, any, whitelist?)
39 | * Which dataset (single, any, whitelist?)
40 | * Which workerpool (single, any, whitelist?)
41 | * Minimum level of trust
42 | * Mandatory tag
43 |
44 | How to update an oracle contract
45 | --------------------------------
46 |
47 | 1. Send the taskID of a valid execution to the oracle smart contract (see the sketch after this list).
48 | 2. The oracle smart contract retrieves details about this task from iExec's smart contracts.
49 | 3. The oracle smart contract verifies the execution is valid (authorized app, dataset, workerpool, trust level and tags).
50 | 4. The oracle smart contract verifies that the hash of the result corresponds to the resultDigest that achieved consensus, thus verifying the validity of the `task.results` field.
51 | 5. The oracle smart contract decodes the results using `abi.decode`.
52 | 6. The oracle smart contract processes the results. In the case of the price oracle, this means storing the value if it is more recent than the one currently recorded.
53 |
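A sketch of step 1, assuming ethers v4 and an oracle exposing `processResult` as the contracts in this repository do; the environment variables mirror those of the daemon's docker-compose.yml:

```typescript
import { ethers } from 'ethers';

// Assumptions: DORACLE_ADDR, MNEMONIC (a private key, as in launch.ts) and PROVIDER are set,
// and the taskID of a completed, verified execution is passed as the first CLI argument.
const abi    = ['function processResult(bytes32 _oracleCallID)'];
const wallet = new ethers.Wallet(process.env.MNEMONIC!, ethers.getDefaultProvider(process.env.PROVIDER!));
const oracle = new ethers.Contract(process.env.DORACLE_ADDR!, abi, wallet);

// Anyone can push a verified taskID; the contract re-checks validity onchain.
oracle.processResult(process.argv[2])
  .then(tx => tx.wait())
  .then(() => console.log('oracle updated'));
```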
54 | How to read price from the iExec price oracle
55 | ---------------------------------------------
56 |
57 | Just query the oracle `values` field with the id of the requested feed. For example, to get the most recent price of BTC in USD with 9 places of precision (as described above), query `values(keccak256(bytes("BTC-USD-9")))`; this will return a structure containing the value, the associated date, and the details of the request.
58 |
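The sketch below shows this query, assuming ethers v4; the minimal ABI fragment is hypothetical, and the actual return layout is defined by the deployed oracle contract:

```typescript
import { ethers } from 'ethers';

// Key for the "BTC-USD-9" feed, as described above.
const key = ethers.utils.keccak256(ethers.utils.toUtf8Bytes('BTC-USD-9'));

// Hypothetical minimal ABI; adjust to the actual `values` struct of the deployed oracle.
const abi    = ['function values(bytes32) view returns (bytes32 oracleCallID, uint256 date, string details, uint256 value)'];
const oracle = new ethers.Contract('0x3b9F1a9aeCb1991f3818f45bd4CC735f4BEE93Ac', abi, ethers.getDefaultProvider('kovan'));

oracle.values(key).then(result => console.log(result));
```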
59 | Deployed addresses
60 | ------------------
61 |
62 | 1. **Kovan:**
63 |
64 | price oracle: `https://kovan.etherscan.io/address/0x3b9f1a9aecb1991f3818f45bd4cc735f4bee93ac`
65 |
66 | app whitelist: `https://kovan.etherscan.io/address/0x651a09cdff5a6669ea8bf05be11eff4aa9cbfdaf`
67 |
68 | whitelist contains:
69 |
70 | * `0xf92f39545340ce2fd6f4248a689fca4f660ae42f`
71 | * `0xe01bccbcab54c42f999b6ce88d63d3a5e96cfdb7`
72 |
73 | Whitelist is administered by:
74 |
75 | * `0x7bd4783FDCAD405A28052a0d1f11236A741da593`
76 |
--------------------------------------------------------------------------------
/0.to-update/erazhu31/SoccerLiveScores/smart-contract/contracts/LiveScoreOracle.sol:
--------------------------------------------------------------------------------
1 | pragma solidity ^0.5.8;
2 | pragma experimental ABIEncoderV2;
3 |
4 | import "openzeppelin-solidity/contracts/ownership/Ownable.sol";
5 | import "iexec-doracle-base/contracts/IexecDoracle.sol";
6 |
7 | contract LiveScoreOracle is Ownable, IexecDoracle
8 | {
9 | struct FixtureValue
10 | {
11 | bytes32 oracleCallID;
12 | string status;
13 | string location;
14 | string added;
15 | string home_name;
16 | uint256 score_home;
17 | string away_name;
18 | uint256 score_away;
19 |
20 |
21 | }
22 |
23 | mapping(bytes32 => FixtureValue) public values;
24 |
25 | /*
26 | event ValueUpdated(
27 | bytes32 indexed id,
28 | bytes32 indexed oracleCallID,
29 | string oldStatus,
30 | string newStatus,
31 | uint256 oldScore_home,
32 | uint256 newScore_home,
33 | uint256 oldScore_away,
34 | uint256 newScore_away
35 | );
36 | */
37 |
38 |
39 | event ValueUpdated(
40 | bytes32 indexed id,
41 | bytes32 indexed oracleCallID,
42 | string status,
43 | uint256 score_home,
44 | uint256 score_away
45 |
46 | );
47 | // Use _iexecHubAddr to force use of custom iexechub, leave 0x0 for autodetect
48 | constructor(address _iexecHubAddr)
49 | public IexecDoracle(_iexecHubAddr)
50 | {}
51 |
52 | function updateEnv(
53 | address _authorizedApp
54 | , address _authorizedDataset
55 | , address _authorizedWorkerpool
56 | , bytes32 _requiredtag
57 | , uint256 _requiredtrust
58 | )
59 | public onlyOwner
60 | {
61 | _iexecDoracleUpdateSettings(_authorizedApp, _authorizedDataset, _authorizedWorkerpool, _requiredtag, _requiredtrust);
62 | }
63 |
64 |
65 | function decodeResults(bytes memory data)
66 | public pure returns(string memory a, string memory b, string memory c, string memory d, uint256 e, string memory f, uint256 g)
67 | {
68 | string memory status;
69 | string memory location;
70 | string memory added;
71 | string memory home_name;
72 | uint256 score_home;
73 | string memory away_name;
74 | uint256 score_away;
75 |
76 | (status, location, added, home_name, score_home, away_name, score_away)= abi.decode(data, (string, string, string, string, uint256, string, uint256));
77 |
78 | return (status, location, added, home_name, score_home, away_name, score_away);
79 |
80 | }
81 | /*
82 | function strConcat(string memory _a, string memory _b, string memory _c, string memory _d, string memory _e) internal pure returns (string memory _concatenatedString) {
83 | bytes memory _ba = bytes(_a);
84 | bytes memory _bb = bytes(_b);
85 | bytes memory _bc = bytes(_c);
86 | bytes memory _bd = bytes(_d);
87 | bytes memory _be = bytes(_e);
88 | string memory abcde = new string(_ba.length + _bb.length + _bc.length + _bd.length + _be.length);
89 | bytes memory babcde = bytes(abcde);
90 | uint k = 0;
91 | uint i = 0;
92 | for (i = 0; i < _ba.length; i++) {
93 | babcde[k++] = _ba[i];
94 | }
95 | for (i = 0; i < _bb.length; i++) {
96 | babcde[k++] = _bb[i];
97 | }
98 | for (i = 0; i < _bc.length; i++) {
99 | babcde[k++] = _bc[i];
100 | }
101 | for (i = 0; i < _bd.length; i++) {
102 | babcde[k++] = _bd[i];
103 | }
104 | for (i = 0; i < _be.length; i++) {
105 | babcde[k++] = _be[i];
106 | }
107 | return string(babcde);
108 | }
109 | */
110 |
111 | function strConcat(string memory _a, string memory _b, string memory _c) internal pure returns (string memory _concatenatedString) {
112 | bytes memory _ba = bytes(_a);
113 | bytes memory _bb = bytes(_b);
114 | bytes memory _bc = bytes(_c);
115 |
116 | string memory abcde = new string(_ba.length + _bb.length + _bc.length );
117 | bytes memory babcde = bytes(abcde);
118 | uint k = 0;
119 | uint i = 0;
120 | for (i = 0; i < _ba.length; i++) {
121 | babcde[k++] = _ba[i];
122 | }
123 | for (i = 0; i < _bb.length; i++) {
124 | babcde[k++] = _bb[i];
125 | }
126 | for (i = 0; i < _bc.length; i++) {
127 | babcde[k++] = _bc[i];
128 | }
129 |
130 | return string(babcde);
131 | }
132 |
133 | function processResult(bytes32 _oracleCallID)
134 | public
135 | {
136 | string memory status;
137 | string memory location;
138 | string memory added;
139 | string memory home_name;
140 | uint256 score_home;
141 | string memory away_name;
142 | uint256 score_away;
143 |
144 | // Parse results
145 | (status, location, added, home_name, score_home, away_name, score_away) = decodeResults(_iexecDoracleGetVerifiedResult(_oracleCallID));
146 |
147 | // string memory te= strConcat(home_name,away_name,added);
148 | // Process results
149 | //uint256 id = uint256(keccak256(te));
150 | bytes32 id = sha256(bytes (strConcat(home_name,away_name,added)));
151 |
152 | emit ValueUpdated(id, _oracleCallID, status, score_home, score_away);
153 |
154 |
155 | values[id].oracleCallID = _oracleCallID;
156 |         values[id].status       = status;
157 |         values[id].location     = location;
158 |         values[id].added        = added;
159 |         values[id].home_name    = home_name;
160 |         values[id].score_home   = score_home;
161 |         values[id].away_name    = away_name;
162 |         values[id].score_away   = score_away;
163 | }
164 | }
165 |
--------------------------------------------------------------------------------
/0.to-update/erazhu31/SoccerLiveScores/smart-contract/contracts/Migrations.sol:
--------------------------------------------------------------------------------
1 | pragma solidity ^0.5.8;
2 |
3 | import "openzeppelin-solidity/contracts/ownership/Ownable.sol";
4 |
5 | contract Migrations is Ownable
6 | {
7 | uint256 public lastCompletedMigration;
8 |
9 | constructor()
10 | public
11 | {
12 | }
13 |
14 | function setCompleted(uint completed) public onlyOwner
15 | {
16 | lastCompletedMigration = completed;
17 | }
18 |
19 | function upgrade(address newAddress) public onlyOwner
20 | {
21 | Migrations upgraded = Migrations(newAddress);
22 | upgraded.setCompleted(lastCompletedMigration);
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/0.to-update/erazhu31/SoccerLiveScores/smart-contract/daemon/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:8-alpine
2 |
3 | # changing user
4 | USER root
5 |
6 | # add necessary packages
7 | RUN apk add --no-cache git python make g++
8 |
9 | # create a work directory inside the container
10 | RUN mkdir /app
11 | WORKDIR /app
12 |
13 | # copy project files
14 | COPY . .
15 |
16 | # install utilities
17 | RUN npm install -g yarn ts-node typescript
18 |
19 | # install dependencies
20 | RUN yarn
21 |
22 | # making entrypoint executable
23 | RUN chmod +x entrypoint.sh
24 |
25 | ENTRYPOINT ["./entrypoint.sh"]
26 |
--------------------------------------------------------------------------------
/0.to-update/erazhu31/SoccerLiveScores/smart-contract/daemon/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2'
2 | services:
3 | doracle-daemon:
4 | image: iexechub/iexec-doracle-daemon:latest
5 | environment:
6 | - DORACLE_ADDR=xxx
7 | - MNEMONIC=xxxx
8 | - PROVIDER=xxx
9 | - REQUESTER=xxxx
10 | restart: unless-stopped
11 |
--------------------------------------------------------------------------------
/0.to-update/erazhu31/SoccerLiveScores/smart-contract/daemon/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | echo "[INFO] Launching DOracle Daemon"
3 | /usr/local/bin/ts-node launch.ts
4 |
--------------------------------------------------------------------------------
/0.to-update/erazhu31/SoccerLiveScores/smart-contract/daemon/launch.ts:
--------------------------------------------------------------------------------
1 | import { ethers } from 'ethers';
2 | import Daemon from './daemon';
3 |
4 | // mainnet 0xed4a0189511859427c33dcc7c85fdd36575ae946
5 | // kovan 0x3b9F1a9aeCb1991f3818f45bd4CC735f4BEE93Ac
6 |
7 | let doracle_addr: string = process.env.DORACLE_ADDR;
8 | let private_key: string = process.env.MNEMONIC;
9 | let provider: ethers.providers.Provider = ethers.getDefaultProvider(process.env.PROVIDER);
10 |
11 | let wallet: ethers.Wallet = new ethers.Wallet(private_key, provider);
12 | let daemon: Daemon = new Daemon(doracle_addr, wallet, process.env.REQUESTER);
13 |
14 | daemon.start();
15 |
--------------------------------------------------------------------------------
/0.to-update/erazhu31/SoccerLiveScores/smart-contract/daemon/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "price-feed-doracle",
3 | "version": "0.0.1",
4 | "description": "Contracts and tools for the iExec based price feed DOracle",
5 | "repository": {
6 | "type": "git",
7 | "url": "git+https://github.com/iExecBlockchainComputing/price-feed-doracle.git"
8 | },
9 | "bugs": {
10 | "url": "https://github.com/iExecBlockchainComputing/price-feed-doracle.git/issues"
11 | },
12 | "files": [
13 | "/build",
14 | "/contracts",
15 | "daemon"
16 | ],
17 | "author": "iExec",
18 | "license": "ISC",
19 | "homepage": "https://github.com/iExecBlockchainComputing/price-feed-doracle.git#readme",
20 | "dependencies": {
21 | "chai": "^4.2.0",
22 | "ethereumjs-util": "^5.2.0",
23 | "iexec-doracle-base": "^0.0.4",
24 | "iexec-poco": "^3.0.35",
25 | "iexec-solidity": "^0.0.7",
26 | "multiaddr": "^6.0.6",
27 | "openzeppelin-solidity": "^2.2.0",
28 | "openzeppelin-test-helpers": "^0.1.5",
29 | "rlc-faucet-contract": "^2.0.0"
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/0.to-update/erazhu31/SoccerLiveScores/smart-contract/daemon/utils/addrToKey.ts:
--------------------------------------------------------------------------------
1 | import { ethers } from 'ethers';
2 |
3 | export default function(addr: string) : string
4 | {
5 | return ethers.utils.hexZeroPad(addr, 32).toString().toLowerCase();
6 | }
7 |
--------------------------------------------------------------------------------
/0.to-update/erazhu31/SoccerLiveScores/smart-contract/daemon/utils/index.ts:
--------------------------------------------------------------------------------
1 | export { default as require } from "./require";
2 | export { default as addrToKey } from "./addrToKey";
3 |
--------------------------------------------------------------------------------
/0.to-update/erazhu31/SoccerLiveScores/smart-contract/daemon/utils/require.ts:
--------------------------------------------------------------------------------
1 | export default function(value: boolean, reason: string = "") : void
2 | {
3 | if (!value) throw Error(reason);
4 | }
5 |
--------------------------------------------------------------------------------
/0.to-update/erazhu31/SoccerLiveScores/smart-contract/migrations/1_initial_migration.js:
--------------------------------------------------------------------------------
1 | var Migrations = artifacts.require("./tools/Migrations.sol");
2 |
3 | module.exports = function(deployer) {
4 | deployer.deploy(Migrations, {gas: 500000});
5 | };
6 |
--------------------------------------------------------------------------------
/0.to-update/erazhu31/SoccerLiveScores/smart-contract/migrations/2_deploy_contracts.js:
--------------------------------------------------------------------------------
1 | var LiveScoreOracle = artifacts.require("LiveScoreOracle");
2 | 
3 | module.exports = async function(deployer, network, accounts)
4 | {
5 |     await deployer.deploy(LiveScoreOracle, "0x0000000000000000000000000000000000000000", { gas: 2500000 });
6 |     const LiveScoreOracleInstance = await LiveScoreOracle.deployed();
7 |     console.log("LiveScoreOracle deployed at address: " + LiveScoreOracleInstance.address);
8 | 
9 | };
10 |
--------------------------------------------------------------------------------
/0.to-update/erazhu31/SoccerLiveScores/smart-contract/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "price-feed-doracle",
3 | "version": "0.0.1",
4 | "description": "Contracts and tools for the iExec based price feed DOracle",
5 | "repository": {
6 | "type": "git",
7 | "url": "git+https://github.com/iExecBlockchainComputing/price-feed-doracle.git"
8 | },
9 | "bugs": {
10 | "url": "https://github.com/iExecBlockchainComputing/price-feed-doracle.git/issues"
11 | },
12 | "files": [
13 | "/build",
14 | "/contracts",
15 | "daemon"
16 | ],
17 | "author": "iExec",
18 | "license": "ISC",
19 | "homepage": "https://github.com/iExecBlockchainComputing/price-feed-doracle.git#readme",
20 | "dependencies": {
21 | "iexec-doracle-base": "^0.0.6",
22 | "iexec-poco": "^3.0.35",
23 | "iexec-solidity": "^0.0.7",
24 | "openzeppelin-solidity": "^2.2.0",
25 | "truffle": "^5.0.25",
26 | "truffle-hdwallet-provider": "^1.0.12"
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/0.to-update/erazhu31/SoccerLiveScores/smart-contract/truffle-config.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Use this file to configure your truffle project. It's seeded with some
3 | * common settings for different networks and features like migrations,
4 | * compilation and testing. Uncomment the ones you need or modify
5 | * them to suit your project as necessary.
6 | *
7 | * More information about configuration can be found at:
8 | *
9 | * truffleframework.com/docs/advanced/configuration
10 | *
11 | * To deploy via Infura you'll need a wallet provider (like truffle-hdwallet-provider)
12 | * to sign your transactions before they're sent to a remote public node. Infura accounts
13 | * are available for free at: infura.io/register.
14 | *
15 | * You'll also need a mnemonic - the twelve word phrase the wallet uses to generate
16 | * public/private key pairs. If you're publishing your code to GitHub make sure you load this
17 | * phrase from a file you've .gitignored so it doesn't accidentally become public.
18 | *
19 | */
20 |
21 | // const HDWalletProvider = require('truffle-hdwallet-provider');
22 | // const infuraKey = "fj4jll3k.....";
23 | //
24 | // const fs = require('fs');
25 | // const mnemonic = fs.readFileSync(".secret").toString().trim();
26 |
27 | var HDWalletProvider = require("truffle-hdwallet-provider");
28 |
29 | module.exports = {
30 | /**
31 | * Networks define how you connect to your ethereum client and let you set the
32 | * defaults web3 uses to send transactions. If you don't specify one truffle
33 | * will spin up a development blockchain for you on port 9545 when you
34 | * run `develop` or `test`. You can ask a truffle command to use a specific
35 | * network from the command line, e.g
36 | *
37 | * $ truffle test --network
38 | */
39 |
40 | networks: {
41 | // Useful for testing. The `development` name is special - truffle uses it by default
42 | // if it's defined here and no other network is specified at the command line.
43 | // You should run a client (like ganache-cli, geth or parity) in a separate terminal
44 | // tab if you use this network and you must also set the `host`, `port` and `network_id`
45 | // options below to some value.
46 | //
47 | // development: {
48 | // host: "127.0.0.1", // Localhost (default: none)
49 | // port: 8545, // Standard Ethereum port (default: none)
50 | // network_id: "*", // Any network (default: none)
51 | // },
52 |
53 | // Another network with more advanced options...
54 | // advanced: {
55 | // port: 8777, // Custom port
56 | // network_id: 1342, // Custom network
57 | // gas: 8500000, // Gas sent with each transaction (default: ~6700000)
58 | // gasPrice: 20000000000, // 20 gwei (in wei) (default: 100 gwei)
59 | // from: , // Account to send txs from (default: accounts[0])
60 | // websockets: true // Enable EventEmitter interface for web3 (default: false)
61 | // },
62 |
63 | // Useful for deploying to a public network.
64 | // NB: It's important to wrap the provider as a function.
65 | // ropsten: {
66 | // provider: () => new HDWalletProvider(mnemonic, `https://ropsten.infura.io/v3/YOUR-PROJECT-ID`),
67 | // network_id: 3, // Ropsten's id
68 | // gas: 5500000, // Ropsten has a lower block limit than mainnet
69 | // confirmations: 2, // # of confs to wait between deployments. (default: 0)
70 | // timeoutBlocks: 200, // # of blocks before a deployment times out (minimum/default: 50)
71 | // skipDryRun: true // Skip dry run before migrations? (default: false for public nets )
72 | // },
73 |
74 | // Useful for private networks
75 | // private: {
76 | // provider: () => new HDWalletProvider(mnemonic, `https://network.io`),
77 | // network_id: 2111, // This network is yours, in the cloud.
78 | // production: true // Treats this network as if it was a public net. (default: false)
79 | // }
80 |
81 | kovan: {
82 | provider: () => new HDWalletProvider(process.env.MNEMONIC, "https://kovan.infura.io/v3/0fcbbdc72ccb436bbe963c59912b12d9"),
83 | network_id: '*',
84 | gasPrice: 1000000000, //1Gwei
85 | }
86 | },
87 |
88 | // Set default mocha options here, use special reporters etc.
89 | mocha: {
90 | // timeout: 100000
91 | },
92 |
93 | // Configure your compilers
94 | compilers: {
95 | solc: {
96 | version: "0.5.8", // Fetch exact version from solc-bin (default: truffle's version)
97 | docker: true, // Use "0.5.1" you've installed locally with docker (default: false)
98 | settings: { // See the solidity docs for advice about optimization and evmVersion
99 | optimizer: {
100 | enabled: true,
101 | runs: 200
102 | },
103 | evmVersion: "byzantium"
104 | }
105 | }
106 | }
107 | }
108 |
--------------------------------------------------------------------------------
/0.to-update/erazhu31/SoccerLiveScores/src/oracle.js:
--------------------------------------------------------------------------------
1 | const https = require('https');
2 | const ethers = require('ethers');
3 | const fs = require('fs');
4 |
5 | const root = 'iexec_out';
6 | const determinismFilePath = `${root}/determinism.iexec`;
7 | const callbackFilePath = `${root}/callback.iexec`;
8 | const errorFilePath = `${root}/error.iexec`;
9 |
10 | /*****************************************************************************
11 | * CONFIG *
12 | *****************************************************************************/
13 |
14 | // livescore-api.com key
15 | const APIKEY = 'nhHn9Q6iuRr1S5Cb';
16 | const APISECRET ='dLFxyuKS8I43a6KyorVxi1QNTiPLnrWA';
17 |
18 |
19 | /*****************************************************************************
20 | * ARGUMENTS *
21 | *****************************************************************************/
22 | let id = process.argv.slice(2);
23 |
24 | /*****************************************************************************
25 | * HTTP QUERY *
26 | *****************************************************************************/
27 |
28 | let URL=`/api-client/scores/live.json?key=${APIKEY}&secret=${APISECRET}&competition_id=${id}`;
29 |
30 |
31 | const query = {
32 | hostname: 'livescore-api.com',
33 | port: 443,
34 | path: URL,
35 | method: 'GET'
36 | };
37 |
38 |
39 | /*****************************************************************************
40 | * EXECUTE *
41 | *****************************************************************************/
42 | new Promise(async (resolve, reject) => {
43 |
44 |
45 |
46 | console.log('- Calling API');
47 | let chunks = [];
48 | let request = https.request(query, res => {
49 | res.on('data', (chunk) => {
50 | chunks.push(chunk);
51 | });
52 | res.on('end', () => {
53 | if (chunks.length)
54 | {
55 | resolve(chunks.join(''));
56 | }
57 | else
58 | {
59 | reject(`[HTTP ERROR]\nstatusCode: ${res.statusCode}`);
60 | }
61 | });
62 | });
63 | request.on('error', reject);
64 | request.end();
65 | })
66 | .then(data => {
67 |
68 |
69 |   let results = data.toString();
70 | 
71 |   // parse the API response before checking for an error field
72 |   let tmpData = JSON.parse(results);
73 | 
74 |   if (tmpData.error !== undefined)
75 |   {
76 |     throw new Error(tmpData.error);
77 |   }
78 | 
79 |   let matches = tmpData.data.match;
80 | 
81 |
82 | //loop fixtures
83 | matches.forEach((game)=>{
84 |
85 |
86 | let score=game.score.split('-');
87 | let homeScore=(parseInt(score[0])!==parseInt(score[0]))?0:parseInt(score[0]);
88 | let awayScore=(parseInt(score[1])!==parseInt(score[1]))?0:parseInt(score[1]);
89 |
90 |
91 |
92 | let iexecCallback = ethers.utils.defaultAbiCoder.encode(
93 | ['string', 'string', 'string', 'string', 'uint256', 'string', 'uint256' ],
94 | [game.status, game.location, game.added, game.home_name, homeScore, game.away_name, awayScore]);
95 |
96 | let iexecDeterminism = ethers.utils.keccak256(iexecCallback);
97 | fs.writeFile(callbackFilePath, iexecCallback , (err) => {});
98 | fs.writeFile(determinismFilePath, iexecDeterminism, (err) => {});
99 |
100 | });
101 |
102 | console.log(`- Success: ${results}`);
103 |
104 |
105 |
106 | })
107 | .catch(error => {
108 | fs.writeFile(
109 | errorFilePath,
110 | error.toString(),
111 | (err) => {}
112 | );
113 | fs.writeFile(
114 | determinismFilePath,
115 | ethers.utils.solidityKeccak256(['string'],[error.toString()]),
116 | (err) => {}
117 | );
118 | console.log(error.toString());
119 | });
120 |
--------------------------------------------------------------------------------
/0.to-update/face-swap/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM bshaffer/faceswap:latest
2 | #https://github.com/bshaffer/faceswap-docker/blob/master/Dockerfile
3 |
4 | COPY entrypoint.sh /entrypoint.sh
5 | RUN apt update && apt install -y wget
6 | RUN chmod +x /entrypoint.sh
7 | ENTRYPOINT ["/entrypoint.sh"]
8 |
9 | # Build:
10 | # docker image build -t iexechub/faceswap:1.0.0 .
11 | # Usage:
12 | # docker run -it -v $(pwd)/iexec_out:/iexec_out iexechub/faceswap:1.0.0 https://somewhere.io/img1.jpg https://somewhere.io/img2.jpg
13 | # docker run -it -v $(pwd)/iexec_out:/iexec_out iexechub/faceswap:1.0.0    (no arguments: the 2 default built-in images are used)
--------------------------------------------------------------------------------
/0.to-update/face-swap/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | #app
4 | FACESWAP_APP=/usr/local/lib/faceswap/faceswap.py
5 |
6 | #created files
7 | IMG1=/tmp/img1.jpg
8 | IMG2=/tmp/img2.jpg
9 | OUTPUT_IMG=/iexec_out/output.jpg
10 | deterministFile=/iexec_out/determinism.iexec
11 |
12 | #input params
13 | IMG1_URL=$1
14 | IMG2_URL=$2
15 |
16 | if [[ -z $IMG1_URL ]]; then
17 | IMG1_URL="https://img-0.journaldunet.com/Xdg903b0Y60YZu4DqwPMlTapL9U=/540x/smart/f9c96ea8642c41349af09d287ef1828e/ccmcms-jdn/11076448.jpg"
18 | echo "WARN: Empty IMG1_URL, will use default IMG1 [IMG1_URL:$IMG1_URL]"
19 | fi
20 |
21 | if [[ -z $IMG2_URL ]]; then
22 | IMG2_URL="https://static3.7sur7.be/static/photo/2018/15/13/1/20180312072251/media_xll_10327921.jpg"
23 | echo "WARN: Empty IMG2_URL, will use default IMG2 [IMG2_URL:$IMG2_URL]"
24 | fi
25 |
26 | echo "Downloading IMG1 [IMG1_URL:$IMG1_URL]"
27 | wget --quiet $IMG1_URL -O $IMG1
28 | echo "Downloading IMG2 [IMG2_URL:$IMG2_URL]"
29 | wget --quiet $IMG2_URL -O $IMG2
30 |
31 | if [[ -f $IMG1 ]] && [[ -s $IMG1 ]] && [[ -f $IMG2 ]] && [[ -s $IMG2 ]]; then
32 |
33 | $FACESWAP_APP $IMG1 $IMG2 $OUTPUT_IMG
34 |
35 | if [[ -f $OUTPUT_IMG ]] && [[ -s $OUTPUT_IMG ]]; then
36 | errorMsg="SUCCESS: Swap completed [swap:$OUTPUT_IMG, shasum:$(shasum $OUTPUT_IMG)]"
37 | else
38 | errorMsg="FAILURE: Failed to create output image"
39 | fi
40 |
41 | else
42 | errorMsg="FAILURE: 1 or more images couldn't be downloaded [IMG1_URL:$IMG1_URL, IMG2_URL:$IMG2_URL]"
43 | fi
44 |
45 | echo $errorMsg
46 | echo $errorMsg >> $deterministFile
47 |
48 |
49 |
50 |
51 |
--------------------------------------------------------------------------------
/0.to-update/factorial/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:2-alpine
2 |
3 | COPY factorial.py /factorial.py
4 |
5 | RUN chmod +x /factorial.py
6 |
7 | ENTRYPOINT ["./factorial.py"]
8 |
--------------------------------------------------------------------------------
/0.to-update/factorial/factorial.py:
--------------------------------------------------------------------------------
1 | #!/usr/local/bin/python
2 | import argparse, sys, math
3 |
4 |
5 | class Range(argparse.Action):
6 | def __init__(self, min=None, max=None, *args, **kwargs):
7 | self.min = min
8 | self.max = max
9 | kwargs["metavar"] = "[%d-%d]" % (self.min, self.max)
10 | super(Range, self).__init__(*args, **kwargs)
11 |
12 | def __call__(self, parser, namespace, value, option_string=None):
13 | if not (self.min <= value <= self.max):
14 | msg = 'invalid choice: %r (choose from [%d-%d])' % \
15 | (value, self.min, self.max)
16 | raise argparse.ArgumentError(self, msg)
17 | setattr(namespace, self.dest, value)
18 |
19 | parser = argparse.ArgumentParser(description='Calculate the factorial of an integer')
20 | parser.add_argument('integer', metavar='N', type=int, action=Range, min=0, max=1000,
21 | help='an integer for the factorial between [0-1000] ')
22 | args = parser.parse_args()
23 | print math.factorial(args.integer)
24 |
25 |
--------------------------------------------------------------------------------
/0.to-update/find-face/Dockerfile:
--------------------------------------------------------------------------------
1 | # This is a sample Dockerfile you can modify to deploy your own app based on face_recognition
2 |
3 | FROM python:3.6-slim-stretch
4 |
5 | RUN apt-get -y update
6 | RUN apt-get install -y --fix-missing \
7 | build-essential \
8 | cmake \
9 | gfortran \
10 | git \
11 | wget \
12 | curl \
13 | graphicsmagick \
14 | libgraphicsmagick1-dev \
15 | libatlas-dev \
16 | libavcodec-dev \
17 | libavformat-dev \
18 | libgtk2.0-dev \
19 | libjpeg-dev \
20 | liblapack-dev \
21 | libswscale-dev \
22 | pkg-config \
23 | python3-dev \
24 | python3-numpy \
25 | software-properties-common \
26 | zip \
27 | && apt-get clean && rm -rf /tmp/* /var/tmp/*
28 |
29 | RUN cd ~ && \
30 | mkdir -p dlib && \
31 | git clone -b 'v19.9' --single-branch https://github.com/davisking/dlib.git dlib/ && \
32 | cd dlib/ && \
33 | python3 setup.py install --yes USE_AVX_INSTRUCTIONS
34 |
35 |
36 | # The rest of this file just runs an example script.
37 |
38 | # If you wanted to use this Dockerfile to run your own app instead, maybe you would do this:
39 | # COPY . /root/your_app_or_whatever
40 | # RUN cd /root/your_app_or_whatever && \
41 | # pip3 install -r requirements.txt
42 | # RUN whatever_command_you_run_to_start_your_app
43 |
44 | RUN cd ~ && \
45 | git clone https://github.com/ageitgey/face_recognition && \
46 | cd face_recognition/ && \
47 | pip3 install -r requirements.txt && \
48 | python3 setup.py install
49 |
50 | RUN cd /
51 |
52 | COPY findFace.py findFace.py
53 | COPY script.sh script.sh
54 |
55 | ENTRYPOINT ["/script.sh"]
56 |
--------------------------------------------------------------------------------
/0.to-update/find-face/README.md:
--------------------------------------------------------------------------------
1 | # find-face
2 | This Dockerfile is from the project [ageitgey/face_recognition](https://github.com/ageitgey/face_recognition). A script has been added to retrieve the faces that the algorithm finds.
3 |
--------------------------------------------------------------------------------
/0.to-update/find-face/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | docker image build -f Dockerfile -t iexechub/find-face .
4 |
--------------------------------------------------------------------------------
/0.to-update/find-face/findFace.py:
--------------------------------------------------------------------------------
1 | from PIL import Image
2 | import face_recognition
3 | import sys
4 |
5 | image_file=sys.argv[1]
6 |
7 | # Load the jpg file into a numpy array
8 | image = face_recognition.load_image_file(image_file)
9 |
10 | # Find all the faces in the image using the default HOG-based model.
11 | # This method is fairly accurate, but not as accurate as the CNN model and not GPU accelerated.
12 | # See also: find_faces_in_picture_cnn.py
13 | face_locations = face_recognition.face_locations(image)
14 |
15 | print("I found {} face(s) in this photograph.".format(len(face_locations)))
16 | count = 0;
17 | for face_location in face_locations:
18 |
19 | count = count + 1;
20 | # Print the location of each face in this image
21 | top, right, bottom, left = face_location
22 | print("A face is located at pixel location Top: {}, Left: {}, Bottom: {}, Right: {}".format(top, left, bottom, right))
23 |
24 | # You can access the actual face itself like this:
25 | face_image = image[top:bottom, left:right]
26 | pil_image = Image.fromarray(face_image)
27 | imageName = "/iexec/faces/face_" + str(count) + ".bmp"
28 | pil_image.save(imageName)
29 |
--------------------------------------------------------------------------------
/0.to-update/find-face/script.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # create the faces folder if it doesn't exist
4 | if [ ! -d /iexec/faces ]; then
5 | mkdir -p /iexec/faces;
6 | fi
7 |
8 | # delete the consensus file if it already exists
9 | rm /iexec/consensus.iexec 2> /dev/null
10 |
11 | # print the output in the consensus.iexec file
12 | python findFace.py $1 2>&1 | tee -a /iexec/consensus.iexec
13 |
14 |
--------------------------------------------------------------------------------
/0.to-update/j48-bitcoin-tx-doracle/app/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:11-alpine
2 |
3 | RUN npm i https fs ethers
4 |
5 | COPY src/doracle.js /doracle.js
6 |
7 | ENTRYPOINT ["node", "/doracle.js"]
8 |
--------------------------------------------------------------------------------
/0.to-update/j48-bitcoin-tx-doracle/app/README.md:
--------------------------------------------------------------------------------
1 | Bitcoin Transfer Verifier DOracle using blockchain.info API
2 | -----
3 |
4 | App
5 | -----
6 | Given a valid Bitcoin transaction hash as input, e.g. 'b6f6991d03df0e2e04dafffcd6bc418aac66049e2cd74b80f14ac86db1e3f0da',
7 | 
8 | the app returns the transaction hash as an ID (bytes32) along with the transfer amount in sats and the transaction timestamp, packed into a single bytes32:
9 | `0xb6f6991d03df0e2e04dafffcd6bc418aac66049e2cd74b80f14ac86db1e3f0da000000000000000000000000000000000000000000000001f2ed64005d3b7183`
10 |
11 | Usage
12 | -----
13 | `docker run -v /tmp/iexec_out:/iexec_out j048/app-bitcoin-tx-doracle:latest b6f6991d03df0e2e04dafffcd6bc418aac66049e2cd74b80f14ac86db1e3f0da`
14 |
15 | Smart Contract
16 | -----
17 | Stores the Bitcoin transaction hash as an ID (bytes32) along with the iExec oracle call ID (bytes32), the transfer amount (uint256) in sats, and the transaction timestamp (uint256) in Unix time.
18 | 
19 | Emits a `TxUpdated` event log with the values txHash, oracleCallID, amount and timestamp.
20 | 
21 | To access the stored values for a transaction, use the public mapping `txAmount(bytes32 txHash)`:
22 | 
23 | `txAmount(0xb6f6991d03df0e2e04dafffcd6bc418aac66049e2cd74b80f14ac86db1e3f0da)`
24 | 
25 | To verify:
26 | https://www.blockchain.com/btc/tx/b6f6991d03df0e2e04dafffcd6bc418aac66049e2cd74b80f14ac86db1e3f0da
27 |
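Offchain, the callback payload can be decoded and unpacked the same way the smart contract does; a minimal sketch, assuming ethers v4:

```typescript
import { ethers } from 'ethers';

// Callback payload from the example above: abi.encode(bytes32 txHash, bytes32 packedData).
const callback =
  '0xb6f6991d03df0e2e04dafffcd6bc418aac66049e2cd74b80f14ac86db1e3f0da' +
  '000000000000000000000000000000000000000000000001f2ed64005d3b7183';

const [txHash, packed] = ethers.utils.defaultAbiCoder.decode(['bytes32', 'bytes32'], callback);

// Mirror the contract's unpacking: low 5 bytes = unix timestamp, remaining 27 bytes = amount in sats.
const TWO_POW_40 = ethers.utils.bigNumberify('0x10000000000');
const word       = ethers.utils.bigNumberify(packed);
const timestamp  = word.mod(TWO_POW_40);
const amount     = word.div(TWO_POW_40);

console.log(txHash, amount.toString(), timestamp.toString());
```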
--------------------------------------------------------------------------------
/0.to-update/j48-bitcoin-tx-doracle/app/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | docker image build -f Dockerfile -t j048/app-bitcoin-tx-doracle:latest .
4 |
--------------------------------------------------------------------------------
/0.to-update/j48-bitcoin-tx-doracle/app/src/doracle.js:
--------------------------------------------------------------------------------
1 | const https = require('https');
2 | const ethers = require('ethers');
3 | const fs = require('fs');
4 |
5 | const root = 'iexec_out';
6 | const determinismFilePath = `${root}/determinism.iexec`;
7 | const callbackFilePath = `${root}/callback.iexec`;
8 | const errorFilePath = `${root}/error.iexec`;
9 |
10 | /*****************************************************************************
11 | * TOOLS *
12 | *****************************************************************************/
13 |
14 | const sleep = (ms) => {
15 | return new Promise(resolve => setTimeout(resolve, ms));
16 | };
17 |
18 | const cat = (path) => {
19 | try {
20 | return fs.readFileSync(path).toString();
21 | } catch (e) {
22 | return null;
23 | }
24 | };
25 |
26 | // modified web3 utils.js
27 | const isHexString = (hex) => {
28 | return ((typeof hex === 'string' || typeof hex === 'number') && /^(-)?[0-9a-f]*$/i.test(hex));
29 | };
30 |
31 | // 0x prefix bool added (used when packing)
32 | const leftPad = (string, chars, sign='0', prefix=true) => {
33 | // given number or hex string will return with 0x prefix by default
34 | const hasPrefix = /^0x/i.test(string) || isHexString(string);
35 | string = string.toString(16).replace(/^0x/i, '');
36 |
37 | const padding = chars - string.length + 1 >= 0 ? chars - string.length + 1 : 0;
38 | if(prefix) return (hasPrefix ? '0x' : '') + new Array(padding).join(sign) + string;
39 | return new Array(padding).join(sign) + string;
40 | };
41 |
42 | /*****************************************************************************
43 | * CONFIG *
44 | *****************************************************************************/
45 |
46 | // public API but key may be needed in future to avoid being rate limited (use dataset)
47 | const APIKEY = cat(`/iexec_in/${process.env.DATASET_FILENAME}`) || undefined;
48 |
49 | // random delay
50 | const WAIT_MIN = parseInt(process.env.WAIT_MIN) || 0; // in ms
51 | const WAIT_MAX = parseInt(process.env.WAIT_MAX) || 0; // in ms
52 |
53 | /*****************************************************************************
54 | * ARGUMENTS *
55 | *****************************************************************************/
56 |
57 | const args = process.argv.slice(2).map(s => s.toUpperCase());
58 |
59 | var txHash;
60 |
61 | // allows only hexstring of 32 bytes (bitcoin transaction hash format)
62 | if (isHexString(args[0]) && args[0].length == 64){
63 | txHash = args[0];
64 | }
65 |
66 | /*****************************************************************************
67 | * HTTP QUERY *
68 | *****************************************************************************/
69 |
70 | let path = `/rawtx/${txHash}`;
71 |
72 | const query = {
73 | method: 'GET',
74 | port: 443,
75 | host: 'blockchain.info',
76 | path: path,
77 | };
78 |
79 | if (APIKEY) query.headers = { key: APIKEY };
80 |
81 | /*****************************************************************************
82 | * EXECUTE *
83 | *****************************************************************************/
84 |
85 | new Promise(async (resolve, reject) => {
86 | console.log(`* blockchain.info API *`);
87 | console.log(`- Input: ${txHash ? `${txHash}`: '-Null-'}`);
88 | console.log(`- API key: ${APIKEY ? `${true}`: `${false}`}`);
89 |
90 | if (!txHash) {
91 | return reject(`Invalid Bitcoin transaction hash format`);
92 | }
93 |
94 | const delay = (WAIT_MAX - WAIT_MIN) * Math.random() + WAIT_MIN;
95 | console.log(`- Waiting for ${delay} ms.`);
96 | await sleep(delay);
97 |
98 | console.log(`- API url: ${query.host}${query.path}\n- Calling blockchain.info API`);
99 |
100 | let chunks = [];
101 | let request = https.request(query, res => {
102 | res.on('data', (chunk) => {
103 | chunks.push(chunk);
104 | });
105 | res.on('end', () => {
106 | if (chunks.length) {
107 | resolve(chunks.join(''));
108 | } else {
109 | reject(`*[HTTP ERROR]\nstatusCode: ${res.statusCode}*`);
110 | }
111 | });
112 | });
113 | request.on('error', reject);
114 | request.end();
115 | })
116 | .then(data => {
117 | let results;
118 |
119 | // parse json
120 | try {
121 | results = JSON.parse(data.toString());
122 | } catch (e) {
123 | // allow non JSON results for functions or just throw error
124 | throw `${data.toString()}`;
125 | }
126 |
127 | if (results.error !== undefined) {
128 | throw `API returned an error. (${results.error})`;
129 | }
130 |
131 | // api results
132 | // numbers must be number type and not string type
133 | let details = results.hash;
134 |
135 | let totalAmount = 0;
136 | let outputs = results.out; // add value of each output in tx
137 | outputs.forEach(function(output) {
138 | totalAmount += parseInt(output.value);
139 | });
140 |
141 | let timestamp = parseInt(results.time);
142 |
143 | // validation logic
144 | if (details.toUpperCase() !== txHash){
145 | throw `invalid results- api tx hash does not match input tx hash (${details}) -- API error`;
146 | }
147 |
148 | if (isNaN(totalAmount)){
149 | throw `invalid results- api tx amount is not a number (${totalAmount}) -- API error`;
150 | }
151 |
152 | if (1230768000 > timestamp || timestamp > 253402300799) {
153 | // 1-1-2009 < timestamp < 12-31-9999
154 | throw `invalid results- timestamp is not a reasonable epoch time (${timestamp})-- API error`;
155 | }
156 |
157 | // data to send to smart contract from api results
158 | // unique hex or string based on details used for lookup
159 | let apiDataID = {value: details, size: 32, type: 'bytes32'};
160 |
161 | // max 32 bytes between all values if packing (when converted to hex)
162 | // in this case only totalAmount/timestamp is needed
163 | let apiDataArray = [
164 | {value: totalAmount, size: 27, type: 'uint256'},
165 | {value: timestamp, size: 5, type: 'uint256'}
166 | ];
167 |
168 | console.log(`- API data ID: ${apiDataID.value}`);
169 | console.log(`- API data: ${JSON.stringify(apiDataArray)}`);
170 |
171 | // encode data to send to smart contract
172 | let abiData = [];
173 | let abiTypes = [];
174 |
175 | abiData.push(leftPad(apiDataID.value, 64));
176 | abiTypes.push(apiDataID.type);
177 |
178 | // packed bytes32 for aesthetics and saving gas
179 | let packedData = '';
180 | apiDataArray.forEach(function (entry) {
181 | packedData += leftPad(entry.value, entry.size * 2, '0', false);
182 | });
183 |
184 | // user error check here instead of trimming
185 | if (packedData.length > 64){
186 | throw `invalid packed data size, packed data must be less than or equal to 32 bytes`;
187 | }
188 |
189 | // packed data always bytes32 (bytes32 vs uint256?)
190 | abiData.push(leftPad(packedData, 64));
191 | abiTypes.push('bytes32');
192 |
193 | console.log(`- ABI Types: ${abiTypes}\n- ABI Data: ${abiData}`);
194 |
195 | var iexeccallback = ethers.utils.defaultAbiCoder.encode(abiTypes, abiData);
196 | var iexecconsensus = ethers.utils.keccak256(iexeccallback);
197 |
198 | fs.writeFile(callbackFilePath, iexeccallback , (err) => {});
199 | fs.writeFile(determinismFilePath, iexecconsensus, (err) => {});
200 |
201 | //console.log(`- callback: ${iexeccallback}\n- consensus: ${iexecconsensus}`);
202 | console.log(`* end *`);
203 | })
204 | .catch(error => {
205 | fs.writeFile(
206 | errorFilePath,
207 | error.toString(),
208 | (err) => {}
209 | );
210 | fs.writeFile(
211 | determinismFilePath,
212 | ethers.utils.solidityKeccak256(['string'], [error.toString()]),
213 | (err) => {}
214 | );
215 |
216 | console.log(`ERROR: ${error.toString()}\n*APP exiting because of error*`);
217 | });
218 |
--------------------------------------------------------------------------------
/0.to-update/j48-bitcoin-tx-doracle/smart-contract/contracts/BitcoinTxDoracle.sol:
--------------------------------------------------------------------------------
1 | pragma solidity ^0.5.8;
2 | pragma experimental ABIEncoderV2;
3 |
4 | import "openzeppelin-solidity/contracts/ownership/Ownable.sol";
5 | import "iexec-doracle-base/contracts/IexecDoracle.sol";
6 |
7 | contract BitcoinTxDoracle is Ownable, IexecDoracle{
8 |
9 | struct Tx
10 | {
11 | bytes32 oracleCallID;
12 | uint256 amount;
13 | uint256 timestamp;
14 | }
15 |
16 | mapping(bytes32 => Tx) public txAmount;
17 |
18 | event TxUpdated(
19 | bytes32 indexed txHash,
20 | bytes32 indexed oracleCallID,
21 | uint256 amount,
22 | uint256 timestamp);
23 |
24 | // Use _iexecHubAddr to force use of custom iexechub, leave 0x0 for autodetect
25 | constructor(address _iexecHubAddr)
26 | public IexecDoracle(_iexecHubAddr)
27 | {}
28 |
29 | function updateEnv(address _authorizedApp, address _authorizedDataset, address _authorizedWorkerpool, bytes32 _requiredtag, uint256 _requiredtrust)
30 | public onlyOwner
31 | {
32 | _iexecDoracleUpdateSettings(_authorizedApp, _authorizedDataset, _authorizedWorkerpool, _requiredtag, _requiredtrust);
33 | }
34 |
35 | function decodeResults(bytes memory results)
36 | public pure returns(bytes32, bytes32)
37 | { return abi.decode(results, (bytes32, bytes32)); }
38 |
39 | function processResult(bytes32 _oracleCallID)
40 | public
41 | {
42 | bytes32 hash;
43 | bytes32 packedData;
44 |
45 | // Parse results
46 | (hash, packedData) = decodeResults(_iexecDoracleGetVerifiedResult(_oracleCallID));
47 |
48 | // Process results
49 |         // unpack required data: low 5 bytes = timestamp, next 27 bytes = amount (see app packing)
50 |         uint256 amount    = uint256(uint216(uint256(packedData >> 40)));
51 |         uint256 timestamp = uint256(uint40(uint256(packedData)));
52 |         require(txAmount[hash].timestamp < timestamp, "tx-exists");
53 |         // there should only ever be 1 entry per tx
54 | 
55 |         emit TxUpdated(hash, _oracleCallID, amount, timestamp);
56 | 
57 |         txAmount[hash].oracleCallID = _oracleCallID;
58 |         txAmount[hash].amount       = amount;
59 |         txAmount[hash].timestamp    = timestamp;
60 | }
61 |
62 | }
63 |
--------------------------------------------------------------------------------
/0.to-update/j48-bitcoin-tx-doracle/smart-contract/contracts/Migrations.sol:
--------------------------------------------------------------------------------
1 | pragma solidity ^0.5.8;
2 |
3 | import "openzeppelin-solidity/contracts/ownership/Ownable.sol";
4 |
5 | contract Migrations is Ownable
6 | {
7 | uint256 public lastCompletedMigration;
8 |
9 | constructor()
10 | public
11 | {
12 | }
13 |
14 | function setCompleted(uint completed) public onlyOwner
15 | {
16 | lastCompletedMigration = completed;
17 | }
18 |
19 | function upgrade(address newAddress) public onlyOwner
20 | {
21 | Migrations upgraded = Migrations(newAddress);
22 | upgraded.setCompleted(lastCompletedMigration);
23 | }
24 | }
--------------------------------------------------------------------------------
/0.to-update/j48-bitcoin-tx-doracle/smart-contract/migrations/1_initial_migration.js:
--------------------------------------------------------------------------------
1 | var Migrations = artifacts.require("./tools/Migrations.sol");
2 |
3 | module.exports = function(deployer) {
4 | deployer.deploy(Migrations, {gas: 500000});
5 | };
--------------------------------------------------------------------------------
/0.to-update/j48-bitcoin-tx-doracle/smart-contract/migrations/2_deploy_contracts.js:
--------------------------------------------------------------------------------
1 | var BitcoinTxDoracle = artifacts.require("BitcoinTxDoracle");
2 |
3 | module.exports = async function(deployer, network, accounts)
4 | {
5 | await deployer.deploy(BitcoinTxDoracle, "0x0000000000000000000000000000000000000000", { gas: 2500000 });
6 | BitcoinTxDoracleInstance = await BitcoinTxDoracle.deployed();
7 | console.log("BitcoinTxDoracle deployed at address: " + BitcoinTxDoracleInstance.address);
8 | };
--------------------------------------------------------------------------------
/0.to-update/j48-bitcoin-tx-doracle/smart-contract/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "bitcoin-tx-doracle",
3 | "version": "0.0.1",
4 | "description": "Contracts and tools for the iExec based bitcoinTx Doracle",
5 | "author": "j48",
6 | "license": "ISC",
7 | "homepage": "https://github.com/j48/iExec/apps",
8 | "dependencies": {
9 | "iexec-doracle-base": "^0.0.6",
10 | "iexec-poco": "^3.0.35",
11 | "iexec-solidity": "^0.0.7",
12 | "openzeppelin-solidity": "^2.2.0",
13 | "truffle": "^5.0.25",
14 | "truffle-hdwallet-provider": "^1.0.12"
15 | }
16 | }
--------------------------------------------------------------------------------
/0.to-update/j48-bitcoin-tx-doracle/smart-contract/truffle.js:
--------------------------------------------------------------------------------
1 | var HDWalletProvider = require("truffle-hdwallet-provider");
2 |
3 | module.exports =
4 | {
5 | networks:
6 | {
7 | docker:
8 | {
9 | host: "iexec-geth-local",
10 | port: 8545,
11 | network_id: "*", // Match any network id,
12 | gasPrice: 22000000000, //22Gwei
13 | },
14 | development:
15 | {
16 | host: "localhost",
17 | port: 8545,
18 | network_id: "*", // Match any network id,
19 | gasPrice: 22000000000, //22Gwei
20 | },
21 | coverage:
22 | {
23 | host: "localhost",
24 | port: 8555, // <-- If you change this, also set the port option in .solcover.js.
25 | network_id: "*",
26 | gas: 0xFFFFFFFFFFF, // <-- Use this high gas value
27 | gasPrice: 0x01 // <-- Use this low gas price
28 | },
29 | mainnet:
30 | {
31 | provider: () => new HDWalletProvider(process.env.MNEMONIC, "https://mainnet.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7"),
32 | network_id: '1',
33 | gasPrice: 22000000000, //22Gwei
34 | },
35 | ropsten:
36 | {
37 | provider: () => new HDWalletProvider(process.env.MNEMONIC, "https://ropsten.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7"),
38 | network_id: '3',
39 | gasPrice: 22000000000, //22Gwei
40 | },
41 | kovan: {
42 | provider: () => new HDWalletProvider(process.env.MNEMONIC, "https://kovan.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7"),
43 | network_id: '42',
44 | gasPrice: 1000000000, //1Gwei
45 | }
46 | },
47 | compilers: {
48 | solc: {
49 | version: "0.5.10",
50 | settings: {
51 | optimizer: {
52 | enabled: true,
53 | runs: 200
54 | }
55 | }
56 | }
57 | },
58 | mocha:
59 | {
60 | enableTimeouts: false
61 | }
62 | };
--------------------------------------------------------------------------------
/0.to-update/ndmg/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM bids/ndmg
2 | COPY demo-ndmg-on-iexec.sh /usr/bin/demo-ndmg-on-iexec.sh
3 | RUN chmod 755 /usr/bin/demo-ndmg-on-iexec.sh
4 | ENTRYPOINT ["demo-ndmg-on-iexec.sh"]
5 |
--------------------------------------------------------------------------------
/0.to-update/ndmg/demo-ndmg-on-iexec.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | touch /tmp/ndmg_demo_dwi.log
3 |
4 |
5 | chmod 755 /tmp/ndmg_demo_dwi.log
6 |
7 | ndmg_demo_dwi | tee /tmp/ndmg_demo_dwi.log
8 |
9 |
10 | mkdir -p /iexec/
11 | echo "generate /iexec/consensus.iexec"
12 |
13 | cat /tmp/ndmg_demo_dwi.log | grep Computing >> /iexec/consensus.iexec
14 | cat /tmp/ndmg_demo_dwi.log | grep "Subject Means" >> /iexec/consensus.iexec
15 | if [ -f /tmp/ndmg_demo/outputs/qa/graphs/desikan-res-4x4x4/desikan-res-4x4x4_plot.html ]
16 | then
17 | cp -f /tmp/ndmg_demo/outputs/qa/graphs/desikan-res-4x4x4/desikan-res-4x4x4_plot.html /iexec/desikan-res-4x4x4_plot.html
18 | fi
19 |
--------------------------------------------------------------------------------
/0.to-update/option-pricing/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.7.0-alpine3.8
2 | RUN apk --no-cache add --virtual .builddeps gcc gfortran musl-dev && pip install numpy==1.14.5 && apk del .builddeps && rm -rf /root/.cache
3 |
4 | COPY option-pricing.py option-pricing.py
5 |
6 | ENTRYPOINT ["python", "option-pricing.py"]
7 |
--------------------------------------------------------------------------------
/0.to-update/option-pricing/option-pricing.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | np.random.seed(12345678)
4 |
5 | def stoc_walk(p,dr,vol,periods):
6 | w = np.random.normal(0,1,size=periods)
7 | for i in range(periods):
8 | p += dr*p + w[i]*vol*p
9 | return p
10 |
11 | # Parameters
12 | s0 = 114.64 # Actual price
13 | drift = 0.0016273 # Drift term (daily)
14 | volatility = 0.088864 # Volatility (daily)
15 | t_ = 365 # Total periods in a year
16 | r = 0.033 # Risk free rate (yearly)
17 | days = 2 # Days until option expiration
18 | N = 100000 # Number of Monte Carlo trials
19 | zero_trials = 0 # Number of trials where the option payoff = 0
20 | k = 100 # Strike price
21 |
22 | avg = 0 # Temporary variable to be assigned to the sum
23 | # of the simulated payoffs
24 |
25 | # Simulation loop
26 | for i in range(N):
27 | temp = stoc_walk(s0,drift,volatility,days)
28 | if temp > k:
29 | payoff = temp-k
30 | payoff = payoff*np.exp(-r/t_*days)
31 | avg += payoff
32 | else:
33 | zero_trials += 1
34 |
35 | # Averaging the payoffs
36 | price = avg/float(N)
37 |
38 | # Printing the results
39 | print("MONTE CARLO PLAIN VANILLA CALL OPTION PRICING")
40 | print("Option price: ",price)
41 | print("Initial price: ",s0)
42 | print("Strike price: ",k)
43 | print("Daily expected drift: ",drift*100,"%")
44 | print("Daily expected volatility: ",volatility*100,"%")
45 | print("Total trials: ",N)
46 | print("Zero trials: ",zero_trials)
47 | print("Percentage of total trials: ",zero_trials/N*100,"%")
48 |
--------------------------------------------------------------------------------
/0.to-update/oyente/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM luongnguyen/oyente
2 | RUN apt-get update
3 | COPY customScript.sh /oyente/customScript.sh
4 | RUN chmod 755 /oyente/customScript.sh
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/0.to-update/oyente/customScript.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | echo "----------------------Inside the docker----------------------------"
3 |
4 | b=( $(find / -name contracts -type d) )
5 | c=( $(find / -name oyente.py -type f) )
6 | echo "----------------------Contracts located----------------------------"
7 |
8 | num=( $(find $b -type f|wc -l) )
9 |
10 | echo ${num}
11 |
12 | arr=( $(find $b -type f) )
13 |
14 | i=0
15 |
16 | while (($i <= $num-1)); do
17 | echo $i
18 | python $c -s ${arr[i]} -ce
19 | ((i++))
20 | done
21 |
22 | rm -rf $b
23 |
24 | echo "-----------------------------Done---------------------------------"
25 |
26 |
--------------------------------------------------------------------------------
/0.to-update/package-tracker-oracle/app/.gitignore:
--------------------------------------------------------------------------------
1 | .env
2 | node_modules
--------------------------------------------------------------------------------
/0.to-update/package-tracker-oracle/app/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:11-alpine
2 | COPY package-tracker.js /app/package-tracker.js
3 | COPY entrypoint.sh /entrypoint.sh
4 | ARG APIKEY=APIKEY
5 | ENV APIKEY=${APIKEY}
6 | RUN npm i https ethers fs
7 | RUN chmod +x /entrypoint.sh
8 | ENTRYPOINT ["/entrypoint.sh"]
--------------------------------------------------------------------------------
/0.to-update/package-tracker-oracle/app/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | node app/package-tracker.js $@
--------------------------------------------------------------------------------
/0.to-update/package-tracker-oracle/app/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "app",
3 | "version": "1.0.0",
4 | "lockfileVersion": 1,
5 | "requires": true,
6 | "dependencies": {
7 | "@types/node": {
8 | "version": "10.14.22",
9 | "resolved": "https://registry.npmjs.org/@types/node/-/node-10.14.22.tgz",
10 | "integrity": "sha512-9taxKC944BqoTVjE+UT3pQH0nHZlTvITwfsOZqyc+R3sfJuxaTtxWjfn1K2UlxyPcKHf0rnaXcVFrS9F9vf0bw=="
11 | },
12 | "aes-js": {
13 | "version": "3.0.0",
14 | "resolved": "https://registry.npmjs.org/aes-js/-/aes-js-3.0.0.tgz",
15 | "integrity": "sha1-4h3xCtbCBTKVvLuNq0Cwnb6ofk0="
16 | },
17 | "bn.js": {
18 | "version": "4.11.8",
19 | "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.8.tgz",
20 | "integrity": "sha512-ItfYfPLkWHUjckQCk8xC+LwxgK8NYcXywGigJgSwOP8Y2iyWT4f2vsZnoOXTTbo+o5yXmIUJ4gn5538SO5S3gA=="
21 | },
22 | "brorand": {
23 | "version": "1.1.0",
24 | "resolved": "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz",
25 | "integrity": "sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8="
26 | },
27 | "dotenv": {
28 | "version": "8.2.0",
29 | "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.2.0.tgz",
30 | "integrity": "sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw=="
31 | },
32 | "elliptic": {
33 | "version": "6.3.3",
34 | "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.3.3.tgz",
35 | "integrity": "sha1-VILZZG1UvLif19mU/J4ulWiHbj8=",
36 | "requires": {
37 | "bn.js": "^4.4.0",
38 | "brorand": "^1.0.1",
39 | "hash.js": "^1.0.0",
40 | "inherits": "^2.0.1"
41 | }
42 | },
43 | "ethers": {
44 | "version": "4.0.38",
45 | "resolved": "https://registry.npmjs.org/ethers/-/ethers-4.0.38.tgz",
46 | "integrity": "sha512-l7l7RIfk2/rIFgRRVLFY3H06S9dhXXPUdMlYm6SCelB6oG+ABmoRig7xSVOLcHLayBfSwssjAAYLKxf1jWhbuQ==",
47 | "requires": {
48 | "@types/node": "^10.3.2",
49 | "aes-js": "3.0.0",
50 | "bn.js": "^4.4.0",
51 | "elliptic": "6.3.3",
52 | "hash.js": "1.1.3",
53 | "js-sha3": "0.5.7",
54 | "scrypt-js": "2.0.4",
55 | "setimmediate": "1.0.4",
56 | "uuid": "2.0.1",
57 | "xmlhttprequest": "1.8.0"
58 | }
59 | },
60 | "fs": {
61 | "version": "0.0.1-security",
62 | "resolved": "https://registry.npmjs.org/fs/-/fs-0.0.1-security.tgz",
63 | "integrity": "sha1-invTcYa23d84E/I4WLV+yq9eQdQ="
64 | },
65 | "hash.js": {
66 | "version": "1.1.3",
67 | "resolved": "https://registry.npmjs.org/hash.js/-/hash.js-1.1.3.tgz",
68 | "integrity": "sha512-/UETyP0W22QILqS+6HowevwhEFJ3MBJnwTf75Qob9Wz9t0DPuisL8kW8YZMK62dHAKE1c1p+gY1TtOLY+USEHA==",
69 | "requires": {
70 | "inherits": "^2.0.3",
71 | "minimalistic-assert": "^1.0.0"
72 | }
73 | },
74 | "https": {
75 | "version": "1.0.0",
76 | "resolved": "https://registry.npmjs.org/https/-/https-1.0.0.tgz",
77 | "integrity": "sha1-PDfHrhqO65ZpBKKtHpdaGUt+06Q="
78 | },
79 | "inherits": {
80 | "version": "2.0.4",
81 | "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
82 | "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
83 | },
84 | "js-sha3": {
85 | "version": "0.5.7",
86 | "resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.5.7.tgz",
87 | "integrity": "sha1-DU/9gALVMzqrr0oj7tL2N0yfKOc="
88 | },
89 | "minimalistic-assert": {
90 | "version": "1.0.1",
91 | "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz",
92 | "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A=="
93 | },
94 | "scrypt-js": {
95 | "version": "2.0.4",
96 | "resolved": "https://registry.npmjs.org/scrypt-js/-/scrypt-js-2.0.4.tgz",
97 | "integrity": "sha512-4KsaGcPnuhtCZQCxFxN3GVYIhKFPTdLd8PLC552XwbMndtD0cjRFAhDuuydXQ0h08ZfPgzqe6EKHozpuH74iDw=="
98 | },
99 | "setimmediate": {
100 | "version": "1.0.4",
101 | "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.4.tgz",
102 | "integrity": "sha1-IOgd5iLUoCWIzgyNqJc8vPHTE48="
103 | },
104 | "uuid": {
105 | "version": "2.0.1",
106 | "resolved": "https://registry.npmjs.org/uuid/-/uuid-2.0.1.tgz",
107 | "integrity": "sha1-wqMN7bPlNdcsz4LjQ5QaULqFM6w="
108 | },
109 | "xmlhttprequest": {
110 | "version": "1.8.0",
111 | "resolved": "https://registry.npmjs.org/xmlhttprequest/-/xmlhttprequest-1.8.0.tgz",
112 | "integrity": "sha1-Z/4HXFwk/vOfnWX197f+dRcZaPw="
113 | }
114 | }
115 | }
116 |
--------------------------------------------------------------------------------
/0.to-update/package-tracker-oracle/app/package-tracker.js:
--------------------------------------------------------------------------------
1 | const https = require('https');
2 | const ethers = require('ethers');
3 | const fs = require('fs');
4 |
5 | const root = 'iexec_out';
6 | const determinismFilePath = `${root}/determinism.iexec`;
7 | const callbackFilePath = `${root}/callback.iexec`;
8 | const errorFilePath = `${root}/error.iexec`;
9 |
10 |
11 | /*****************************************************************************
12 | * TOOLS *
13 | *****************************************************************************/
14 | const sleep = (ms) => {
15 | return new Promise(resolve => setTimeout(resolve, ms));
16 | }
17 |
18 | /*****************************************************************************
19 | * CONFIG *
20 | *****************************************************************************/
21 |
22 | const APIKEY = process.env.APIKEY;
23 |
24 | // random delay
25 | const WAIT_MIN = parseInt(process.env.WAIT_MIN) || 0; // in ms
26 | const WAIT_MAX = parseInt(process.env.WAIT_MAX) || 0; // in ms
27 |
28 | const statusValue = {
29 | PRIS_EN_CHARGE: 0,
30 | EN_LIVRAISON: 1,
31 | EXPEDIE: 2,
32 | A_RETIRER: 3,
33 | TRI_EFFECTUE: 4,
34 | DISTRIBUE: 5,
35 | LIVRE: 6,
36 | DESTINATAIRE_INFORME: 7,
37 | RETOUR_DESTINATAIRE: 8,
38 | ERREUR: 9,
39 | INCONNU: 10
40 | }
41 |
42 | /*****************************************************************************
43 | * ARGUMENTS *
44 | *****************************************************************************/
45 |
46 | var [ id ] = process.argv.slice(2).map(s => s.toUpperCase());
47 |
48 | /*****************************************************************************
49 | * HTTP QUERY *
50 | *****************************************************************************/
51 | let path = `/suivi/v1/${id}`;
52 |
53 | const query = {
54 | method: 'GET',
55 | port: 443,
56 | host: 'api.laposte.fr',
57 | path: path,
58 | headers: { 'X-Okapi-Key': APIKEY },
59 | };
60 |
61 | /*****************************************************************************
62 | * EXECUTE *
63 | *****************************************************************************/
64 | new Promise(async (resolve, reject) => {
65 |
66 | const delay = (WAIT_MAX-WAIT_MIN) * Math.random() + WAIT_MIN;
67 | console.log(`- Waiting for ${delay} ms.`);
68 | await sleep(delay);
69 |
70 | console.log(`- Calling API ${query.host}${query.path}`);
71 | let chunks = [];
72 | let request = https.request(query, res => {
73 | res.on('data', (chunk) => {
74 | chunks.push(chunk);
75 | });
76 | res.on('end', () => {
77 | if (chunks.length)
78 | {
79 | resolve(chunks.join(''));
80 | }
81 | else
82 | {
83 | reject(`[HTTP ERROR]\nstatusCode: ${res.statusCode}`);
84 | }
85 | });
86 | });
87 | request.on('error', reject);
88 | request.end();
89 | })
90 | .then(data => {
91 | let results = JSON.parse(data.toString());
92 |
93 | if (results.error !== undefined)
94 | {
95 | throw new Error(results.error);
96 | }
97 |
98 | if (results.code == 'BAD_REQUEST' || results.code == 'RESOURCE_NOT_FOUND')
99 | {
100 | throw new Error(results.message);
101 | }
102 |
103 | let { code, status } = results;
104 |
105 | if (code == undefined || status == undefined)
106 | {
107 | throw new Error("invalid results");
108 | }
109 |
110 | let statusNumber = statusValue[status];
111 | let date = new Date().getTime();
112 |
113 | let iexeccallback = ethers.utils.defaultAbiCoder.encode(['string', 'uint256', 'uint256'], [code, statusNumber, date]);
114 | let iexecconsensus = ethers.utils.keccak256(iexeccallback);
115 | fs.writeFile(callbackFilePath, iexeccallback , (err) => {});
116 | fs.writeFile(determinismFilePath, iexecconsensus, (err) => {});
117 |
118 | console.log(`- Success: ${code} ${statusNumber} ${date}`);
119 | })
120 | .catch(error => {
121 | fs.writeFile(
122 | errorFilePath,
123 | error.toString(),
124 | (err) => {}
125 | );
126 | fs.writeFile(
127 | determinismFilePath,
128 | ethers.utils.solidityKeccak256(['string'],[error.toString()]),
129 | (err) => {}
130 | );
131 | console.log(error.toString());
132 | });
133 |
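For reference, callback.iexec above is ABI-encoded as (string, uint256, uint256), the same layout LaPosteOracle.decodeResults expects, and determinism.iexec is its keccak256. A minimal sketch (not shipped with the app; file paths assumed relative to the run directory) to decode and cross-check both files locally with the same ethers v4 API:

// check-callback.js — hypothetical helper, not part of the repo
const fs = require('fs');
const ethers = require('ethers');

const callback = fs.readFileSync('iexec_out/callback.iexec').toString();
const determinism = fs.readFileSync('iexec_out/determinism.iexec').toString();

// Same layout as LaPosteOracle.decodeResults: (string code, uint256 status, uint256 date)
const [code, status, date] = ethers.utils.defaultAbiCoder.decode(['string', 'uint256', 'uint256'], callback);
console.log(code, status.toString(), new Date(date.toNumber()).toISOString());

// The determinism value is simply keccak256 of the encoded callback
console.log('determinism matches:', ethers.utils.keccak256(callback) === determinism);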
--------------------------------------------------------------------------------
/0.to-update/package-tracker-oracle/app/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "app",
3 | "version": "1.0.0",
4 | "description": "",
5 | "main": "package-tracker.js",
6 | "scripts": {
7 | "test": "echo \"Error: no test specified\" && exit 1"
8 | },
9 | "author": "",
10 | "license": "ISC",
11 | "dependencies": {
12 | "dotenv": "^8.2.0",
13 | "ethers": "^4.0.38",
14 | "fs": "0.0.1-security",
15 | "https": "^1.0.0"
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/0.to-update/package-tracker-oracle/chain.json:
--------------------------------------------------------------------------------
1 | {
2 | "default": "kovan",
3 | "chains": {
4 | "dev": {
5 | "host": "http://localhost:8545",
6 | "sms": "http://localhost:5000",
7 | "id": "17",
8 | "hub": "0x60E25C038D70A15364DAc11A042DB1dD7A2cccBC"
9 | },
10 | "ropsten": {
11 | "host": "https://ropsten.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7",
12 | "id": "3"
13 | },
14 | "rinkeby": {
15 | "host": "https://rinkeby.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7",
16 | "id": "4"
17 | },
18 | "kovan": {
19 | "host": "https://kovan.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7",
20 | "id": "42",
21 | "sms": "https://sms-kovan.iex.ec"
22 | },
23 | "mainnet": {
24 | "host": "https://mainnet.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7",
25 | "id": "1",
26 | "sms": "https://sms-mainnet.iex.ec"
27 | }
28 | }
29 | }
--------------------------------------------------------------------------------
/0.to-update/package-tracker-oracle/deployed.json:
--------------------------------------------------------------------------------
1 | {
2 | "app": {
3 | "42": "0x7f14b4110bd07772E00F0fA3AFF2f0F3E2ccfCe0"
4 | }
5 | }
--------------------------------------------------------------------------------
/0.to-update/package-tracker-oracle/iexec.json:
--------------------------------------------------------------------------------
1 | {
2 |   "description": "My iExec resource description, must be at least 150 chars long in order to pass the validation checks. Describe your application, dataset or workerpool to your users",
3 | "license": "MIT",
4 | "author": "?",
5 | "social": {
6 | "website": "?",
7 | "github": "?"
8 | },
9 | "logo": "logo.png",
10 | "buyConf": {
11 | "params": "",
12 | "tag": "0x0000000000000000000000000000000000000000000000000000000000000000",
13 | "trust": "0",
14 | "callback": "0x0000000000000000000000000000000000000000"
15 | },
16 | "app": {
17 | "owner": "0x866A9D83B80266F2D40882b13463615eda305b08",
18 | "name": "LaPoste-tracker",
19 | "type": "DOCKER",
20 | "multiaddr": "registry.hub.docker.com/andy92pac/tracker:1.0.0",
21 | "checksum": "0x016d0f7fe968243774daf8d0f6e5748c1293a98ec3eabace3e87573fa1c9269a",
22 | "mrenclave": ""
23 | },
24 | "order": {
25 | "apporder": {
26 | "app": "0x7f14b4110bd07772E00F0fA3AFF2f0F3E2ccfCe0",
27 | "appprice": "0",
28 | "volume": "1000000",
29 | "tag": "0x0000000000000000000000000000000000000000000000000000000000000000",
30 | "datasetrestrict": "0x0000000000000000000000000000000000000000",
31 | "workerpoolrestrict": "0x0000000000000000000000000000000000000000",
32 | "requesterrestrict": "0x0000000000000000000000000000000000000000"
33 | }
34 | }
35 | }
--------------------------------------------------------------------------------
/0.to-update/package-tracker-oracle/orders.json:
--------------------------------------------------------------------------------
1 | {
2 | "42": {
3 | "apporder": {
4 | "app": "0x7f14b4110bd07772E00F0fA3AFF2f0F3E2ccfCe0",
5 | "appprice": "0",
6 | "volume": "1000000",
7 | "tag": "0x0000000000000000000000000000000000000000000000000000000000000000",
8 | "datasetrestrict": "0x0000000000000000000000000000000000000000",
9 | "workerpoolrestrict": "0x0000000000000000000000000000000000000000",
10 | "requesterrestrict": "0x0000000000000000000000000000000000000000",
11 | "salt": "0xf8fa30787d9b4daf7632338316cbafe132ead5070c524e2a54a9deef9a77caf7",
12 | "sign": "0xcbbebcdfe9503786e496913f72bccbf938481243dea5960fccbe224b5730fb5757afab2e1812c2970e30b10791af34f6ec67830be6d1094e87ae210e2bc7eea91b"
13 | }
14 | }
15 | }
--------------------------------------------------------------------------------
/0.to-update/package-tracker-oracle/smart-contract/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
--------------------------------------------------------------------------------
/0.to-update/package-tracker-oracle/smart-contract/contracts/LaPosteOracle.sol:
--------------------------------------------------------------------------------
1 | pragma solidity ^0.5.8;
2 | pragma experimental ABIEncoderV2;
3 |
4 | import "openzeppelin-solidity/contracts/ownership/Ownable.sol";
5 | import "iexec-doracle-base/contracts/IexecDoracle.sol";
6 |
7 | contract LaPosteOracle is Ownable, IexecDoracle
8 | {
9 |
10 | enum Status {
11 | PRIS_EN_CHARGE,
12 | EN_LIVRAISON,
13 | EXPEDIE,
14 | A_RETIRER,
15 | TRI_EFFECTUE,
16 | DISTRIBUE,
17 | LIVRE,
18 | DESTINATAIRE_INFORME,
19 | RETOUR_DESTINATAIRE,
20 | ERREUR,
21 | INCONNU
22 | }
23 |
24 | struct TrackingReport
25 | {
26 | bytes32 oracleCallID;
27 | string trackingNumber;
28 | Status trackingStatus;
29 | uint256 lastUpdated;
30 | }
31 |
32 | mapping (bytes32 => TrackingReport) public trackings;
33 |
34 | event NewTrackingReport(
35 | bytes32 indexed id,
36 | bytes32 indexed oracleCallID,
37 | string trackingNumber,
38 | uint256 status,
39 | uint256 lastUpdated
40 | );
41 |
42 | // Use _iexecHubAddr to force use of custom iexechub, leave 0x0 for autodetect
43 | constructor(address _iexecHubAddr)
44 | public IexecDoracle(_iexecHubAddr)
45 | {}
46 |
47 | function updateEnv(
48 | address _authorizedApp
49 | , address _authorizedDataset
50 | , address _authorizedWorkerpool
51 | , bytes32 _requiredtag
52 | , uint256 _requiredtrust
53 | )
54 | public onlyOwner
55 | {
56 | _iexecDoracleUpdateSettings(_authorizedApp, _authorizedDataset, _authorizedWorkerpool, _requiredtag, _requiredtrust);
57 | }
58 |
59 | function decodeResults(bytes memory results)
60 | public pure returns(string memory, uint256, uint256)
61 | { return abi.decode(results, (string, uint256, uint256)); }
62 |
63 | function processResult(bytes32 _oracleCallID)
64 | public
65 | {
66 | string memory trackingNumber;
67 | uint256 status;
68 | uint256 date;
69 |
70 | // Parse results
71 | (trackingNumber, status, date) = decodeResults(_iexecDoracleGetVerifiedResult(_oracleCallID));
72 |
73 | // Process results
74 | bytes32 id = keccak256(bytes(trackingNumber));
75 | require (date > trackings[id].lastUpdated, "tracking report is too old");
76 | emit NewTrackingReport(id, _oracleCallID, trackingNumber, status, date);
77 | trackings[id].oracleCallID = _oracleCallID;
78 | trackings[id].trackingNumber = trackingNumber;
79 | trackings[id].trackingStatus = Status(status);
80 | trackings[id].lastUpdated = date;
81 | }
82 | }
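
A hypothetical sketch (not in this repo) of feeding a finalized iExec task into the contract above with ethers v4; the address, mnemonic and task id are placeholders, and it assumes the iExec task id is what gets passed as _oracleCallID:

// push-result.js — placeholders only, adapt before use
const ethers = require('ethers');

const ORACLE_ADDRESS = '0x...'; // deployed LaPosteOracle (placeholder)
const TASK_ID = '0x...';        // finalized iExec task id (placeholder)
const abi = ['function processResult(bytes32 _oracleCallID)'];

async function main() {
  const provider = new ethers.providers.InfuraProvider('kovan');
  const wallet = ethers.Wallet.fromMnemonic(process.env.MNEMONIC).connect(provider);
  const oracle = new ethers.Contract(ORACLE_ADDRESS, abi, wallet);
  const tx = await oracle.processResult(TASK_ID); // reverts if the result cannot be verified yet
  await tx.wait();
  console.log('tracking report stored in tx', tx.hash);
}

main();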
--------------------------------------------------------------------------------
/0.to-update/package-tracker-oracle/smart-contract/contracts/Migrations.sol:
--------------------------------------------------------------------------------
1 | pragma solidity >=0.4.21 <0.6.0;
2 |
3 | contract Migrations {
4 | address public owner;
5 | uint public last_completed_migration;
6 |
7 | constructor() public {
8 | owner = msg.sender;
9 | }
10 |
11 | modifier restricted() {
12 | if (msg.sender == owner) _;
13 | }
14 |
15 | function setCompleted(uint completed) public restricted {
16 | last_completed_migration = completed;
17 | }
18 |
19 | function upgrade(address new_address) public restricted {
20 | Migrations upgraded = Migrations(new_address);
21 | upgraded.setCompleted(last_completed_migration);
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/0.to-update/package-tracker-oracle/smart-contract/migrations/1_initial_migration.js:
--------------------------------------------------------------------------------
1 | const Migrations = artifacts.require("Migrations");
2 |
3 | module.exports = function(deployer) {
4 | deployer.deploy(Migrations);
5 | };
6 |
--------------------------------------------------------------------------------
/0.to-update/package-tracker-oracle/smart-contract/migrations/2_deploy_contracts.js:
--------------------------------------------------------------------------------
1 | var LaPosteOracle = artifacts.require("LaPosteOracle");
2 |
3 | module.exports = async function(deployer, network, accounts)
4 | {
5 |   await deployer.deploy(LaPosteOracle, "0x0000000000000000000000000000000000000000", { gas: 2500000 });
6 |   const LaPosteOracleInstance = await LaPosteOracle.deployed();
7 |   console.log("LaPosteOracle deployed at address: " + LaPosteOracleInstance.address);
8 | };
--------------------------------------------------------------------------------
/0.to-update/package-tracker-oracle/smart-contract/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "smart-contract",
3 | "version": "1.0.0",
4 | "description": "",
5 | "main": "truffle.js",
6 | "directories": {
7 | "test": "test"
8 | },
9 | "scripts": {
10 | "test": "echo \"Error: no test specified\" && exit 1"
11 | },
12 | "author": "",
13 | "license": "ISC",
14 | "dependencies": {
15 | "iexec-doracle-base": "0.0.7",
16 | "iexec-poco": "^3.0.35",
17 | "iexec-solidity": "0.0.16",
18 | "openzeppelin-solidity": "^2.3.0",
19 | "truffle": "^5.0.41",
20 | "truffle-hdwallet-provider": "^1.0.17"
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/0.to-update/package-tracker-oracle/smart-contract/truffle.js:
--------------------------------------------------------------------------------
1 | var HDWalletProvider = require("truffle-hdwallet-provider");
2 |
3 | module.exports =
4 | {
5 | networks:
6 | {
7 | docker:
8 | {
9 | host: "iexec-geth-local",
10 | port: 8545,
11 | network_id: "*", // Match any network id,
12 | gasPrice: 22000000000, //22Gwei
13 | },
14 | development:
15 | {
16 | host: "localhost",
17 | port: 8545,
18 | network_id: "*", // Match any network id,
19 | gasPrice: 22000000000, //22Gwei
20 | },
21 | coverage:
22 | {
23 | host: "localhost",
24 | port: 8555, // <-- If you change this, also set the port option in .solcover.js.
25 | network_id: "*",
26 | gas: 0xFFFFFFFFFFF, // <-- Use this high gas value
27 | gasPrice: 0x01 // <-- Use this low gas price
28 | },
29 | mainnet:
30 | {
31 | provider: () => new HDWalletProvider(process.env.MNEMONIC, "https://mainnet.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7"),
32 | network_id: '1',
33 | gasPrice: 22000000000, //22Gwei
34 | },
35 | ropsten:
36 | {
37 | provider: () => new HDWalletProvider(process.env.MNEMONIC, "https://ropsten.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7"),
38 | network_id: '3',
39 | gasPrice: 22000000000, //22Gwei
40 | },
41 | kovan: {
42 | provider: () => new HDWalletProvider(process.env.MNEMONIC, "https://kovan.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7"),
43 | network_id: '42',
44 | gasPrice: 1000000000, //1Gwei
45 | }
46 | },
47 | compilers: {
48 | solc: {
49 | version: "0.5.10",
50 | settings: {
51 | optimizer: {
52 | enabled: true,
53 | runs: 200
54 | }
55 | }
56 | }
57 | },
58 | mocha:
59 | {
60 | enableTimeouts: false
61 | }
62 | };
--------------------------------------------------------------------------------
/0.to-update/param-checker/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04
2 | RUN apt-get update
3 | RUN apt-get install -y sudo
4 | RUN apt-get install -y curl
5 | RUN curl -sL https://deb.nodesource.com/setup_8.x | sudo -E bash -
6 | RUN apt-get install -y nodejs
7 | RUN npm install -g vanity-eth --unsafe
8 | RUN apt install -y openssl
9 | COPY test.js /test.js
10 |
11 | ENTRYPOINT ["node", "/test.js"]
12 |
13 | # docker image build -t nexus.iex.ec/param-checker .
--------------------------------------------------------------------------------
/0.to-update/param-checker/test.js:
--------------------------------------------------------------------------------
1 | console.log(process.argv);
2 |
--------------------------------------------------------------------------------
/0.to-update/pengiundev-soccerapp/contract/PriceOracle.sol:
--------------------------------------------------------------------------------
1 | pragma solidity ^0.5.8;
2 | pragma experimental ABIEncoderV2;
3 |
4 | import "openzeppelin-solidity/contracts/ownership/Ownable.sol";
5 | import "iexec-doracle-base/contracts/IexecDoracle.sol";
6 |
7 | contract PriceOracle is Ownable, IexecDoracle
8 | {
9 | struct TimedValue
10 | {
11 | bytes32 oracleCallID;
12 | string result;
13 | }
14 |
15 | mapping(bytes32 => TimedValue) public values;
16 |
17 | event ValueUpdated(
18 | bytes32 indexed id,
19 | bytes32 indexed oracleCallID,
20 | string oldResult,
21 | string newResult
22 | );
23 |
24 | // Use _iexecHubAddr to force use of custom iexechub, leave 0x0 for autodetect
25 | constructor(address _iexecHubAddr)
26 | public IexecDoracle(_iexecHubAddr)
27 | {}
28 |
29 | function updateEnv(
30 | address _authorizedApp
31 | , address _authorizedDataset
32 | , address _authorizedWorkerpool
33 | , bytes32 _requiredtag
34 | , uint256 _requiredtrust
35 | )
36 | public onlyOwner
37 | {
38 | _iexecDoracleUpdateSettings(_authorizedApp, _authorizedDataset, _authorizedWorkerpool, _requiredtag, _requiredtrust);
39 | }
40 |
41 | function decodeResults(bytes memory results)
42 | public pure returns(string memory)
43 | { return abi.decode(results, (string)); }
44 |
45 | function processResult(bytes32 _oracleCallID)
46 | public
47 | {
48 | string memory result;
49 |
50 | // Parse results
51 | result = decodeResults(_iexecDoracleGetVerifiedResult(_oracleCallID));
52 |
53 | // Process results
54 | bytes32 id = keccak256(bytes(result));
55 | emit ValueUpdated(id, _oracleCallID, values[id].result, result);
56 | values[id].oracleCallID = _oracleCallID;
57 | values[id].result = result;
58 | }
59 | }
60 |
--------------------------------------------------------------------------------
/0.to-update/pengiundev-soccerapp/offchain/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:11-alpine
2 |
3 | RUN npm i axios ethers fs
4 |
5 | COPY src/index.js /index.js
6 |
7 | ENTRYPOINT ["node", "/index.js"]
--------------------------------------------------------------------------------
/0.to-update/pengiundev-soccerapp/offchain/src/index.js:
--------------------------------------------------------------------------------
1 | const axios = require('axios')
2 | const fs = require('fs')
3 | const ethers = require('ethers')
4 |
5 | const determinismFilePath = `iexec_out/determinism.iexec`;
6 | const callbackFilePath = `iexec_out/callback.iexec`;
7 | const errorFilePath = `iexec_out/error.iexec`;
8 | const API_URL = 'https://www.api-football.com/demo/api/v2/fixtures/id/';
9 |
10 | let [id] = process.argv.slice(2)
11 |
12 |
13 | async function main () {
14 |   try {
15 |     let result = await axios.get(API_URL+id);
16 |     result = result.data.api.fixtures[0].score.fulltime;
17 |     if(result) {
18 |       let iexecCallback = ethers.utils.defaultAbiCoder.encode(['string'], [result]);
19 |       let iexecDeterminism = ethers.utils.keccak256(iexecCallback);
20 |       fs.writeFile(callbackFilePath, iexecCallback, (err) => {});
21 |       fs.writeFile(determinismFilePath, iexecDeterminism, (err) => {});
22 |       console.log(result)
23 |     }
24 |     else {
25 |       throw new Error("No result yet")
26 |     }
27 |   }
28 |   catch(error) {
29 |     fs.writeFile(
30 |       errorFilePath,
31 |       error.toString(),
32 |       (err) => {}
33 |     );
34 |     fs.writeFile(
35 |       determinismFilePath,
36 |       ethers.utils.solidityKeccak256(['string'],[error.toString()]),
37 |       (err) => {}
38 |     );
39 |     console.log(error)
40 |   }
41 | }
42 | main();
--------------------------------------------------------------------------------
/0.to-update/randomGenerator/Dockerfile:
--------------------------------------------------------------------------------
1 | #FROM iexechub/python-scone
2 | FROM sconecuratedimages/iexec:python-3.6.6-alpine3.6-new-rt
3 | COPY --from=sconecuratedimages/iexec:cli-alpine /opt/scone/scone-cli /opt/scone/scone-cli
4 | COPY --from=sconecuratedimages/iexec:cli-alpine /usr/local/bin/scone /usr/local/bin/scone
5 | COPY --from=sconecuratedimages/iexec:cli-alpine /opt/scone/bin /opt/scone/bin
6 | RUN apk add bash --no-cache
7 | COPY signer /signer
8 |
9 | RUN echo "http://dl-cdn.alpinelinux.org/alpine/v3.5/community" >> /etc/apk/repositories \
10 | && apk update \
11 | && apk add --update-cache --no-cache libgcc \
12 | && apk --no-cache --update-cache add gcc gfortran python python-dev py-pip build-base wget freetype-dev libpng-dev \
13 | && apk add --no-cache --virtual .build-deps gcc musl-dev
14 |
15 | RUN SCONE_MODE=sim pip install pysha3 attrdict python-gnupg web3
16 |
17 | RUN cp /usr/bin/python3.6 /usr/bin/python3
18 |
19 | COPY src /app
20 |
21 | RUN SCONE_MODE=sim SCONE_HASH=1 SCONE_HEAP=1G SCONE_ALPINE=1 \
22 | && mkdir conf \
23 | && scone fspf create fspf.pb \
24 | && scone fspf addr fspf.pb / --not-protected --kernel / \
25 | && scone fspf addr fspf.pb /usr --authenticated --kernel /usr \
26 | && scone fspf addf fspf.pb /usr /usr \
27 | && scone fspf addr fspf.pb /bin --authenticated --kernel /bin \
28 | && scone fspf addf fspf.pb /bin /bin \
29 | && scone fspf addr fspf.pb /lib --authenticated --kernel /lib \
30 | && scone fspf addf fspf.pb /lib /lib \
31 | && scone fspf addr fspf.pb /etc --authenticated --kernel /etc \
32 | && scone fspf addf fspf.pb /etc /etc \
33 | && scone fspf addr fspf.pb /sbin --authenticated --kernel /sbin \
34 | && scone fspf addf fspf.pb /sbin /sbin \
35 | && scone fspf addr fspf.pb /signer --authenticated --kernel /signer \
36 | && scone fspf addf fspf.pb /signer /signer \
37 | && scone fspf addr fspf.pb /app --authenticated --kernel /app \
38 | && scone fspf addf fspf.pb /app /app \
39 | && scone fspf encrypt ./fspf.pb > /conf/keytag \
40 | && MRENCLAVE="$(SCONE_HASH=1 python)" \
41 | && FSPF_TAG=$(cat conf/keytag | awk '{print $9}') \
42 | && FSPF_KEY=$(cat conf/keytag | awk '{print $11}') \
43 | && FINGERPRINT="$FSPF_KEY|$FSPF_TAG|$MRENCLAVE" \
44 | && echo $FINGERPRINT > conf/fingerprint.txt \
45 | && printf "\n" \
46 | && printf "#####################################################\n" \
47 | && printf "MREnclave: $FINGERPRINT\n" \
48 | && printf "#####################################################\n" \
49 | && printf "done"
50 |
51 | ENTRYPOINT [ "python3", "/app/app.py" ]
52 |
--------------------------------------------------------------------------------
/0.to-update/randomGenerator/Dockerfile-unsafe:
--------------------------------------------------------------------------------
1 | FROM python:3.6-alpine
2 |
3 | RUN apk --no-cache add --virtual .builddeps gcc musl-dev
4 | RUN pip install pysha3
5 | RUN apk del .builddeps
6 | RUN rm -rf /root/.cache
7 |
8 | COPY src /app
9 |
10 | ENTRYPOINT [ "python3", "/app/randomGenerator.py" ]
11 |
--------------------------------------------------------------------------------
/0.to-update/randomGenerator/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | docker image build -f Dockerfile-unsafe -t iexechub/random-generator-unsafe:0.0.1 .
4 | docker image build -f Dockerfile -t iexechub/random-generator-scone:0.0.1 .
5 |
--------------------------------------------------------------------------------
/0.to-update/randomGenerator/metadata.txt:
--------------------------------------------------------------------------------
1 | === SCONE ===
2 |
3 | MREnclave:
4 |
5 | 143be34e8755646696477e3965930c3ac280ef2173cb1f43199c606c7142721|28dce6e0ff505348fdc8093c9d2b8e61|b84bc68bae8cdc8703ca4525b2cc16deffe9def4247498ebcc467830a67caf6d
6 |
7 | digest:
8 | sha256:10f585c2cf6f4dbda8254f957fabdee94ba36dfa2e6b3d7a58807bdc762ff7bc
9 |
10 | === UNSAFE ===
11 |
12 | digest:
13 |
14 | eb9effb46678365224c8012db72cc629e7241d62a65990d0105f08bf32d00754
15 |
--------------------------------------------------------------------------------
/0.to-update/randomGenerator/signer/signer.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import attrdict
4 | import ssl
5 | import json
6 | import zipfile
7 | import random
8 | import traceback
9 | import gnupg
10 | import base64
11 |
12 | from Crypto.Cipher import AES, PKCS1_OAEP
13 | from Crypto.PublicKey import RSA
14 | from web3.auto import w3
15 | from eth_account.messages import defunct_hash_message
16 | from shutil import copyfile
17 |
18 | keccak256 = w3.soliditySha3
19 | debug = True
20 |
21 | class DigestSigner:
22 |     def __init__(self, enclaveKey, worker, taskid, digest):
23 |         self.result = digest
24 |         self.resultHash = keccak256([ "bytes32", "bytes32" ], [ taskid, digest ])
25 |         self.resultSalt = keccak256([ "address", "bytes32", "bytes32" ], [ worker, taskid, digest ])
26 |         hash = defunct_hash_message(keccak256([ "bytes32", "bytes32" ], [ self.resultHash, self.resultSalt ]))
27 |         self.signature = w3.eth.account.signHash(hash, private_key=enclaveKey).signature
28 |
29 |     def jsonify(self):
30 |         return json.dumps({
31 |             'result': self.result,
32 |             'resultHash': self.resultHash.hex(),
33 |             'resultSalt': self.resultSalt.hex(),
34 |             'signature': self.signature.hex(),
35 |         })
36 |
37 | def GetPublicKey():
38 |     try:
39 |         key = open('/iexec_out/public.key', 'rb')
40 |         pubKeyObj = RSA.importKey(key.read())
41 |     except:
42 |         if debug:
43 |             print("Public key is not valid, couldn't import it!")
44 |             traceback.print_exc()
45 |         pubKeyObj = None
46 |
47 |     key.close()
48 |     return pubKeyObj
49 |
50 | def WriteEncryptedKey(symmetricKey, pubKeyObj):
51 |     print("Encrypting symmetric key")
52 |     try:
53 |         encryptor = PKCS1_OAEP.new(pubKeyObj)
54 |         encrypted = encryptor.encrypt(symmetricKey)
55 |         with open('/iexec_out/encrypted_key', 'wb+') as output:
56 |             output.write(encrypted)
57 |         if debug:
58 |             with open('/iexec_out/plaintext_key', 'wb+') as output:
59 |                 output.write(symmetricKey)
60 |
61 |     except:
62 |         print('Error with opening key!')
63 |         traceback.print_exc()
64 |         key.close()
65 |
66 | def WriteInitializationVector(iv):
67 |     print("Writing iv on disk")
68 |     try:
69 |         ivfile = open('/iexec_out/iv', 'wb+')
70 |     except Exception as ex:
71 |         traceback.print_exc()
72 |         print(ex)
73 |     else:
74 |         ivfile.write(iv)
75 |     finally:
76 |         ivfile.close()
77 |
78 | def TestReadEncryptedKey():
79 |     try:
80 |         with open('/iexec_out/private.key', 'rb') as input:
81 |             binKey = input.read()
82 |         priKeyObj = RSA.importKey(binKey)
83 |         with open('/iexec_out/encrypted_key', 'rb') as encrypted:
84 |             encrypted_key = encrypted.read()
85 |         with open('/iexec_out/plaintext_key', 'rb') as original:
86 |             original_key = original.read()
87 |     except:
88 |         print('Error reading key')
89 |         traceback.print_exc()
90 |     else:
91 |         decryptor = PKCS1_OAEP.new(priKeyObj)
92 |         key = decryptor.decrypt(encrypted_key)
93 |         assert key == original_key, "Keys don't match"
94 |         return key
95 |
96 | def TestEncryptedOutput(symmetricKey):
97 |     try:
98 |         with open('/iexec_out/result.zip.aes', 'rb') as input, open('/iexec_out/iv','rb') as ivfile:
99 |             iv = input.read(16)
100 |             ivfromfile = ivfile.read()
101 |             assert iv == ivfromfile, "Init vector don't match"
102 |             encryptedOutput = input.read()
103 |     except:
104 |         print('Error reading encrypted output')
105 |         traceback.print_exc()
106 |     else:
107 |         decryptedOutput = DecryptOutput(encryptedOutput, symmetricKey, iv)
108 |         padNb = decryptedOutput[-1:]
109 |
110 |         #test padding
111 |         assert bytearray(decryptedOutput[-padNb[0]:]) == bytearray(padNb * padNb[0]), "Padding not right!"
112 |
113 |         #test decrypted equal to original
114 |         decryptedOutput = decryptedOutput[:len(decryptedOutput) - padNb[0]]
115 |         ZipOutput()
116 |         with open('/iexec_out/' + os.environ['taskid'] +'_result.zip', 'rb') as input:
117 |             originalZip = input.read()
118 |         assert(decryptedOutput == originalZip)
119 |         with open('/iexec_out/result.test.zip', 'wb+') as output:
120 |             output.write(decryptedOutput)
121 |         zip_ref = zipfile.ZipFile('iexec_out/result.test.zip', 'r')
122 |         zip_ref.extractall('iexec_out')
123 |         zip_ref.close()
124 |
125 | def DecryptOutput(encryptedOutput, key, iv):
126 |     aes = AES.new(key, AES.MODE_CBC, iv)
127 |     return aes.decrypt(encryptedOutput)
128 |
129 | def ZipOutput():
130 |     zipf = zipfile.ZipFile(zippedOutputPath, 'a', zipfile.ZIP_DEFLATED)
131 |
132 |     os.chdir(zipTargetDirectory)
133 |
134 |     for root, dirs, files in os.walk('./'):
135 |         for file in files:
136 |             if file == zipFileName:
137 |                 continue
138 |             print("Writing file " + file + " to zip archive.")
139 |             zipf.write(os.path.join(root, file))
140 |
141 |     zipf.close()
142 |
143 | def PadZippedOutput():
144 |     print("Padding zipped output")
145 |     try:
146 |         input = open(zippedOutputPath, 'ab')
147 |         zipSize = os.path.getsize(zippedOutputPath)
148 |         blockSize = 16
149 |         nb = blockSize - zipSize % blockSize
150 |         input.write(bytearray(bytes([nb]) * nb))
151 |
152 |     except Exception as ex:
153 |         traceback.print_exc()
154 |         print(ex)
155 |
156 | def EncryptZippedOutput(pubKeyObj):
157 |     try:
158 |         input = open(zippedOutputPath, 'rb')
159 |         output = open('/iexec_out/result.zip.aes', 'wb+')
160 |
161 |         #generate initialization vector for AES and prepend it to output
162 |         iv = os.getrandom(16)
163 |         output.write(iv)
164 |         WriteInitializationVector(iv)
165 |
166 |         #generate AES key and encrypt it/write it on disk
167 |         key = os.getrandom(32)
168 |         WriteEncryptedKey(key, pubKeyObj)
169 |
170 |         aes = AES.new(key, AES.MODE_CBC, iv)
171 |         buffer_size = 8192
172 |
173 |         #chunks = iter(lambda: input.read(buffer_size), '')
174 |         result = input.read()
175 |         #for chunk in chunks:
176 |         output.write(aes.encrypt(result))
177 |
178 |     except Exception as ex:
179 |         traceback.print_exc()
180 |
181 |
182 | def WriteEnclaveSign(digestPath):
183 |     import hashlib, os
184 |     SHAhash = hashlib.sha3_256()
185 |     try:
186 |         input = open(digestPath, 'rb')
187 |         while 1:
188 |             # Read file in as little chunks
189 |             buf = input.read(4096)
190 |             if not buf : break
191 |             SHAhash.update(buf)
192 |         input.close()
193 |
194 |         digest = '0x' + SHAhash.hexdigest()
195 |         enclaveKey = os.environ['enclave_key']
196 |         taskid = os.environ['taskid']
197 |         worker = os.environ['worker']
198 |         result = DigestSigner(
199 |             enclaveKey = enclaveKey,
200 |             worker = worker,
201 |             taskid = taskid,
202 |             digest = digest,
203 |         ).jsonify()
204 |
205 |         with open('/iexec_out/enclaveSig.iexec', 'w+') as outfile:
206 |             outfile.write(result)
207 |
208 |     except Exception as ex:
209 |         traceback.print_exc()
210 |         print(ex)
211 |
212 | if __name__ == '__main__':
213 |
214 |     sconeDir = '/scone'
215 |     iexecOutDir = '/iexec_out'
216 |     determinismFile = 'determinism.iexec'
217 |     callbackFile = 'callback.iexec'
218 |
219 |     WriteEnclaveSign(sconeDir + '/' + determinismFile)
220 |
221 |     copyfile(sconeDir + '/' + callbackFile, iexecOutDir + '/' + callbackFile)
222 |
--------------------------------------------------------------------------------
/0.to-update/randomGenerator/src/app.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 |
3 | import os
4 | import random
5 | import sha3
6 | import sys
7 |
8 | root = ''
9 | inFolder = '{}iexec_in/'.format(root)
10 | outFolder = '{}scone/'.format(root)
11 | callbackFilePath = '{}callback.iexec'.format(outFolder)
12 | determinismFilePath = '{}determinism.iexec'.format(outFolder)
13 |
14 | datasetLocationEnvvar = 'IEXEC_INPUT_FILES_FOLDER'
15 | datasetFilenameEnvvar = 'IEXEC_DATASET_FILENAME'
16 |
17 | if __name__ == '__main__':
18 |
19 |     # Seed using the arguments → still not deterministic even with arguments
20 |     random.seed(" ".join(sys.argv), random.random())
21 |
22 |     # OPTIONAL: reseed using the dataset → still not deterministic even with a dataset
23 |     try:
24 |         root = os.environ.get(datasetLocationEnvvar, inFolder)
25 |         file = os.environ.get(datasetFilenameEnvvar)
26 |         if file:
27 |             with open('{root}{file}'.format(root=root, file=file), 'r') as file:
28 |                 random.seed(file.read(), random.random())
29 |     except FileNotFoundError:
30 |         pass
31 |
32 |     # Generate random value and write it to the callback file
33 |     callback = '{:064x}'.format(random.getrandbits(256))
34 |     with open(callbackFilePath, 'w') as callbackFile:
35 |         callbackFile.write('0x{}'.format(callback))
36 |
37 |     # Generate the determinism file for verification and write it to the determinism file
38 |     determinism = sha3.keccak_256()
39 |     determinism.update(bytes.fromhex(callback))
40 |
41 |     with open(determinismFilePath, 'w') as determinismFile:
42 |         determinismFile.write('0x{}'.format(determinism.hexdigest()))
43 |
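A quick cross-check of the two files written above (not part of the app; paths taken from app.py, run from the folder containing scone/): the determinism value should be the keccak256 of the 32-byte callback, which can be verified with the same ethers utility used by the other oracles in this repo.

// verify-random.js — hypothetical helper
const fs = require('fs');
const ethers = require('ethers');

const callback = fs.readFileSync('scone/callback.iexec').toString().trim();
const determinism = fs.readFileSync('scone/determinism.iexec').toString().trim();

console.log('callback    :', callback);
console.log('determinism :', determinism);
console.log('match       :', ethers.utils.keccak256(callback) === determinism);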
--------------------------------------------------------------------------------
/0.to-update/scinumpy/Dockerfile:
--------------------------------------------------------------------------------
1 | # Use an official Python runtime as a parent image
2 | FROM python:3.6-slim
3 |
4 | RUN pip install numpy scipy pandas matplotlib
5 |
6 | # entrypoint to pyhon directly
7 | ENTRYPOINT ["/usr/local/bin/python"]
8 |
--------------------------------------------------------------------------------
/0.to-update/scinumpy/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | docker image build -f Dockerfile -t iexechub/scinumpy .
4 |
--------------------------------------------------------------------------------
/0.to-update/snap/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM mrmoor/esa-snap
2 |
3 | RUN apt-get install -y unzip
4 | COPY processDataset.sh processDataset.sh
5 |
6 | RUN chmod +x processDataset.sh
7 |
8 | ENTRYPOINT ["sh","-c","./processDataset.sh $@"]
9 |
--------------------------------------------------------------------------------
/0.to-update/snap/docker-run-test.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker_image_id="$1"
4 |
5 | docker run -it $docker_image_id bash https://raw.githubusercontent.com/iExecBlockchainComputing/iexec-dapps-registry/snap/iExecBlockchainComputing/Snap/resample/resample_s2.xml https://raw.githubusercontent.com/iExecBlockchainComputing/iexec-dapps-registry/snap/iExecBlockchainComputing/Snap/resample/resample_20m.properties https://s3.amazonaws.com/esa-snap-poc/S2A_MSIL1C_20180807T101021_N0206_R022_T32TQR_20180809T104755.zip
6 |
--------------------------------------------------------------------------------
/0.to-update/snap/processDataset.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # enable next line for debugging purpose
3 | # set -x
4 |
5 | # inspired from this example :https://senbox.atlassian.net/wiki/spaces/SNAP/pages/70503475/Bulk+Processing+with+GPT
6 |
7 | ############################################
8 | # User Configuration
9 | ############################################
10 |
11 | # adapt this path to your needs
12 | #export PATH=~/progs/snap/bin:$PATH
13 | gptPath="gpt"
14 |
15 |
16 |
17 | ############################################
18 | # Command line handling
19 | ############################################
20 |
21 |
22 |
23 |
24 | if [ "$#" -ne 3 ]; then
25 | echo "Illegal number of parameters. Must be 3 => 1:graphXmlURL,2:parameterFileURL,3:inputDatasetURL "
26 | exit 1
27 | fi
28 |
29 | # first parameter is a url to the graph xml
30 | graphXmlURL="$1"
31 |
32 | # second parameter is a url to a parameter file
33 | parameterFileURL="$2"
34 |
35 | # use third parameter for url to dataset zip products
36 | inputDatasetURL="$3"
37 |
38 |
39 |
40 | ############################################
41 | # Helper functions
42 | ############################################
43 | removeExtension() {
44 | file="$1"
45 | echo "$(echo "$file" | sed -r 's/\.[^\.]*$//')"
46 | }
47 |
48 |
49 | ############################################
50 | # Main processing
51 | ############################################
52 |
53 | ############################################
54 | # Check all URLS
55 | ############################################
56 | wget -q --spider $graphXmlURL
57 | if [ $? -ne 0 ]
58 | then
59 | echo " url $graphXmlURL not available"
60 | exit 1
61 | fi
62 |
63 | wget -q --spider $parameterFileURL
64 | if [ $? -ne 0 ]
65 | then
66 | echo " url $parameterFileURL not available"
67 | exit 1
68 | fi
69 |
70 | wget -q --spider $inputDatasetURL
71 | if [ $? -ne 0 ]
72 | then
73 | echo " url $inputDatasetURL not available"
74 | exit 1
75 | fi
76 |
77 | ############################################
78 | # Download all inputs
79 | ############################################
80 |
81 | wget $graphXmlURL -O graphXml.xml
82 |
83 | if [ ! -f graphXml.xml ]
84 | then
85 | echo "graphXml.xml file not found after wget on $graphXmlURL"
86 | exit 1
87 | fi
88 |
89 |
90 | wget $parameterFileURL -O parameters.properties
91 |
92 | if [ ! -f parameters.properties ]
93 | then
94 | echo "parameters.properties file not found after wget on $parameterFileURL"
95 | exit 1
96 | fi
97 |
98 | wget $inputDatasetURL -O input.zip
99 |
100 | if [ ! -f input.zip ]
101 | then
102 | echo "input.zip file not found after wget on $inputDatasetURL"
103 | exit 1
104 | fi
105 |
106 | ############################################
107 | # Prepare directories
108 | ############################################
109 |
110 | mkdir -p /iexec/
111 |
112 |
113 | mkdir -p /input
114 | mv input.zip /input
115 |
116 | cd /input
117 | echo "unzipping input"
118 | unzip input.zip
119 | if [ $? -eq 0 ]
120 | then
121 | echo "unzip success"
122 | else
123 | echo "unzip failed"
124 | exit 1
125 | fi
126 | cd -
127 |
128 | mkdir -p /iexec/output
129 |
130 | # the d option limits the elements to loop over to directories. Remove it, if you want to use files.
131 | for F in $(ls -1d /input/S2*.SAFE); do
132 | sourceFile="$(realpath "$F")"
133 | targetFile="/iexec/output/$(removeExtension "$(basename ${F})").dim"
134 | ${gptPath} graphXml.xml -e -p parameters.properties -t ${targetFile} ${sourceFile}
135 | done
136 |
137 | #generate /iexec/consensus.iexec
138 | rm -f /iexec/consensus.iexec
139 | touch /iexec/consensus.iexec
140 |
141 |
142 | cat graphXml.xml >> /iexec/consensus.iexec
143 | cat parameters.properties >> /iexec/consensus.iexec
144 | echo $inputDatasetURL >> /iexec/consensus.iexec
145 |
146 | nbFiles=$(find /iexec/output -type f | wc -l)
147 | echo "outputs files number : " >> /iexec/consensus.iexec
148 | echo $nbFiles >> /iexec/consensus.iexec
149 |
--------------------------------------------------------------------------------
/0.to-update/vanitygen/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM debian:8
2 | MAINTAINER Vladimir Ostapenco
3 |
4 | RUN apt-get update && apt-get install -y gcc make git libssl-dev libpcre3-dev
5 |
6 | RUN git clone https://github.com/cbeams/vanitygen.git /usr/local/src/vanitygen
7 | RUN make --directory /usr/local/src/vanitygen
8 | RUN ln -s /usr/local/src/vanitygen/vanitygen /usr/local/bin/vanitygen
9 |
10 | COPY vanity-with-consensus.sh /vanity-with-consensus.sh
11 | RUN chmod +x /vanity-with-consensus.sh
12 |
13 | ENTRYPOINT ["/vanity-with-consensus.sh"]
14 |
15 | # docker image build -t iexechub/vanitygen:1.0.2 .
--------------------------------------------------------------------------------
/0.to-update/vanitygen/vanity-with-consensus.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | vanityResult=/iexec_out/keypair.txt
4 | consensusFile=/iexec_out/determinism.iexec
5 |
6 | vanitygen $@ >> $vanityResult
7 | cat $vanityResult
8 |
9 | vanityPattern=$(grep 'Pattern:' $vanityResult | sed 's/^.*: //')
10 | publicAddress=$(grep 'Address: '$vanityPattern $vanityResult | sed 's/^.*: //')
11 |
12 | publicAddressLength=${#publicAddress}
13 |
14 | rm -f $consensusFile
15 | echo $vanityPattern >> $consensusFile
16 | echo $publicAddressLength >> $consensusFile
--------------------------------------------------------------------------------
/0.to-update/windy-feed/.gitignore:
--------------------------------------------------------------------------------
1 | smart-contract/build
2 | smart-contract/daemon/node_modules
3 | smart-contract/node_modules
--------------------------------------------------------------------------------
/0.to-update/windy-feed/README.md:
--------------------------------------------------------------------------------
1 | # wind-feed
2 |
--------------------------------------------------------------------------------
/0.to-update/windy-feed/app/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:11-alpine
2 | COPY wind-feed.js /src/wind-feed.js
3 | COPY entrypoint.sh /entrypoint.sh
4 | RUN npm i https ethers fs
5 | RUN chmod +x /entrypoint.sh
6 | ENTRYPOINT ["/entrypoint.sh"]
--------------------------------------------------------------------------------
/0.to-update/windy-feed/app/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | node src/wind-feed.js "$@"
--------------------------------------------------------------------------------
/0.to-update/windy-feed/app/wind-feed.js:
--------------------------------------------------------------------------------
1 | const https = require('https');
2 | const ethers = require('ethers');
3 | const fs = require('fs');
4 |
5 | const root = 'iexec_out';
6 | const determinismFilePath = `${root}/determinism.iexec`;
7 | const callbackFilePath = `${root}/callback.iexec`;
8 | const errorFilePath = `${root}/error.iexec`;
9 |
10 |
11 | /*****************************************************************************
12 | * TOOLS *
13 | *****************************************************************************/
14 | const sleep = (ms) => {
15 | return new Promise(resolve => setTimeout(resolve, ms));
16 | }
17 |
18 | const cat = (path) => {
19 | try { return fs.readFileSync(path).toString(); } catch (e) { return null; }
20 | }
21 |
22 | /*****************************************************************************
23 | * CONFIG *
24 | *****************************************************************************/
25 |
26 | // coin api key
27 | //const APIKEY = 'KXsaxepCiFoAlxKXkLzDO5sIbuLojtjH';
28 | const APIKEY1 = '4a71fa5207ec7e06042f7e63a647a80b';
29 | // const APIKEY = '69CC0AA9-1E4D-4E41-806F-8C3642729B88';
30 | // const APIKEY = 'D2C881D6-0BBF-4EFE-A572-AE6DB379D43E';
31 | // const APIKEY = 'FB7B2516-70A1-42D8-8702-292F29F19768';
32 |
33 |
34 | // random delay
35 | const WAIT_MIN = parseInt(process.env.WAIT_MIN) || 0; // in ms
36 | const WAIT_MAX = parseInt(process.env.WAIT_MAX) || 0; // in ms
37 |
38 | /*****************************************************************************
39 | * ARGUMENTS *
40 | *****************************************************************************/
41 | var [lat, long ] = process.argv.slice(2);
42 |
43 |
44 | /*****************************************************************************
45 | * HTTP QUERY *
46 | *****************************************************************************/
47 |
48 |
49 | var lat = lat || 37.39;
50 | var long = long || -122.08;
51 |
52 |
53 | const url = "https://api.openweathermap.org/data/2.5/weather";
54 | const query1 = `${url}?lat=${lat}&lon=${long}&appid=${APIKEY1}`;
55 |
56 | /*****************************************************************************
57 | * EXECUTE *
58 | *****************************************************************************/
59 |
60 | new Promise(async (resolve, reject) => {
61 |
62 | const delay = (WAIT_MAX-WAIT_MIN) * Math.random() + WAIT_MIN;
63 | console.log(`- Waiting for ${delay} ms.`);
64 | await sleep(delay);
65 |
66 |   console.log(query1);
67 | let chunks = [];
68 | let request = https.get(query1, res => {
69 | res.setEncoding('utf8');
70 | res.on('data', (chunk) => {
71 | chunks.push(chunk);
72 | });
73 | res.on('end', () => {
74 | if (chunks.length)
75 | {
76 | resolve(chunks.join(''));
77 | }
78 | else
79 | {
80 | reject(`[HTTP ERROR]\nstatusCode: ${res.statusCode}`);
81 | }
82 | });
83 | });
84 | request.on('error', reject);
85 | request.end();
86 | })
87 | .then(data => {
88 | console.log("Inside " + data)
89 | let results = JSON.parse(data.toString());
90 | console.log("\n\n");
91 | console.dir(results)
92 | if (results.error !== undefined)
93 | {
94 | throw new Error(results.error);
95 | }
96 |
97 | let timestamp = undefined;
98 | let details = undefined;
99 | let speed = undefined;
100 | let deg = undefined;
101 |
102 |
103 |
104 | var d = new Date();
105 |
106 | // convert to msec
107 | // subtract local time zone offset
108 | // get UTC time in msec
109 | var utc = d.getTime() + (d.getTimezoneOffset() * 60000);
110 |
111 | // create new Date object for different city
112 | // using supplied offset
113 | var nd = new Date(utc + (1000 * results.timezone));
114 |
115 |
116 | timestamp = nd.getTime();
117 | console.log(results.wind.speed +" ," + " " + results.wind.deg);
118 |   speed = Math.round(results.wind.speed); // keep integral: the callback encodes uint256
119 |   deg = Math.round(results.wind.deg);
120 |   details = `${lat},${long}`;
121 | //value = Math.round(results.rate * 10**power);
122 |
123 |
124 | if (isNaN(timestamp) || results.wind.speed === undefined)
125 | {
126 | throw new Error("invalid results");
127 | }
128 |
129 | var iexeccallback = ethers.utils.defaultAbiCoder.encode(['uint256', 'uint256', 'uint256', 'string'], [timestamp, speed, deg, details]);
130 | var iexecconsensus = ethers.utils.keccak256(iexeccallback);
131 | fs.writeFile(callbackFilePath, iexeccallback , (err) => {});
132 | fs.writeFile(determinismFilePath, iexecconsensus, (err) => {});
133 |
134 | console.log(`- Success: ${timestamp} ${details}`);
135 | })
136 | .catch(error => {
137 | fs.writeFile(
138 | errorFilePath,
139 | error.toString(),
140 | (err) => {}
141 | );
142 | fs.writeFile(
143 | determinismFilePath,
144 | ethers.utils.solidityKeccak256(['string'],[error.toString()]),
145 | (err) => {}
146 | );
147 | console.log(error.toString());
148 | });
--------------------------------------------------------------------------------
/0.to-update/windy-feed/chain.json:
--------------------------------------------------------------------------------
1 | {
2 | "default": "kovan",
3 | "chains": {
4 | "dev": {
5 | "host": "http://localhost:8545",
6 | "sms": "http://localhost:5000",
7 | "id": "17",
8 | "hub": "0x60E25C038D70A15364DAc11A042DB1dD7A2cccBC"
9 | },
10 | "ropsten": {
11 | "host": "https://ropsten.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7",
12 | "id": "3"
13 | },
14 | "rinkeby": {
15 | "host": "https://rinkeby.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7",
16 | "id": "4"
17 | },
18 | "kovan": {
19 | "host": "https://kovan.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7",
20 | "id": "42",
21 | "sms": "https://sms-kovan.iex.ec"
22 | },
23 | "mainnet": {
24 | "host": "https://mainnet.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7",
25 | "id": "1",
26 | "sms": "https://sms-mainnet.iex.ec"
27 | }
28 | }
29 | }
--------------------------------------------------------------------------------
/0.to-update/windy-feed/deployed.json:
--------------------------------------------------------------------------------
1 | {
2 | "app": {
3 | "42": "0x4018305a8DB65945dbf2657B1f76C197a580Fe5c"
4 | }
5 | }
--------------------------------------------------------------------------------
/0.to-update/windy-feed/iexec.json:
--------------------------------------------------------------------------------
1 | {
2 |   "description": "My iExec resource description, must be at least 150 chars long in order to pass the validation checks. Describe your application, dataset or workerpool to your users",
3 | "license": "MIT",
4 | "author": "?",
5 | "social": {
6 | "website": "?",
7 | "github": "?"
8 | },
9 | "logo": "logo.png",
10 | "buyConf": {
11 | "params": {
12 | "0": ""
13 | },
14 | "tag": "0x0000000000000000000000000000000000000000000000000000000000000000",
15 | "trust": 0,
16 | "callback": "0x0000000000000000000000000000000000000000"
17 | },
18 | "app": {
19 | "owner": "0x7AA9C3cC7c0700177eb6e98c970Ff007b42460c5",
20 | "name": "WindFeed",
21 | "type": "DOCKER",
22 | "multiaddr": "registry.hub.docker.com/22blockchain22/wind-feed:1.0.0",
23 | "checksum": "0xc25cf51cd50c83d8294f7bb2207678b1d55c0845650105bc26c153c75d5ddabf",
24 | "mrenclave": ""
25 | },
26 | "order": {
27 | "apporder": {
28 | "app": "0x4018305a8DB65945dbf2657B1f76C197a580Fe5c",
29 | "appprice": 0,
30 | "volume": 1000000,
31 | "tag": "0x0000000000000000000000000000000000000000000000000000000000000000",
32 | "datasetrestrict": "0x0000000000000000000000000000000000000000",
33 | "workerpoolrestrict": "0x0000000000000000000000000000000000000000",
34 | "requesterrestrict": "0x0000000000000000000000000000000000000000"
35 | }
36 | }
37 | }
--------------------------------------------------------------------------------
/0.to-update/windy-feed/orders.json:
--------------------------------------------------------------------------------
1 | {
2 | "42": {
3 | "apporder": {
4 | "app": "0x4018305a8DB65945dbf2657B1f76C197a580Fe5c",
5 | "appprice": 0,
6 | "volume": 1000000,
7 | "tag": "0x0000000000000000000000000000000000000000000000000000000000000000",
8 | "datasetrestrict": "0x0000000000000000000000000000000000000000",
9 | "workerpoolrestrict": "0x0000000000000000000000000000000000000000",
10 | "requesterrestrict": "0x0000000000000000000000000000000000000000",
11 | "salt": "0x30fb392cac585afb97b00c12a2ae44babd663cc49765b08afe8b4ced33c7fecc",
12 | "sign": "0x28ef56c64599cd2b30c0a8b348af89543492f35e3050f68a1320dc5ba557cbc636eb450fbd4fe42d8b1e4bac82f18f1b1504847e59e879ff97d07f08befbf33f1b"
13 | }
14 | }
15 | }
--------------------------------------------------------------------------------
/0.to-update/windy-feed/smart-contract/contracts/Migrations.sol:
--------------------------------------------------------------------------------
1 | pragma solidity >=0.4.21 <0.6.0;
2 |
3 | contract Migrations {
4 | address public owner;
5 | uint public last_completed_migration;
6 |
7 | constructor() public {
8 | owner = msg.sender;
9 | }
10 |
11 | modifier restricted() {
12 | if (msg.sender == owner) _;
13 | }
14 |
15 | function setCompleted(uint completed) public restricted {
16 | last_completed_migration = completed;
17 | }
18 |
19 | function upgrade(address new_address) public restricted {
20 | Migrations upgraded = Migrations(new_address);
21 | upgraded.setCompleted(last_completed_migration);
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/0.to-update/windy-feed/smart-contract/contracts/WindOracle.sol:
--------------------------------------------------------------------------------
1 | pragma solidity ^0.5.8;
2 | pragma experimental ABIEncoderV2;
3 |
4 | import "openzeppelin-solidity/contracts/ownership/Ownable.sol";
5 | import "iexec-doracle-base/contracts/IexecDoracle.sol";
6 |
7 | contract WindOracle is Ownable, IexecDoracle
8 | {
9 | struct TimedValue
10 | {
11 | bytes32 oracleCallID;
12 | uint256 date;
13 | uint256 speed;
14 | uint256 deg;
15 | string details;
16 | }
17 |
18 | mapping(bytes32 => TimedValue) public values;
19 |
20 | event ValueUpdated(
21 | bytes32 indexed id,
22 | bytes32 indexed oracleCallID,
23 | uint256 oldDate,
24 | uint256 oldSpeed,
25 | uint256 oldDeg,
26 | uint256 newDate,
27 | uint256 newSpeed,
28 | uint256 newDeg
29 | );
30 |
31 | // Use _iexecHubAddr to force use of custom iexechub, leave 0x0 for autodetect
32 | constructor(address _iexecHubAddr)
33 | public IexecDoracle(_iexecHubAddr)
34 | {}
35 |
36 | function updateEnv(
37 | address _authorizedApp
38 | , address _authorizedDataset
39 | , address _authorizedWorkerpool
40 | , bytes32 _requiredtag
41 | , uint256 _requiredtrust
42 | )
43 | public onlyOwner
44 | {
45 | _iexecDoracleUpdateSettings(_authorizedApp, _authorizedDataset, _authorizedWorkerpool, _requiredtag, _requiredtrust);
46 | }
47 |
48 | function decodeResults(bytes memory results)
49 | public pure returns(uint256, uint256, uint256, string memory)
50 | { return abi.decode(results, (uint256, uint256, uint256, string)); }
51 |
52 | function processResult(bytes32 _oracleCallID)
53 | public
54 | {
55 | uint256 date;
56 | uint256 deg;
57 | uint256 speed;
58 | string memory details;
59 |
60 | // Parse results
61 | (date, speed, deg, details) = decodeResults(_iexecDoracleGetVerifiedResult(_oracleCallID));
62 |
63 | // Process results
64 | bytes32 id = keccak256(bytes(details));
65 | require(values[id].date < date, "new-value-is-too-old");
66 | emit ValueUpdated(id, _oracleCallID, values[id].date, values[id].speed, values[id].deg, date, speed, deg);
67 | values[id].oracleCallID = _oracleCallID;
68 | values[id].date = date;
69 | values[id].speed = speed;
70 | values[id].deg = deg;
71 | values[id].details = details;
72 | }
73 | }
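74 |
75 | // NOTE: results are expected to be ABI-encoded as
76 | // abi.encode(uint256 date, uint256 speed, uint256 deg, string details),
77 | // matching decodeResults() above. A hypothetical encoder on the worker side
78 | // (Python, using eth_abi like the offchain-computing examples in this repo) would be:
79 | //   eth_abi.encode_abi(['uint256', 'uint256', 'uint256', 'string'],
80 | //                      [date, speed, deg, details]).hex()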
--------------------------------------------------------------------------------
/0.to-update/windy-feed/smart-contract/daemon/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:8-alpine
2 |
3 | # changing user
4 | USER root
5 |
6 | # add necessary packages
7 | RUN apk add --no-cache git python make g++
8 |
9 | # create a work directory inside the container
10 | RUN mkdir /app
11 | WORKDIR /app
12 |
13 | # copy project files
14 | COPY . .
15 |
16 | # install utilities
17 | RUN npm install -g yarn ts-node typescript
18 |
19 | # install dependencies
20 | RUN yarn
21 |
22 | # making entrypoint executable
23 | RUN chmod +x entrypoint.sh
24 |
25 | ENTRYPOINT ["./entrypoint.sh"]
26 |
--------------------------------------------------------------------------------
/0.to-update/windy-feed/smart-contract/daemon/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2'
2 | services:
3 | doracle-daemon:
4 | image: 22blockchain22/iexec-doracle-daemon:latest
5 | environment:
6 | - DORACLE_ADDR=xxx
7 | - MNEMONIC=xxxx
8 | - PROVIDER=xxx
9 | - REQUESTER=xxxx
10 | restart: unless-stopped
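11 |
12 | # Environment variables consumed by launch.ts:
13 | #   DORACLE_ADDR - address of the deployed doracle contract (the mainnet/kovan addresses commented in launch.ts are examples)
14 | #   MNEMONIC     - despite the name, a raw private key used to build the ethers.Wallet that signs transactions
15 | #   PROVIDER     - network name handed to ethers.getDefaultProvider
16 | #   REQUESTER    - address forwarded to the Daemon constructor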
--------------------------------------------------------------------------------
/0.to-update/windy-feed/smart-contract/daemon/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | echo "[INFO] Launching DOracle Daemon"
3 | /usr/local/bin/ts-node launch.ts
--------------------------------------------------------------------------------
/0.to-update/windy-feed/smart-contract/daemon/launch.ts:
--------------------------------------------------------------------------------
1 | import { ethers } from 'ethers';
2 | import Daemon from './daemon';
3 |
4 | // mainnet 0xed4a0189511859427c33dcc7c85fdd36575ae946
5 | // kovan 0x3b9F1a9aeCb1991f3818f45bd4CC735f4BEE93Ac
6 |
7 | let doracle_addr: string = process.env.DORACLE_ADDR;
8 | let private_key: string = process.env.MNEMONIC;
9 | let provider: ethers.providers.Provider = ethers.getDefaultProvider(process.env.PROVIDER);
10 |
11 | let wallet: ethers.Wallet = new ethers.Wallet(private_key, provider);
12 | let daemon: Daemon = new Daemon(doracle_addr, wallet, process.env.REQUESTER);
13 |
14 | daemon.start();
--------------------------------------------------------------------------------
/0.to-update/windy-feed/smart-contract/daemon/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "wind-feed-doracle",
3 | "version": "1.0.0",
4 | "description": "Contracts and tools for the iExec based feed DOracle",
5 | "main": "index.js",
6 | "scripts": {
7 | "test": "echo \"Error: no test specified\" && exit 1"
8 | },
9 | "files": [
10 | "/build",
11 | "/contracts",
12 | "daemon"
13 | ],
14 | "keywords": [],
15 | "author": "",
16 | "license": "ISC",
17 | "dependencies": {
18 | "chai": "^4.2.0",
19 | "ethereumjs-util": "^6.1.0",
20 | "iexec-doracle-base": "0.0.7",
21 | "iexec-poco": "^3.0.35",
22 | "iexec-solidity": "0.0.14",
23 | "multiaddr": "^7.1.0",
24 | "openzeppelin-solidity": "^2.3.0",
25 | "openzeppelin-test-helpers": "^0.4.3",
26 | "rlc-faucet-contract": "^1.0.8"
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/0.to-update/windy-feed/smart-contract/daemon/utils/addrToKey.ts:
--------------------------------------------------------------------------------
1 |
2 | import { ethers } from 'ethers';
3 |
4 | export default function(addr: string) : string
5 | {
6 | return ethers.utils.hexZeroPad(addr, 32).toString().toLowerCase();
7 | }
--------------------------------------------------------------------------------
/0.to-update/windy-feed/smart-contract/daemon/utils/index.ts:
--------------------------------------------------------------------------------
1 | export { default as require } from "./require";
2 | export { default as addrToKey } from "./addrToKey";
--------------------------------------------------------------------------------
/0.to-update/windy-feed/smart-contract/daemon/utils/require.ts:
--------------------------------------------------------------------------------
1 | export default function(value: boolean, reason: string = "") : void
2 | {
3 | if (!value) throw Error(reason);
4 | }
--------------------------------------------------------------------------------
/0.to-update/windy-feed/smart-contract/migrations/1_initial_migration.js:
--------------------------------------------------------------------------------
1 | const Migrations = artifacts.require("Migrations");
2 |
3 | module.exports = function(deployer) {
4 | deployer.deploy(Migrations, {gas: 500000});
5 | };
6 |
--------------------------------------------------------------------------------
/0.to-update/windy-feed/smart-contract/migrations/2_deploy_contracts.js:
--------------------------------------------------------------------------------
1 | var WindOracle = artifacts.require("WindOracle");
2 |
3 | module.exports = async function(deployer, network, accounts)
4 | {
5 | await deployer.deploy(WindOracle, "0x0000000000000000000000000000000000000000", { gas: 2500000 });
6 | WindOracleInstance = await WindOracle.deployed();
7 | console.log("WindOracle deployed at address: " + WindOracleInstance.address);
8 | };
--------------------------------------------------------------------------------
/0.to-update/windy-feed/smart-contract/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "smart-contract",
3 | "version": "1.0.0",
4 | "description": "",
5 | "main": "index.js",
6 | "scripts": {
7 | "test": "echo \"Error: no test specified\" && exit 1"
8 | },
9 | "files": [
10 | "/build",
11 | "/contracts",
12 | "daemon"
13 | ],
14 | "keywords": [],
15 | "author": "Janus",
16 | "license": "ISC",
17 | "dependencies": {
18 | "iexec-doracle-base": "0.0.7",
19 | "iexec-poco": "^3.0.35",
20 | "iexec-solidity": "0.0.14",
21 | "openzeppelin-solidity": "^2.3.0",
22 | "truffle": "^5.0.36",
23 | "truffle-hdwallet-provider": "^1.0.17"
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/0.to-update/windy-feed/smart-contract/truffle-config.js:
--------------------------------------------------------------------------------
1 | var HDWalletProvider = require("truffle-hdwallet-provider");
2 |
3 | module.exports =
4 | {
5 | networks:
6 | {
7 | docker:
8 | {
9 | host: "iexec-geth-local",
10 | port: 8545,
11 | network_id: "*", // Match any network id,
12 | gasPrice: 22000000000, //22Gwei
13 | },
14 | development:
15 | {
16 | host: "localhost",
17 | port: 8545,
18 | network_id: "*", // Match any network id,
19 | gasPrice: 22000000000, //22Gwei
20 | },
21 | coverage:
22 | {
23 | host: "localhost",
24 | port: 8555, // <-- If you change this, also set the port option in .solcover.js.
25 | network_id: "*",
26 | gas: 0xFFFFFFFFFFF, // <-- Use this high gas value
27 | gasPrice: 0x01 // <-- Use this low gas price
28 | },
29 | mainnet:
30 | {
31 | provider: () => new HDWalletProvider(process.env.MNEMONIC, "https://mainnet.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7"),
32 | network_id: '1',
33 | gasPrice: 22000000000, //22Gwei
34 | },
35 | ropsten:
36 | {
37 | provider: () => new HDWalletProvider(process.env.MNEMONIC, "https://ropsten.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7"),
38 | network_id: '3',
39 | gasPrice: 22000000000, //22Gwei
40 | },
41 | kovan: {
42 | provider: () => new HDWalletProvider(process.env.MNEMONIC, "https://kovan.infura.io/v3/f3e0664e01504f5ab2b4360853ce0dc7"),
43 | network_id: '42',
44 | gasPrice: 1000000000, //1Gwei
45 | }
46 | },
47 | compilers: {
48 | solc: {
49 | version: "0.5.10",
50 | settings: {
51 | optimizer: {
52 | enabled: true,
53 | runs: 200
54 | }
55 | }
56 | }
57 | },
58 | mocha:
59 | {
60 | enableTimeouts: false
61 | }
62 | };
63 |
--------------------------------------------------------------------------------
/0.to-update/xmrig/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM alpine:3.7 as build
2 |
3 | ENV XMRIG_DIR /xmrig-cpu
4 | ENV XMRIG_BUILD_DIR $XMRIG_DIR/build
5 |
6 | RUN apk --no-cache add build-base cmake curl git libuv-dev wget
7 | RUN git clone https://github.com/xmrig/xmrig.git $XMRIG_DIR && cd $XMRIG_DIR
8 | RUN mkdir $XMRIG_BUILD_DIR && cd $XMRIG_BUILD_DIR && \
9 | cmake .. -DWITH_HTTPD=OFF && make
10 | RUN mv $XMRIG_BUILD_DIR/xmrig /usr/bin/
11 |
12 | FROM alpine:3.7
13 | RUN apk --no-cache add libuv-dev
14 | COPY --from=build /usr/bin/xmrig /usr/bin/
15 |
16 | # DL entrypoint script
17 | COPY ./entrypoint.sh entrypoint.sh
18 | #RUN wget https://raw.githubusercontent.com/Hugogerva/xmrig-docker-script/master/entrypoint.sh
19 | RUN mv /entrypoint.sh /usr/local/bin/xmrig.sh
20 | RUN chmod +x /usr/local/bin/xmrig.sh
21 |
22 | ENTRYPOINT ["sh","-c","/usr/local/bin/xmrig.sh $0 $@"]]
23 |
--------------------------------------------------------------------------------
/0.to-update/xmrig/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | timeout=$1
4 | shift; args="$@" # everything after the timeout is passed to xmrig
5 | echo "start xmrig with arg : ${args} and timeout: ${timeout}"
6 | mkdir -p iexec
7 | (xmrig ${args} ) & pid=$!
8 | sleep ${timeout} && echo "mined" >> iexec/consensus.iexec
9 | cat iexec/consensus.iexec
10 | kill -SIGINT $pid
11 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # iExec Apps
2 |
3 | *Note:
4 | If you want to clone this repository, please use:*
5 |
6 | `git clone --recurse-submodules https://github.com/iExecBlockchainComputing/iexec-apps`
7 |
8 | ## Readme
9 |
10 | Are you a developer? Find the sources of curated iExec applications here.
11 |
12 | * For developers needing Cloud Computing:
13 |
14 | `cd cloud-computing/`
15 |
16 | * For developers needing Off-chain Computing:
17 |
18 | `cd offchain-computing/`
19 |
20 | ## Add external project
21 |
22 | ```
23 | cd cloud-computing/
24 | git submodule add https://github.com/alice/my-iexec-app
25 | ```
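26 |
27 | If you already cloned the repository without its submodules, fetch them afterwards with:
28 |
29 | ```
30 | git submodule update --init --recursive
31 | ```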
--------------------------------------------------------------------------------
/cloud-computing/ffmpeg/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM jrottenberg/ffmpeg:4.0-scratch AS ffmpeg
2 | FROM python:3.6
3 | COPY --from=ffmpeg / /
4 | COPY ffmpegPoCoReady.sh /usr/bin/ffmpegPoCoReady.sh
5 | RUN chmod +x /usr/bin/ffmpegPoCoReady.sh
6 | ENTRYPOINT ["sh","-c","/usr/bin/ffmpegPoCoReady.sh $0 $@"]
7 |
--------------------------------------------------------------------------------
/cloud-computing/ffmpeg/README.md:
--------------------------------------------------------------------------------
1 | iexec app run --args "-i https://thumbs.dreamstime.com/z/children-s-drawing-horse-19658204.jpg?fbclid=IwAR22eckuJR8QyyFTzQuEDs4l0VEc2wmAC5kbNFEFX1ALFavQDre86U5uuak /iexec_out/ticheval.png" --watch
2 |
--------------------------------------------------------------------------------
/cloud-computing/ffmpeg/ffmpegPoCoReady.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | touch /tmp/ffmpeg.log
4 | chmod 755 /tmp/ffmpeg.log
5 |
6 | nohup /bin/ffmpeg $@ > /tmp/ffmpeg.log 2>&1
7 |
8 | mkdir -p /iexec_out/
9 | mkdir -p /iexec_in/
10 | cat /tmp/ffmpeg.log > /iexec_out/stdout.log
11 | cat /tmp/ffmpeg.log | grep video >> /iexec_out/determinism.iexec
12 | cat >> /iexec_out/computed.json << EOF
13 | { "deterministic-output-path" : "/iexec_out/determinism.iexec" }
14 | EOF
15 |
16 |
--------------------------------------------------------------------------------
/cloud-computing/gnuplot/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:18.04
2 |
3 | RUN apt-get update -y && apt-get install -y gnuplot
4 |
5 | COPY gnuplotPoCoReady.sh /usr/bin/gnuplotPoCoReady.sh
6 | RUN chmod +x /usr/bin/gnuplotPoCoReady.sh
7 | ENTRYPOINT ["sh","-c","/usr/bin/gnuplotPoCoReady.sh $0 $@"]
8 |
--------------------------------------------------------------------------------
/cloud-computing/gnuplot/README.md:
--------------------------------------------------------------------------------
1 |
2 | iexec app run --watch --input-files https://raw.githubusercontent.com/iExecBlockchainComputing/iexec-apps/v5/cloud-computing/gnuplot/gnuplot_sin_and_log.gp --args /iexec_in/gnuplot_sin_and_log.gp
3 |
4 |
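5 | To test the image locally with the provided `gnuplot_sin_and_log.gp` script, a minimal sketch (the `/tmp` paths are arbitrary):
6 |
7 | ```
8 | ./build.sh
9 | mkdir -p /tmp/iexec_in /tmp/iexec_out
10 | cp gnuplot_sin_and_log.gp /tmp/iexec_in/
11 | docker run --rm -v /tmp/iexec_in:/iexec_in -v /tmp/iexec_out:/iexec_out iexechub/gnuplot /iexec_in/gnuplot_sin_and_log.gp
12 | ```
13 |
14 | The plot ends up in `/tmp/iexec_out/sin_and_log.png`, next to the `stdout.log`, `determinism.iexec` and `computed.json` files written by the entrypoint.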
--------------------------------------------------------------------------------
/cloud-computing/gnuplot/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | docker image build -f Dockerfile -t iexechub/gnuplot .
4 |
--------------------------------------------------------------------------------
/cloud-computing/gnuplot/gnuplotPoCoReady.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | touch /tmp/gnuplot.log
4 | chmod 755 /tmp/gnuplot.log
5 |
6 | mkdir -p /iexec_out/
7 | mkdir -p /iexec_in/
8 |
9 | echo $@ > /tmp/gnuplot.log 2>&1
10 | nohup /usr/bin/gnuplot $@ >> /tmp/gnuplot.log 2>&1
11 |
12 | cat /tmp/gnuplot.log > /iexec_out/stdout.log
13 | cat /tmp/gnuplot.log | sha256sum >> /iexec_out/determinism.iexec
14 | cat >> /iexec_out/computed.json << EOF
15 | { "deterministic-output-path" : "/iexec_out/determinism.iexec" }
16 | EOF
17 |
18 |
--------------------------------------------------------------------------------
/cloud-computing/gnuplot/gnuplot_sin_and_log.gp:
--------------------------------------------------------------------------------
1 | set terminal png
2 | set output "/iexec_out/sin_and_log.png"
3 | set multiplot
4 | set size 1, 0.5
5 | set origin 0.0,0.5
6 | plot sin(x), log(x)
7 |
8 |
--------------------------------------------------------------------------------
/cloud-computing/nodejs-hello-world/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules/*
2 |
--------------------------------------------------------------------------------
/cloud-computing/nodejs-hello-world/Readme.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | * Basic
4 |
5 | Build:
6 | `./standard/build`
7 |
8 | Run locally:
9 | `./standard/run`
10 | `./standard/run Alice`
11 |
12 |
13 | * Tee
14 |
15 | Build:
16 | `./tee/build`
17 |
18 | Run locally:
19 | `./tee/run`
20 | `./tee/run Alice`
21 |
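22 | The app can also be run directly with Node from the `src` folder (see the `Try` comment at the bottom of `src/app.js`; the `figlet` dependency must be installed locally):
23 |
24 | ```
25 | mkdir -p /tmp/iexec_out && IEXEC_OUT=/tmp/iexec_out IEXEC_IN=/tmp/iexec_in node app.js Alice
26 | ```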
--------------------------------------------------------------------------------
/cloud-computing/nodejs-hello-world/src/app.js:
--------------------------------------------------------------------------------
1 | const fsPromises = require('fs').promises;
2 | const figlet = require('figlet');
3 |
4 | const iexecOut = process.env.IEXEC_OUT;
5 | const iexecIn = process.env.IEXEC_IN;
6 | const confidentialFilepath = `${iexecIn}/confidential-asset.txt`;
7 |
8 | (async () => {
9 | try {
10 | // Write hello to fs
11 | let text = process.argv.length > 2 ? `Hello, ${process.argv[2]}!` : 'Hello, World';
12 | text = `${figlet.textSync(text)}\n${text}`; // let's add some ASCII art, for example
13 |
14 | // Eventually use some confidential assets
15 | try {
16 | const confidentialFile = await fsPromises.readFile(confidentialFilepath);
17 | text = `${text}\nConfidential asset: ${confidentialFile}`;
18 | } catch (e) {
19 | // confidential asset does not exist
20 | }
21 | // Append some results
22 | await fsPromises.writeFile(`${iexecOut}/result.txt`, text);
23 | console.log(text);
24 | // Declare everything is computed
25 | const computedJsonObj = {
26 | 'deterministic-output-path': `${iexecOut}/result.txt`,
27 | };
28 | await fsPromises.writeFile(
29 | `${iexecOut}/computed.json`,
30 | JSON.stringify(computedJsonObj),
31 | );
32 | } catch (e) {
33 | console.error(e);
34 | process.exit(1);
35 | }
36 | })();
37 |
38 | /* Try
39 | Basic:
40 | mkdir -p /tmp/iexec_out && IEXEC_OUT=/tmp/iexec_out IEXEC_IN=/tmp/iexec_in node app.js Alice
41 |
42 | Tee:
43 | mkdir -p /tmp/iexec_out && IEXEC_OUT=/tmp/iexec_out IEXEC_IN=../tee/confidential-assets node app.js Alice
44 | */
45 |
--------------------------------------------------------------------------------
/cloud-computing/nodejs-hello-world/standard/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:10
2 |
3 | ### install your dependencies
4 | RUN mkdir /app && cd /app && npm install figlet@1.x
5 |
6 | COPY ./src /app
7 |
8 | ENTRYPOINT [ "node", "/app/app.js"]
9 |
--------------------------------------------------------------------------------
/cloud-computing/nodejs-hello-world/standard/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | cd $(dirname $0)
3 |
4 | docker image build -f ../standard/Dockerfile -t node-hello-world .. $@
5 |
--------------------------------------------------------------------------------
/cloud-computing/nodejs-hello-world/standard/run:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | cd $(dirname $0)
3 |
4 | IEXEC_OUT=/tmp/iexec_out
5 |
6 | rm -rf $IEXEC_OUT
7 | mkdir -p $IEXEC_OUT
8 |
9 | docker run --rm -e IEXEC_OUT=/iexec_out -e IEXEC_IN=/iexec_in -v $IEXEC_OUT:/iexec_out node-hello-world $@
10 |
11 | echo
12 | find $IEXEC_OUT
13 |
--------------------------------------------------------------------------------
/cloud-computing/nodejs-hello-world/tee/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM sconecuratedimages/public-apps:node-10-alpine-scone3.0
2 |
3 | ### install dependencies you need
4 | RUN apk add bash nodejs-npm
5 | RUN mkdir /app && cd /app && SCONE_MODE=sim npm install figlet@1.x
6 |
7 | COPY ./src /app
8 |
9 | ### protect file system with Scone
10 | COPY ./tee/protect-fs.sh ./tee/Dockerfile /build/
11 | RUN sh /build/protect-fs.sh /app
12 |
13 | ENTRYPOINT [ "node", "/app/app.js"]
14 |
--------------------------------------------------------------------------------
/cloud-computing/nodejs-hello-world/tee/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | cd $(dirname $0)
3 | docker image build -f ../tee/Dockerfile -t tee-nodejs-hello-world .. $@
4 |
--------------------------------------------------------------------------------
/cloud-computing/nodejs-hello-world/tee/confidential-assets/confidential-asset.txt:
--------------------------------------------------------------------------------
1 | dummy confidential file
2 |
--------------------------------------------------------------------------------
/cloud-computing/nodejs-hello-world/tee/protect-fs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | cd $(dirname $0)
4 |
5 | if [ ! -e Dockerfile ]
6 | then
7 | printf "\nFailed to parse Dockerfile ENTRYPOINT\n"
8 | printf "Did you forget to add your Dockerfile in your build?\n"
9 | printf "COPY ./tee/Dockerfile /build/\n\n"
10 | exit 1
11 | fi
12 |
13 | ENTRYPOINT_ARGS=$(grep ENTRYPOINT ./Dockerfile | tail -1 | grep -o '"[^"]\+"' | tr -d '"')
14 | echo $ENTRYPOINT_ARGS > ./entrypoint
15 |
16 | if [ -z "$ENTRYPOINT_ARGS" ]
17 | then
18 | printf "\nFailed to parse Dockerfile ENTRYPOINT\n"
19 | printf "Did you forget to add an ENTRYPOINT to your Dockerfile?\n"
20 | printf "ENTRYPOINT [\"executable\", \"param1\", \"param2\"]\n\n"
21 | exit 1
22 | fi
23 |
24 | INTERPRETER=$(awk '{print $1}' ./entrypoint) # python
25 | ENTRYPOINT=$(cat ./entrypoint) # /python /app/app.py
26 |
27 | export SCONE_MODE=sim
28 | export SCONE_HEAP=1G
29 |
30 | APP_FOLDER=$1
31 |
32 | printf "\n### Starting file system protection ...\n\n"
33 |
34 | scone fspf create /fspf.pb
35 | scone fspf addr /fspf.pb / --not-protected --kernel /
36 | scone fspf addr /fspf.pb /usr --authenticated --kernel /usr
37 | scone fspf addf /fspf.pb /usr /usr
38 | scone fspf addr /fspf.pb /bin --authenticated --kernel /bin
39 | scone fspf addf /fspf.pb /bin /bin
40 | scone fspf addr /fspf.pb /lib --authenticated --kernel /lib
41 | scone fspf addf /fspf.pb /lib /lib
42 | scone fspf addr /fspf.pb /etc/ssl --authenticated --kernel /etc/ssl
43 | scone fspf addf /fspf.pb /etc/ssl /etc/ssl
44 | scone fspf addr /fspf.pb /sbin --authenticated --kernel /sbin
45 | scone fspf addf /fspf.pb /sbin /sbin
46 | printf "\n### Protecting code found in folder \"$APP_FOLDER\"\n\n"
47 | scone fspf addr /fspf.pb $APP_FOLDER --authenticated --kernel $APP_FOLDER
48 | scone fspf addf /fspf.pb $APP_FOLDER $APP_FOLDER
49 |
50 | scone fspf encrypt /fspf.pb > ./keytag
51 |
52 | MRENCLAVE="$(SCONE_HASH=1 $INTERPRETER)"
53 | FSPF_TAG=$(cat ./keytag | awk '{print $9}')
54 | FSPF_KEY=$(cat ./keytag | awk '{print $11}')
55 | FINGERPRINT="$FSPF_KEY|$FSPF_TAG|$MRENCLAVE|$ENTRYPOINT"
56 | echo $FINGERPRINT > ./fingerprint
57 |
58 | printf "\n\n"
59 | printf "Your application fingerprint (mrenclave) is ready:\n"
60 | printf "#####################################################################\n"
61 | printf "iexec.json:\n\n"
62 | printf "%s\n" "\"app\": { " " \"owner\" : ... " " \"name\": ... " " ..." " \"mrenclave\": \"$FINGERPRINT\"" "}"
63 | printf "#####################################################################\n"
64 | printf "Hint: Replace 'mrenclave' before doing 'iexec app deploy' step.\n"
65 | printf "\n\n"
66 |
--------------------------------------------------------------------------------
/cloud-computing/nodejs-hello-world/tee/run:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | cd $(dirname $0)
3 |
4 | IEXEC_OUT=/tmp/iexec_out
5 |
6 | rm -rf $IEXEC_OUT
7 | mkdir -p $IEXEC_OUT
8 |
9 | docker run --rm -e IEXEC_OUT=/iexec_out -e IEXEC_IN=/iexec_in -v $IEXEC_OUT:/iexec_out -v $(pwd)/confidential-assets:/iexec_in --device /dev/isgx tee-nodejs-hello-world $@
10 |
11 | echo
12 | find $IEXEC_OUT
13 |
--------------------------------------------------------------------------------
/cloud-computing/python-hello-world/Readme.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | * Basic
4 |
5 | Build:
6 | `./standard/build`
7 |
8 | Run locally:
9 | `./standard/run`
10 | `./standard/run Alice`
11 |
12 |
13 | * Tee
14 |
15 | Build:
16 | `./tee/build`
17 |
18 | Run locally:
19 | `./tee/run`
20 | `./tee/run Alice`
21 |
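22 | The app can also be run directly with Python from the `src` folder (see the `Try` comment at the bottom of `src/app.py`; the `pyfiglet` dependency must be installed locally):
23 |
24 | ```
25 | mkdir -p /tmp/iexec_out && IEXEC_OUT=/tmp/iexec_out IEXEC_IN=/tmp/iexec_in python3 app.py Alice
26 | ```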
--------------------------------------------------------------------------------
/cloud-computing/python-hello-world/src/app.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import json
4 | from pyfiglet import Figlet
5 |
6 | iexec_out = os.environ['IEXEC_OUT']
7 | iexec_in = os.environ['IEXEC_IN']
8 |
9 | # Do whatever you want
10 | text = "Hello, World!"
11 | if len(sys.argv) > 1:
12 | text = 'Hello, {}!'.format(sys.argv[1])
13 | text = Figlet().renderText(text) + text # let's add some ASCII art, for example
14 |
15 | # Eventually use some confidential assets
16 | if os.path.exists(iexec_in + '/confidential-asset.txt'):
17 | with open(iexec_in + '/confidential-asset.txt', 'r') as f:
18 | text = text + '\nConfidential asset: ' + f.read()
19 |
20 | # Append some results
21 | with open(iexec_out + '/result.txt', 'w+') as f:
22 | f.write(text)
23 | print(text)
24 |
25 | # Declare everything is computed
26 | with open(iexec_out + '/computed.json', 'w+') as f:
27 | json.dump({ "deterministic-output-path" : iexec_out + '/result.txt' }, f)
28 |
29 | ## Try:
30 | # Basic:
31 | # mkdir -p /tmp/iexec_out && IEXEC_OUT=/tmp/iexec_out IEXEC_IN=/tmp/iexec_in python3 app.py Alice
32 | #
33 | # Tee:
34 | # mkdir -p /tmp/iexec_out && IEXEC_OUT=/tmp/iexec_out IEXEC_IN=../tee/confidential-assets python3 app.py Alice
35 |
--------------------------------------------------------------------------------
/cloud-computing/python-hello-world/standard/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.7.3-alpine3.10
2 |
3 | ### install python3 dependencies you need
4 | RUN pip3 install pyfiglet
5 |
6 | COPY ./src /app
7 |
8 | ENTRYPOINT ["python", "/app/app.py"]
9 |
--------------------------------------------------------------------------------
/cloud-computing/python-hello-world/standard/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | cd $(dirname $0)
3 |
4 | docker image build -f ../standard/Dockerfile -t python-hello-world .. $@
5 |
--------------------------------------------------------------------------------
/cloud-computing/python-hello-world/standard/run:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | cd $(dirname $0)
3 |
4 | IEXEC_OUT=/tmp/iexec_out
5 |
6 | rm -rf $IEXEC_OUT
7 | mkdir -p $IEXEC_OUT
8 |
9 | docker run --rm -e IEXEC_OUT=/iexec_out -e IEXEC_IN=/iexec_in -v /tmp/iexec_out:/iexec_out python-hello-world $@
10 |
11 | echo
12 | find $IEXEC_OUT
13 |
--------------------------------------------------------------------------------
/cloud-computing/python-hello-world/tee/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM sconecuratedimages/public-apps:python-3.7.3-alpine3.10-scone3.0
2 |
3 | ### install python3 dependencies you need
4 | RUN SCONE_MODE=sim pip3 install pyfiglet
5 |
6 | COPY ./src /app
7 |
8 | ### protect file system with Scone
9 | COPY ./tee/protect-fs.sh ./tee/Dockerfile /build/
10 | RUN sh /build/protect-fs.sh /app
11 |
12 | ENTRYPOINT ["python", "/app/app.py"]
13 |
--------------------------------------------------------------------------------
/cloud-computing/python-hello-world/tee/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | cd $(dirname $0)
3 | docker image build -f ../tee/Dockerfile -t tee-python-hello-world .. $@
4 |
--------------------------------------------------------------------------------
/cloud-computing/python-hello-world/tee/confidential-assets/confidential-asset.txt:
--------------------------------------------------------------------------------
1 | dummy confidential file
2 |
--------------------------------------------------------------------------------
/cloud-computing/python-hello-world/tee/protect-fs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | cd $(dirname $0)
4 |
5 | if [ ! -e Dockerfile ]
6 | then
7 | printf "\nFailed to parse Dockerfile ENTRYPOINT\n"
8 | printf "Did you forget to add your Dockerfile in your build?\n"
9 | printf "COPY ./tee/Dockerfile /build/\n\n"
10 | exit 1
11 | fi
12 |
13 | ENTRYPOINT_ARGS=$(grep ENTRYPOINT ./Dockerfile | tail -1 | grep -o '"[^"]\+"' | tr -d '"')
14 | echo $ENTRYPOINT_ARGS > ./entrypoint
15 |
16 | if [ -z "$ENTRYPOINT_ARGS" ]
17 | then
18 | printf "\nFailed to parse Dockerfile ENTRYPOINT\n"
19 | printf "Did you forget to add an ENTRYPOINT to your Dockerfile?\n"
20 | printf "ENTRYPOINT [\"executable\", \"param1\", \"param2\"]\n\n"
21 | exit 1
22 | fi
23 |
24 | INTERPRETER=$(awk '{print $1}' ./entrypoint) # python
25 | ENTRYPOINT=$(cat ./entrypoint) # /python /app/app.py
26 |
27 | export SCONE_MODE=sim
28 | export SCONE_HEAP=1G
29 |
30 | APP_FOLDER=$1
31 |
32 | printf "\n### Starting file system protection ...\n\n"
33 |
34 | scone fspf create /fspf.pb
35 | scone fspf addr /fspf.pb / --not-protected --kernel /
36 | scone fspf addr /fspf.pb /usr --authenticated --kernel /usr
37 | scone fspf addf /fspf.pb /usr /usr
38 | scone fspf addr /fspf.pb /bin --authenticated --kernel /bin
39 | scone fspf addf /fspf.pb /bin /bin
40 | scone fspf addr /fspf.pb /lib --authenticated --kernel /lib
41 | scone fspf addf /fspf.pb /lib /lib
42 | scone fspf addr /fspf.pb /etc/ssl --authenticated --kernel /etc/ssl
43 | scone fspf addf /fspf.pb /etc/ssl /etc/ssl
44 | scone fspf addr /fspf.pb /sbin --authenticated --kernel /sbin
45 | scone fspf addf /fspf.pb /sbin /sbin
46 | printf "\n### Protecting code found in folder \"$APP_FOLDER\"\n\n"
47 | scone fspf addr /fspf.pb $APP_FOLDER --authenticated --kernel $APP_FOLDER
48 | scone fspf addf /fspf.pb $APP_FOLDER $APP_FOLDER
49 |
50 | scone fspf encrypt /fspf.pb > ./keytag
51 |
52 | MRENCLAVE="$(SCONE_HASH=1 $INTERPRETER)"
53 | FSPF_TAG=$(cat ./keytag | awk '{print $9}')
54 | FSPF_KEY=$(cat ./keytag | awk '{print $11}')
55 | FINGERPRINT="$FSPF_KEY|$FSPF_TAG|$MRENCLAVE|$ENTRYPOINT"
56 | echo $FINGERPRINT > ./fingerprint
57 |
58 | printf "\n\n"
59 | printf "Your application fingerprint (mrenclave) is ready:\n"
60 | printf "#####################################################################\n"
61 | printf "iexec.json:\n\n"
62 | printf "%s\n" "\"app\": { " " \"owner\" : ... " " \"name\": ... " " ..." " \"mrenclave\": \"$FINGERPRINT\"" "}"
63 | printf "#####################################################################\n"
64 | printf "Hint: Replace 'mrenclave' before doing 'iexec app deploy' step.\n"
65 | printf "\n\n"
66 |
--------------------------------------------------------------------------------
/cloud-computing/python-hello-world/tee/run:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | cd $(dirname $0)
3 |
4 | IEXEC_OUT=/tmp/iexec_out
5 |
6 | rm -rf $IEXEC_OUT
7 | mkdir -p $IEXEC_OUT
8 |
9 | docker run --rm -e IEXEC_OUT=/iexec_out -e IEXEC_IN=/iexec_in -v $IEXEC_OUT:/iexec_out -v $(pwd)/confidential-assets:/iexec_in --device /dev/isgx tee-python-hello-world $@
10 |
11 | echo
12 | find $IEXEC_OUT
13 |
--------------------------------------------------------------------------------
/cloud-computing/vanityeth/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04
2 | RUN apt-get update
3 | RUN apt-get install -y sudo
4 | RUN apt-get install -y curl
5 | RUN curl -sL https://deb.nodesource.com/setup_10.x | sudo -E bash -
6 | RUN apt-get install -y nodejs
7 | RUN npm install -g vanity-eth --unsafe
8 | COPY vanityeth-with-consensus.sh /vanityeth-with-consensus.sh
9 | RUN chmod +x /vanityeth-with-consensus.sh
10 | ENTRYPOINT ["/vanityeth-with-consensus.sh"]
11 |
12 | # docker image build -t iexechub/vanityeth:3.0.0 .
--------------------------------------------------------------------------------
/cloud-computing/vanityeth/vanityeth-with-consensus.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | vanityDefaultResultFile=VanityEth-log-*.txt
4 | vanityResult="$IEXEC_OUT/keypair.txt"
5 | deterministicTrace="$IEXEC_OUT/deterministic-trace.txt"
6 | computedJsonFile="$IEXEC_OUT/computed.json"
7 |
8 | vanityPattern=$1
9 |
10 | rm -f $vanityDefaultResultFile $vanityResult $deterministicTrace
11 |
12 | vanityeth -i $vanityPattern -l &> /dev/null
13 |
14 | mv $vanityDefaultResultFile $vanityResult
15 |
16 | if [[ -f $vanityResult ]]; then
17 | publicAddress=$(cat $vanityResult | grep -Po '"address": *\K"[^"]*"' | tr -d '"')
18 | publicAddressLength=${#publicAddress}
19 |
20 | echo "Address found is "$publicAddress
21 | echo "(private key inside "$vanityResult")"
22 |
23 | if [[ $publicAddress = "0x"$vanityPattern* ]]; then
24 | echo "Pattern "$vanityPattern" found">> $deterministicTrace
25 | echo $publicAddressLength >> $deterministicTrace
26 | fi
27 | else
28 | echo "Bad input params"
29 | echo "Bad input params" >> $deterministicTrace
30 | fi
31 |
32 | echo "{ \"deterministic-output-path\": \"$deterministicTrace\" }" >> $computedJsonFile
33 |
--------------------------------------------------------------------------------
/offchain-computing/offchain-python-hello-world/Readme.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | * Basic
4 |
5 | Build:
6 | `./standard/build`
7 |
8 | Run locally:
9 | `./standard/run`
10 | `./standard/run Alice`
11 |
12 |
13 | * Tee
14 |
15 | Build:
16 | `./tee/build`
17 |
18 | Run locally:
19 | `./tee/run`
20 | `./tee/run Alice`
21 |
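22 | The app writes no result file; it ABI-encodes a string and stores it as `callback-data` in `computed.json` (see `src/app.py`). A minimal sketch for checking the value locally after `./standard/run`, assuming the same `eth_abi` version as the app:
23 |
24 | ```python
25 | import json
26 | import eth_abi
27 |
28 | with open('/tmp/iexec_out/computed.json') as f:
29 |     callback_data = json.load(f)['callback-data']
30 |
31 | # the app stores the hex string without a 0x prefix
32 | print(eth_abi.decode_abi(['string'], bytes.fromhex(callback_data))[0])
33 | ```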
--------------------------------------------------------------------------------
/offchain-computing/offchain-python-hello-world/src/app.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import json
4 | import eth_abi
5 |
6 | iexec_out = os.environ['IEXEC_OUT']
7 | iexec_in = os.environ['IEXEC_IN']
8 |
9 | # Do whatever you want
10 | data = "Hello, World!"
11 | if len(sys.argv) > 1:
12 | data = 'Hello, {}!'.format(sys.argv[1])
13 |
14 | # Eventually use some confidential assets
15 | if os.path.exists(iexec_in + '/confidential-asset.txt'):
16 | with open(iexec_in + '/confidential-asset.txt', 'r') as f:
17 | print('Confidential asset: ' + f.read())
18 |
19 | # Send callback data to smart-contract
20 | callback_data = eth_abi.encode_abi([ 'string'], [ data ]).hex()
21 | print('Offchain computing for Smart-Contracts [data:{}, callback_data:{}]'.format(data, callback_data))
22 | with open(iexec_out + '/computed.json', 'w+') as f:
23 | json.dump({ "callback-data" : callback_data}, f)
24 |
25 |
26 | ## Try:
27 | # Basic:
28 | # mkdir -p /tmp/iexec_out && IEXEC_OUT=/tmp/iexec_out IEXEC_IN=/tmp/iexec_in python3 app.py Alice
29 | #
30 | # Tee:
31 | # mkdir -p /tmp/iexec_out && IEXEC_OUT=/tmp/iexec_out IEXEC_IN=../tee/confidential-assets python3 app.py Alice
32 |
--------------------------------------------------------------------------------
/offchain-computing/offchain-python-hello-world/standard/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.7.3-alpine3.10
2 |
3 | ### install some python3 dependencies
4 | RUN apk add gcc musl-dev
5 | RUN pip3 install eth_abi
6 |
7 | COPY ./src /app
8 |
9 | ENTRYPOINT ["python", "/app/app.py"]
10 |
--------------------------------------------------------------------------------
/offchain-computing/offchain-python-hello-world/standard/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | cd $(dirname $0)
3 |
4 | docker image build -f ../standard/Dockerfile -t offchain-python-hello-world .. $@
5 |
--------------------------------------------------------------------------------
/offchain-computing/offchain-python-hello-world/standard/run:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | cd $(dirname $0)
3 |
4 | IEXEC_OUT=/tmp/iexec_out
5 |
6 | rm -rf $IEXEC_OUT
7 | mkdir -p $IEXEC_OUT
8 |
9 | docker run --rm -e IEXEC_OUT=/iexec_out -e IEXEC_IN=/iexec_in -v $IEXEC_OUT:/iexec_out offchain-python-hello-world $@
10 |
11 | echo
12 | find $IEXEC_OUT
13 |
--------------------------------------------------------------------------------
/offchain-computing/offchain-python-hello-world/tee/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM sconecuratedimages/public-apps:python-3.7.3-alpine3.10-scone3.0
2 |
3 | ### install some python3 dependencies
4 | RUN apk add gcc
5 | RUN SCONE_MODE=sim pip3 install eth_abi
6 |
7 | ### copy the code inside the image
8 | COPY ./src /app
9 |
10 | ### protect file system with Scone
11 | COPY ./tee/protect-fs.sh ./tee/Dockerfile /build/
12 | RUN sh /build/protect-fs.sh /app
13 |
14 | ENTRYPOINT ["python", "/app/app.py"]
15 |
--------------------------------------------------------------------------------
/offchain-computing/offchain-python-hello-world/tee/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | cd $(dirname $0)
3 | docker image build -f ../tee/Dockerfile -t offchain-tee-python-hello-world .. $@
4 |
--------------------------------------------------------------------------------
/offchain-computing/offchain-python-hello-world/tee/confidential-assets/confidential-asset.txt:
--------------------------------------------------------------------------------
1 | dummy dataset file
--------------------------------------------------------------------------------
/offchain-computing/offchain-python-hello-world/tee/protect-fs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | cd $(dirname $0)
4 |
5 | if [ ! -e Dockerfile ]
6 | then
7 | printf "\nFailed to parse Dockerfile ENTRYPOINT\n"
8 | printf "Did you forget to add your Dockerfile in your build?\n"
9 | printf "COPY ./tee/Dockerfile /build/\n\n"
10 | exit 1
11 | fi
12 |
13 | ENTRYPOINT_ARGS=$(grep ENTRYPOINT ./Dockerfile | tail -1 | grep -o '"[^"]\+"' | tr -d '"')
14 | echo $ENTRYPOINT_ARGS > ./entrypoint
15 |
16 | if [ -z "$ENTRYPOINT_ARGS" ]
17 | then
18 | printf "\nFailed to parse Dockerfile ENTRYPOINT\n"
19 | printf "Did you forget to add an ENTRYPOINT to your Dockerfile?\n"
20 | printf "ENTRYPOINT [\"executable\", \"param1\", \"param2\"]\n\n"
21 | exit 1
22 | fi
23 |
24 | INTERPRETER=$(awk '{print $1}' ./entrypoint) # python
25 | ENTRYPOINT=$(cat ./entrypoint) # /python /app/app.py
26 |
27 | export SCONE_MODE=sim
28 | export SCONE_HEAP=1G
29 |
30 | APP_FOLDER=$1
31 |
32 | printf "\n### Starting file system protection ...\n\n"
33 |
34 | scone fspf create /fspf.pb
35 | scone fspf addr /fspf.pb / --not-protected --kernel /
36 | scone fspf addr /fspf.pb /usr --authenticated --kernel /usr
37 | scone fspf addf /fspf.pb /usr /usr
38 | scone fspf addr /fspf.pb /bin --authenticated --kernel /bin
39 | scone fspf addf /fspf.pb /bin /bin
40 | scone fspf addr /fspf.pb /lib --authenticated --kernel /lib
41 | scone fspf addf /fspf.pb /lib /lib
42 | scone fspf addr /fspf.pb /etc/ssl --authenticated --kernel /etc/ssl
43 | scone fspf addf /fspf.pb /etc/ssl /etc/ssl
44 | scone fspf addr /fspf.pb /sbin --authenticated --kernel /sbin
45 | scone fspf addf /fspf.pb /sbin /sbin
46 | printf "\n### Protecting code found in folder \"$APP_FOLDER\"\n\n"
47 | scone fspf addr /fspf.pb $APP_FOLDER --authenticated --kernel $APP_FOLDER
48 | scone fspf addf /fspf.pb $APP_FOLDER $APP_FOLDER
49 |
50 | scone fspf encrypt /fspf.pb > ./keytag
51 |
52 | MRENCLAVE="$(SCONE_HASH=1 $INTERPRETER)"
53 | FSPF_TAG=$(cat ./keytag | awk '{print $9}')
54 | FSPF_KEY=$(cat ./keytag | awk '{print $11}')
55 | FINGERPRINT="$FSPF_KEY|$FSPF_TAG|$MRENCLAVE|$ENTRYPOINT"
56 | echo $FINGERPRINT > ./fingerprint
57 |
58 | printf "\n\n"
59 | printf "Your application fingerprint (mrenclave) is ready:\n"
60 | printf "#####################################################################\n"
61 | printf "iexec.json:\n\n"
62 | printf "%s\n" "\"app\": { " " \"owner\" : ... " " \"name\": ... " " ..." " \"mrenclave\": \"$FINGERPRINT\"" "}"
63 | printf "#####################################################################\n"
64 | printf "Hint: Replace 'mrenclave' before doing 'iexec app deploy' step.\n"
65 | printf "\n\n"
66 |
--------------------------------------------------------------------------------
/offchain-computing/offchain-python-hello-world/tee/run:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | cd $(dirname $0)
3 |
4 | IEXEC_OUT=/tmp/iexec_out
5 |
6 | rm -rf $IEXEC_OUT
7 | mkdir -p $IEXEC_OUT
8 |
9 | docker run --rm -e IEXEC_OUT=/iexec_out -e IEXEC_IN=/iexec_in -v $IEXEC_OUT:/iexec_out -v $(pwd)/confidential-assets:/iexec_in --device /dev/isgx offchain-tee-python-hello-world $@
10 |
11 | echo
12 | find $IEXEC_OUT
13 |
--------------------------------------------------------------------------------
/offchain-computing/offchain-tee-kaiko-pricefeed/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | ## Build app, get MrEnclave, docker tag & push, iexec app deploy, iexec order publish --app
4 |
5 | ### Docker Build & Push
6 |
7 | `./tee/build`
8 |
9 | ### Deploy app
10 |
11 | ```
12 | "app": {
13 | "owner": "0x15Bd06807eF0F2284a9C5baeAA2EF4d5a88eB72A",
14 | "name": "offchain-tee-kaiko-pricefeed",
15 | "type": "DOCKER",
16 | "multiaddr": "docker.io/iexechub/offchain-tee-kaiko-pricefeed:5.0.1",
17 | "checksum": "0x0000000000000000000000000000000000000000000000000000000000000000",
18 | "mrenclave": "4a39755a6a07cf885d7d3c7358bef277c7233746d283f4a48491b87d772b4199|958742b031be41d52d4f11ab2afc063a|2a421b3b7a6f771c3a602f49ce05b6a75793312b8e2c61c673fe7085a16cf138|python /app/app.py"
19 | }
20 | ℹ Using chain [goerli]
21 | ✔ Deployed new app at address 0xa78bf0FF3661b96A97bDd7a1382360fce5F1eFdD
22 |
23 | iexec app init --wallet-file xx
24 | # Add app uri & MrEnclave before next step
25 | iexec app deploy --wallet-file xx --chain goerli
26 | ```
27 |
28 | ## Create confidential asset (key.txt), iexec dataset encrypt, ipfs add encrypted-dataset.zip, iexec dataset push-secret, iexec dataset deploy, iexec order publish --dataset
29 |
30 | ```
31 | ls /home/alice/iexec
32 | ├── iexec.json
33 | ├── chain.json
34 | ```
35 |
36 | Secret: ```/home/alice/iexec/dataset/original/key.txt```
37 |
38 | ```iexec dataset encrypt --algorithm scone```
39 | ```
40 | ├── iexec.json
41 | ├── chain.json
42 | ├── datasets
43 | │ ├── encrypted
44 | │ │ └── dataset_key.txt.zip
45 | │ └── original
46 | │ └── key.txt
47 | ```
48 |
49 | ### Deploy Dataset
50 |
51 | Make ```dataset_key.txt.zip``` publicly available (IPFS, Raw Github)
52 |
53 | ```
54 | //master branch
55 | "dataset": {
56 | "owner": "0x15Bd06807eF0F2284a9C5baeAA2EF4d5a88eB72A",
57 | "name": "encrypted-kaiko-pricefeed-api-key",
58 | "multiaddr": "https://raw.githubusercontent.com/iExecBlockchainComputing/iexec-apps/master/offchain-computing/offchain-tee-kaiko-pricefeed/tee/datasets/encrypted/dataset_key.txt.zip",
59 | "checksum": "0x0000000000000000000000000000000000000000000000000000000000000000"
60 | }
61 | ℹ Using chain [goerli]
62 | ✔ Deployed new dataset at address 0xc544573dEf12c71F0bA8bCF992d3Ed9590586452
63 | ```
64 |
65 |
66 | ```
67 | iexec dataset init --wallet-file xx
68 | # Add dataset uri before next step
69 | iexec dataset deploy --wallet-file xx --chain goerli
70 |
71 | iexec dataset push-secret 0xdataset --secret-path /home/alice/iexec/.secrets/datasets/dataset.secret --keystoredir=/home/alice/wallets --wallet-file=wallet.json --password=xx --chain kovan
72 | ```
73 |
74 | ```
75 | ## Orders
76 |
77 | iexec order init --app --wallet-file xx --chain goerli
78 | iexec order sign --app --wallet-file xx --chain goerli
79 | iexec order publish --app --wallet-file xx --chain goerli
80 |
81 | iexec order init --dataset --wallet-file xx --chain goerli
82 | # Add app restriction before next step
83 | iexec order sign --dataset --wallet-file xx --chain goerli
84 | iexec order publish --dataset --wallet-file xx --chain goerli
85 | ```
86 |
87 | ## Run
88 |
89 | ```
90 | iexec app run 0xa78bf0FF3661b96A97bDd7a1382360fce5F1eFdD --dataset 0xc544573dEf12c71F0bA8bCF992d3Ed9590586452 --workerpool 0xEb6c3FA8522f2C342E94941C21d3947c099e6Da0 --params '{"iexec_args": "eth usd 9"}' --tag 0x0000000000000000000000000000000000000000000000000000000000000001 --callback 0xB2bb24cEa9aA32c0555F934C57145414286b70f0 --password whatever --force
91 | ```
92 |
93 |
94 | * params: ```btc usd 9```
95 | * tag: ```0x0000000000000000000000000000000000000000000000000000000000000001```
96 |
97 | Goerli run: https://v5.explorer.iex.ec/goerli/deal/0xa78e36abe5f106b4bc9ce95cb9d77a3a99fb336ca891c6926f05b0143aed42b3
98 |
99 | ## Oracle receiver
100 |
101 | Goerli:
102 | ```
103 | # Unsafe callback - 0x2760E0CE853b3FfE8d55A6642e597D466A00C8f0
104 | # Safe callback - 0xB2bb24cEa9aA32c0555F934C57145414286b70f0
105 | ```
106 |
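107 | ## Callback format
108 |
109 | The app ABI-encodes its result as `(uint256 timestamp, string details, uint256 value)` before writing it to `computed.json` (see `src/app.py`). A minimal round-trip sketch of that encoding with `eth_abi` (the sample values are purely illustrative):
110 |
111 | ```python
112 | import eth_abi
113 |
114 | # encode the way src/app.py does (price scaled by 10**power)
115 | encoded = '0x' + eth_abi.encode_abi(
116 |     ['uint256', 'string', 'uint256'],
117 |     [1600000000, 'Price-BTC/USD-9', 10000 * 10**9],
118 | ).hex()
119 |
120 | # decode it back, e.g. on the consumer side
121 | timestamp, details, value = eth_abi.decode_abi(
122 |     ['uint256', 'string', 'uint256'],
123 |     bytes.fromhex(encoded[2:]),
124 | )
125 | print(timestamp, details, value)
126 | ```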
--------------------------------------------------------------------------------
/offchain-computing/offchain-tee-kaiko-pricefeed/src/app.py:
--------------------------------------------------------------------------------
1 | import eth_abi
2 | import json
3 | import os
4 | import re
5 | import sys
6 | import urllib.request
7 |
8 | iexec_out = os.environ['IEXEC_OUT']
9 | iexec_in = os.environ['IEXEC_IN']
10 |
11 | class Lib:
12 | def parseValue(rawValue, ethType, power):
13 | if re.search('^u?int[0-9]*$', ethType):
14 | return round(float(rawValue) * 10 ** int(power))
15 | else:
16 | return rawValue
17 |
18 | def formatArgs(args):
19 | return '&'.join('{}={}'.format(k,v) for k,v in args.items())
20 |
21 | def getAPIKey():
22 | try:
23 | with open(iexec_in + '/' + 'key.txt', 'r') as dataset_file:
24 | apiKey = dataset_file.read().strip()
25 | if not re.search('^[0-9a-zA-Z]{1,128}$', apiKey):
26 | raise Exception('Invalid API key')
27 | return apiKey
28 | except FileNotFoundError:
29 | raise Exception('Missing API key dataset')
30 |
31 | def fetchMarketData(region, endpoint, params):
32 | print('Request https://{region}.market-api.kaiko.io/v1/data/trades.v1/{endpoint}?{params}'.format(
33 | region = region,
34 | endpoint = endpoint,
35 | params = params,
36 | ))
37 | return json.loads(
38 | urllib.request.urlopen(
39 | urllib.request.Request(
40 | 'https://{region}.market-api.kaiko.io/v1/data/trades.v1/{endpoint}?{params}'.format(
41 | region = region,
42 | endpoint = endpoint,
43 | params = params,
44 | ),
45 | headers = {
46 | 'X-Api-Key': Lib.getAPIKey(),
47 | 'User-Agent': 'Kaiko iExec Adapter',
48 | }
49 | )
50 | ).read()
51 | )
52 |
53 | class PriceFeed:
54 | def fetchRate(baseAsset, quoteAsset):
55 | return Lib.fetchMarketData(
56 | 'us',
57 | 'spot_direct_exchange_rate/{baseAsset}/{quoteAsset}/recent'.format(baseAsset=baseAsset, quoteAsset=quoteAsset),
58 | Lib.formatArgs({
59 | 'interval': '1m',
60 | 'limit': 720,
61 | })
62 | )
63 |
64 | def run(baseAsset, quoteAsset, power):
65 | response = PriceFeed.fetchRate(
66 | baseAsset = baseAsset,
67 | quoteAsset = quoteAsset,
68 | )
69 | try:
70 | data = response.get('data')[0]
71 | timestamp = data.get('timestamp')
72 | details = 'Price-{base}/{quote}-{power}'.format(base=baseAsset.upper(), quote=quoteAsset.upper(), power=power)
73 | rawValue = data.get('price')
74 | value = Lib.parseValue(rawValue, 'uint256', power)
75 | return (timestamp, details, value)
76 | except Exception as e:
77 | raise Exception('API response parsing failure: {}'.format(e))
78 |
79 |
80 | # Example usage:
81 | # btc usd 9
82 | if __name__ == '__main__':
83 | print('PriceFeed started')
84 | success = False
85 | data = (0, '', 0) # default returned value to avoid attack on scheduler
86 |
87 | try:
88 | # EXECUTE CALL
89 | data = PriceFeed.run(
90 | baseAsset = sys.argv[1],
91 | quoteAsset = sys.argv[2],
92 | power = sys.argv[3],
93 | )
94 | success = True
95 | print('- Success: {} {} {}'.format(*data))
96 |
97 | except IndexError as e:
98 | print('Error: missing arguments')
99 |
100 | except Exception as e:
101 | print('Execution Failure: {}'.format(e))
102 |
103 | # GENERATE CALLBACK
104 | callback_data = eth_abi.encode_abi(['uint256', 'string', 'uint256'], [*data]).hex()
105 | callback_data = '0x{}'.format(callback_data)
106 | print('Offchain Computing for Smart-Contracts [data:{}, callback_data:{}]'.format(data, callback_data))
107 |
108 | with open(iexec_out + '/computed.json', 'w+') as f:
109 | json.dump({ "callback-data" : callback_data}, f)
110 |
111 | if success:
112 | print('PriceFeed completed')
113 | else:
114 | print('PriceFeed failed')
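115 |
116 | ## Try (hypothetical local run; needs a valid Kaiko API key at $IEXEC_IN/key.txt):
117 | # mkdir -p /tmp/iexec_out /tmp/iexec_in && cp key.txt /tmp/iexec_in/ && IEXEC_OUT=/tmp/iexec_out IEXEC_IN=/tmp/iexec_in python3 app.py btc usd 9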
--------------------------------------------------------------------------------
/offchain-computing/offchain-tee-kaiko-pricefeed/tee/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM sconecuratedimages/public-apps:python-3.7.3-alpine3.10-scone3.0
2 |
3 | ### install some python3 dependencies
4 | RUN apk --no-cache --update-cache add gcc libc-dev
5 | RUN SCONE_MODE=sim pip3 install eth_abi
6 |
7 | ### copy the code inside the image
8 | COPY ./src /app
9 |
10 | ### protect file system with Scone
11 | COPY ./tee/protect-fs.sh ./tee/Dockerfile /build/
12 | RUN sh /build/protect-fs.sh /app
13 |
14 | ENTRYPOINT ["python", "/app/app.py"]
15 |
--------------------------------------------------------------------------------
/offchain-computing/offchain-tee-kaiko-pricefeed/tee/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | ## Build app, get MrEnclave, docker tag & push, iexec app deploy, iexec order publish --app
4 |
5 | ### Docker Build & Push
6 |
7 | `./tee/build`
8 |
9 | ### Deploy app
10 |
11 | ```
12 | "app": {
13 | "owner": "0x15Bd06807eF0F2284a9C5baeAA2EF4d5a88eB72A",
14 | "name": "offchain-tee-kaiko-pricefeed",
15 | "type": "DOCKER",
16 | "multiaddr": "docker.io/iexechub/offchain-tee-kaiko-pricefeed:5.0.0",
17 | "checksum": "0x0000000000000000000000000000000000000000000000000000000000000000",
18 | "mrenclave": "02371762e6daedd94bddc8d378e465a56178d2db086befb32cc30a503b92405c|593aa8de86756c99b599117dd3e79ac7|2a421b3b7a6f771c3a602f49ce05b6a75793312b8e2c61c673fe7085a16cf138|python /app/app.py"
19 | }
20 | ℹ Using chain [goerli]
21 | ✔ Deployed new app at address 0xbfE5C1eacD47ba0C9876cc541a3dF8D70d221D4f
22 |
23 | iexec app init --wallet-file xx
24 | # Add app uri & MrEnclave before next step
25 | iexec app deploy --wallet-file xx --chain goerli
26 | ```
27 |
28 | ## Create confidential asset (key.txt), iexec dataset encrypt, ipfs add encrypted-dataset.zip, iexec dataset push-secret, iexec dataset deploy, iexec order publish --dataset
29 |
30 | ```
31 | ls /home/alice/iexec
32 | ├── iexec.json
33 | ├── chain.json
34 | ```
35 |
36 | Secret: ```/home/alice/iexec/dataset/original/key.txt```
37 |
38 | ```iexec dataset encrypt --algorithm scone```
39 | ```
40 | ├── iexec.json
41 | ├── chain.json
42 | ├── datasets
43 | │ ├── encrypted
44 | │ │ └── dataset_key.txt.zip
45 | │ └── original
46 | │ └── key.txt
47 | ```
48 |
49 | ### Deploy Dataset
50 |
51 | Make ```dataset_key.txt.zip``` publicly available (IPFS, Raw Github)
52 |
53 | ```
54 | //v5 branch
55 | "dataset": {
56 | "owner": "0x15Bd06807eF0F2284a9C5baeAA2EF4d5a88eB72A",
57 | "name": "encrypted-kaiko-pricefeed-api-key",
58 | "multiaddr": "https://raw.githubusercontent.com/iExecBlockchainComputing/iexec-apps/v5/offchain-computing/offchain-tee-kaiko-pricefeed/tee/datasets/encrypted/dataset_key.txt.zip",
59 | "checksum": "0x0000000000000000000000000000000000000000000000000000000000000000"
60 | }
61 | ℹ Using chain [goerli]
62 | ✔ Deployed new dataset at address 0x792D22e259D78D7939daa4De4Da99C3fd2C80074
63 |
64 | //master branch
65 | "dataset": {
66 | "owner": "0x15Bd06807eF0F2284a9C5baeAA2EF4d5a88eB72A",
67 | "name": "encrypted-kaiko-pricefeed-api-key",
68 | "multiaddr": "https://raw.githubusercontent.com/iExecBlockchainComputing/iexec-apps/master/offchain-computing/offchain-tee-kaiko-pricefeed/tee/datasets/encrypted/dataset_key.txt.zip",
69 | "checksum": "0x0000000000000000000000000000000000000000000000000000000000000000"
70 | }
71 | ℹ Using chain [goerli]
72 | ✔ Deployed new dataset at address 0xc544573dEf12c71F0bA8bCF992d3Ed9590586452
73 | ```
74 |
75 |
76 | ```
77 | iexec dataset init --wallet-file xx
78 | # Add dataset uri before next step
79 | iexec dataset deploy --wallet-file xx --chain goerli
80 |
81 | iexec dataset push-secret 0xdataset --secret-path /home/alice/iexec/.secrets/datasets/dataset.secret --keystoredir=/home/alice/wallets --wallet-file=wallet.json --password=xx --chain kovan
82 | ```
83 |
84 | ```
85 | ## Orders
86 |
87 | iexec order init --app --wallet-file xx --chain goerli
88 | iexec order sign --app --wallet-file xx --chain goerli
89 | iexec order publish --app --wallet-file xx --chain goerli
90 |
91 | iexec order init --dataset --wallet-file xx --chain goerli
92 | # Add app restriction before next step
93 | iexec order sign --dataset --wallet-file xx --chain goerli
94 | iexec order publish --dataset --wallet-file xx --chain goerli
95 | ```
96 |
97 | ## Run
98 |
99 | ```
100 | iexec app run 0xbfE5C1eacD47ba0C9876cc541a3dF8D70d221D4f --dataset 0x792D22e259D78D7939daa4De4Da99C3fd2C80074 --workerpool 0xEb6c3FA8522f2C342E94941C21d3947c099e6Da0 --params '{"iexec_args": "btc usd 9","iexec_tee_post_compute_image":"iexechub/tee-worker-post-compute:1.0.0","iexec_tee_post_compute_fingerprint":"7f9f64e152f30d3f6e450d18fd64d6cd5d323d2af3fd153a3697a155a0d8f113|aa413ae09b0483bf8bbaf83cf4cc6957|13076027fc67accba753a3ed2edf03227dfd013b450d68833a5589ec44132100"}' --tag 0x0000000000000000000000000000000000000000000000000000000000000001 --callback 0x0000000000000000000000000000000000000001
101 | ```
102 |
103 |
104 | * params: ```btc usd 9```
105 | * tag: ```0x0000000000000000000000000000000000000000000000000000000000000001```
106 |
107 | Goerli run: https://v5.explorer.iex.ec/goerli/task/0xe9be15e98933a2809a07fa64a7cd7ecbd22bf9cc2bb038fffebc006f6e3bca48
108 |
109 | ## Oracle receiver
110 |
111 | //TODO UPDATE
112 | Goerli:
113 | * receiver address: https://goerli.etherscan.io/address/0x7Ca601977C9075bAe2F173bA248356280008AeaF
114 | * verified code: https://goerli.etherscan.io/address/0x7Ca601977C9075bAe2F173bA248356280008AeaF#code
115 | * owner: `0xA1162f07afC3e45Ae89D2252706eB355F6349641`
116 |
--------------------------------------------------------------------------------
/offchain-computing/offchain-tee-kaiko-pricefeed/tee/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | cd $(dirname $0)
3 | docker image build --no-cache --force-rm -f ../tee/Dockerfile -t offchain-tee-kaiko-pricefeed .. $@
4 |
--------------------------------------------------------------------------------
/offchain-computing/offchain-tee-kaiko-pricefeed/tee/datasets/encrypted/dataset_key.txt.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iExecBlockchainComputing/iexec-apps/afb2843ca746eb5c1d537cd247b67bf73ba32e0d/offchain-computing/offchain-tee-kaiko-pricefeed/tee/datasets/encrypted/dataset_key.txt.zip
--------------------------------------------------------------------------------
/offchain-computing/offchain-tee-kaiko-pricefeed/tee/protect-fs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | cd $(dirname $0)
4 |
5 | if [ ! -e Dockerfile ]
6 | then
7 | printf "\nFailed to parse Dockerfile ENTRYPOINT\n"
8 | printf "Did you forget to add your Dockerfile in your build?\n"
9 | printf "COPY ./tee/Dockerfile /build/\n\n"
10 | exit 1
11 | fi
12 |
13 | ENTRYPOINT_ARGS=$(grep ENTRYPOINT ./Dockerfile | tail -1 | grep -o '"[^"]\+"' | tr -d '"')
14 | echo $ENTRYPOINT_ARGS > ./entrypoint
15 |
16 | if [ -z "$ENTRYPOINT_ARGS" ]
17 | then
18 | printf "\nFailed to parse Dockerfile ENTRYPOINT\n"
19 | printf "Did you forget to add an ENTRYPOINT to your Dockerfile?\n"
20 | printf "ENTRYPOINT [\"executable\", \"param1\", \"param2\"]\n\n"
21 | exit 1
22 | fi
23 |
24 | INTERPRETER=$(awk '{print $1}' ./entrypoint) # python
25 | ENTRYPOINT=$(cat ./entrypoint) # /python /app/app.py
26 |
27 | export SCONE_MODE=sim
28 | export SCONE_HEAP=1G
29 |
30 | APP_FOLDER=$1
31 |
32 | printf "\n### Starting file system protection ...\n\n"
33 |
34 | scone fspf create /fspf.pb
35 | scone fspf addr /fspf.pb / --not-protected --kernel /
36 | scone fspf addr /fspf.pb /usr --authenticated --kernel /usr
37 | scone fspf addf /fspf.pb /usr /usr
38 | scone fspf addr /fspf.pb /bin --authenticated --kernel /bin
39 | scone fspf addf /fspf.pb /bin /bin
40 | scone fspf addr /fspf.pb /lib --authenticated --kernel /lib
41 | scone fspf addf /fspf.pb /lib /lib
42 | scone fspf addr /fspf.pb /etc/ssl --authenticated --kernel /etc/ssl
43 | scone fspf addf /fspf.pb /etc/ssl /etc/ssl
44 | scone fspf addr /fspf.pb /sbin --authenticated --kernel /sbin
45 | scone fspf addf /fspf.pb /sbin /sbin
46 | printf "\n### Protecting code found in folder \"$APP_FOLDER\"\n\n"
47 | scone fspf addr /fspf.pb $APP_FOLDER --authenticated --kernel $APP_FOLDER
48 | scone fspf addf /fspf.pb $APP_FOLDER $APP_FOLDER
49 |
50 | scone fspf encrypt /fspf.pb > ./keytag
51 |
52 | MRENCLAVE="$(SCONE_HASH=1 $INTERPRETER)"
53 | FSPF_TAG=$(cat ./keytag | awk '{print $9}')
54 | FSPF_KEY=$(cat ./keytag | awk '{print $11}')
55 | FINGERPRINT="$FSPF_KEY|$FSPF_TAG|$MRENCLAVE|$ENTRYPOINT"
56 | echo $FINGERPRINT > ./fingerprint
57 |
58 | printf "\n\n"
59 | printf "Your application fingerprint (mrenclave) is ready:\n"
60 | printf "#####################################################################\n"
61 | printf "iexec.json:\n\n"
62 | printf "%s\n" "\"app\": { " " \"owner\" : ... " " \"name\": ... " " ..." " \"mrenclave\": \"$FINGERPRINT\"" "}"
63 | printf "#####################################################################\n"
64 | printf "Hint: Replace 'mrenclave' before doing 'iexec app deploy' step.\n"
65 | printf "\n\n"
66 |
--------------------------------------------------------------------------------
/offchain-computing/offchain-tee-kaiko-pricefeed/tee/run:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | cd $(dirname $0)
3 |
4 | IEXEC_OUT=/tmp/iexec_out
5 |
6 | rm -rf $IEXEC_OUT
7 | mkdir -p $IEXEC_OUT
8 |
9 | docker run --rm -e IEXEC_OUT=/iexec_out -e IEXEC_IN=/iexec_in -e IEXEC_DATASET_FILENAME=key.txt -v $IEXEC_OUT:/iexec_out -v $(pwd)/confidential-assets:/iexec_in --device /dev/isgx offchain-tee-kaiko-pricefeed $@
10 |
11 | echo
12 | find $IEXEC_OUT
13 |
--------------------------------------------------------------------------------