├── CHANGELOG.md ├── LICENSE ├── README.md ├── README_CN.md ├── config.ini ├── datatoken ├── .DS_Store ├── __init__.py ├── cli │ └── __init__.py ├── config.py ├── core │ ├── __init__.py │ ├── ddo.py │ ├── dt_helper.py │ ├── metadata.py │ ├── operator.py │ ├── service.py │ └── utils.py ├── csp │ ├── __init__.py │ └── agreement.py ├── model │ ├── __init__.py │ ├── asset_provider.py │ ├── constants.py │ ├── dt_factory.py │ ├── keeper.py │ ├── op_template.py │ ├── role_controller.py │ └── task_market.py ├── service │ ├── __init__.py │ ├── asset.py │ ├── job.py │ ├── system.py │ ├── tracer.py │ └── verifier.py ├── store │ ├── __init__.py │ ├── asset_resolve.py │ └── ipfs_provider.py └── web3 │ ├── __init__.py │ ├── account.py │ ├── constants.py │ ├── contract_base.py │ ├── contract_handler.py │ ├── event_filter.py │ ├── event_listener.py │ ├── transactions.py │ ├── utils.py │ ├── wallet.py │ ├── web3_overrides │ ├── __init__.py │ ├── contract.py │ ├── http_provider.py │ ├── request.py │ └── signature.py │ └── web3_provider.py ├── docs ├── .DS_Store ├── figures │ ├── 1.png │ ├── 2.png │ ├── test.png │ └── tree.png └── grants_cn.md ├── requirements.txt ├── setup.cfg ├── setup.py └── tests ├── template ├── add_op.py └── args.json ├── test.py └── test_web3.py /CHANGELOG.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MVP-Labs/data-token/fae7adc8945418590764f284ccd70f9dbb4a4382/CHANGELOG.md -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU LESSER GENERAL PUBLIC LICENSE 2 | Version 2.1, February 1999 3 | 4 | Copyright (C) 1991, 1999 Free Software Foundation, Inc. 5 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 6 | Everyone is permitted to copy and distribute verbatim copies 7 | of this license document, but changing it is not allowed. 
8 | 9 | [This is the first released version of the Lesser GPL. It also counts 10 | as the successor of the GNU Library Public License, version 2, hence 11 | the version number 2.1.] 12 | 13 | Preamble 14 | 15 | The licenses for most software are designed to take away your 16 | freedom to share and change it. By contrast, the GNU General Public 17 | Licenses are intended to guarantee your freedom to share and change 18 | free software--to make sure the software is free for all its users. 19 | 20 | This license, the Lesser General Public License, applies to some 21 | specially designated software packages--typically libraries--of the 22 | Free Software Foundation and other authors who decide to use it. You 23 | can use it too, but we suggest you first think carefully about whether 24 | this license or the ordinary General Public License is the better 25 | strategy to use in any particular case, based on the explanations below. 26 | 27 | When we speak of free software, we are referring to freedom of use, 28 | not price. Our General Public Licenses are designed to make sure that 29 | you have the freedom to distribute copies of free software (and charge 30 | for this service if you wish); that you receive source code or can get 31 | it if you want it; that you can change the software and use pieces of 32 | it in new free programs; and that you are informed that you can do 33 | these things. 34 | 35 | To protect your rights, we need to make restrictions that forbid 36 | distributors to deny you these rights or to ask you to surrender these 37 | rights. These restrictions translate to certain responsibilities for 38 | you if you distribute copies of the library or if you modify it. 39 | 40 | For example, if you distribute copies of the library, whether gratis 41 | or for a fee, you must give the recipients all the rights that we gave 42 | you. You must make sure that they, too, receive or can get the source 43 | code. 
If you link other code with the library, you must provide 44 | complete object files to the recipients, so that they can relink them 45 | with the library after making changes to the library and recompiling 46 | it. And you must show them these terms so they know their rights. 47 | 48 | We protect your rights with a two-step method: (1) we copyright the 49 | library, and (2) we offer you this license, which gives you legal 50 | permission to copy, distribute and/or modify the library. 51 | 52 | To protect each distributor, we want to make it very clear that 53 | there is no warranty for the free library. Also, if the library is 54 | modified by someone else and passed on, the recipients should know 55 | that what they have is not the original version, so that the original 56 | author's reputation will not be affected by problems that might be 57 | introduced by others. 58 | 59 | Finally, software patents pose a constant threat to the existence of 60 | any free program. We wish to make sure that a company cannot 61 | effectively restrict the users of a free program by obtaining a 62 | restrictive license from a patent holder. Therefore, we insist that 63 | any patent license obtained for a version of the library must be 64 | consistent with the full freedom of use specified in this license. 65 | 66 | Most GNU software, including some libraries, is covered by the 67 | ordinary GNU General Public License. This license, the GNU Lesser 68 | General Public License, applies to certain designated libraries, and 69 | is quite different from the ordinary General Public License. We use 70 | this license for certain libraries in order to permit linking those 71 | libraries into non-free programs. 72 | 73 | When a program is linked with a library, whether statically or using 74 | a shared library, the combination of the two is legally speaking a 75 | combined work, a derivative of the original library. 
The ordinary 76 | General Public License therefore permits such linking only if the 77 | entire combination fits its criteria of freedom. The Lesser General 78 | Public License permits more lax criteria for linking other code with 79 | the library. 80 | 81 | We call this license the "Lesser" General Public License because it 82 | does Less to protect the user's freedom than the ordinary General 83 | Public License. It also provides other free software developers Less 84 | of an advantage over competing non-free programs. These disadvantages 85 | are the reason we use the ordinary General Public License for many 86 | libraries. However, the Lesser license provides advantages in certain 87 | special circumstances. 88 | 89 | For example, on rare occasions, there may be a special need to 90 | encourage the widest possible use of a certain library, so that it becomes 91 | a de-facto standard. To achieve this, non-free programs must be 92 | allowed to use the library. A more frequent case is that a free 93 | library does the same job as widely used non-free libraries. In this 94 | case, there is little to gain by limiting the free library to free 95 | software only, so we use the Lesser General Public License. 96 | 97 | In other cases, permission to use a particular library in non-free 98 | programs enables a greater number of people to use a large body of 99 | free software. For example, permission to use the GNU C Library in 100 | non-free programs enables many more people to use the whole GNU 101 | operating system, as well as its variant, the GNU/Linux operating 102 | system. 103 | 104 | Although the Lesser General Public License is Less protective of the 105 | users' freedom, it does ensure that the user of a program that is 106 | linked with the Library has the freedom and the wherewithal to run 107 | that program using a modified version of the Library. 108 | 109 | The precise terms and conditions for copying, distribution and 110 | modification follow. 
Pay close attention to the difference between a 111 | "work based on the library" and a "work that uses the library". The 112 | former contains code derived from the library, whereas the latter must 113 | be combined with the library in order to run. 114 | 115 | GNU LESSER GENERAL PUBLIC LICENSE 116 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 117 | 118 | 0. This License Agreement applies to any software library or other 119 | program which contains a notice placed by the copyright holder or 120 | other authorized party saying it may be distributed under the terms of 121 | this Lesser General Public License (also called "this License"). 122 | Each licensee is addressed as "you". 123 | 124 | A "library" means a collection of software functions and/or data 125 | prepared so as to be conveniently linked with application programs 126 | (which use some of those functions and data) to form executables. 127 | 128 | The "Library", below, refers to any such software library or work 129 | which has been distributed under these terms. A "work based on the 130 | Library" means either the Library or any derivative work under 131 | copyright law: that is to say, a work containing the Library or a 132 | portion of it, either verbatim or with modifications and/or translated 133 | straightforwardly into another language. (Hereinafter, translation is 134 | included without limitation in the term "modification".) 135 | 136 | "Source code" for a work means the preferred form of the work for 137 | making modifications to it. For a library, complete source code means 138 | all the source code for all modules it contains, plus any associated 139 | interface definition files, plus the scripts used to control compilation 140 | and installation of the library. 141 | 142 | Activities other than copying, distribution and modification are not 143 | covered by this License; they are outside its scope. 
The act of 144 | running a program using the Library is not restricted, and output from 145 | such a program is covered only if its contents constitute a work based 146 | on the Library (independent of the use of the Library in a tool for 147 | writing it). Whether that is true depends on what the Library does 148 | and what the program that uses the Library does. 149 | 150 | 1. You may copy and distribute verbatim copies of the Library's 151 | complete source code as you receive it, in any medium, provided that 152 | you conspicuously and appropriately publish on each copy an 153 | appropriate copyright notice and disclaimer of warranty; keep intact 154 | all the notices that refer to this License and to the absence of any 155 | warranty; and distribute a copy of this License along with the 156 | Library. 157 | 158 | You may charge a fee for the physical act of transferring a copy, 159 | and you may at your option offer warranty protection in exchange for a 160 | fee. 161 | 162 | 2. You may modify your copy or copies of the Library or any portion 163 | of it, thus forming a work based on the Library, and copy and 164 | distribute such modifications or work under the terms of Section 1 165 | above, provided that you also meet all of these conditions: 166 | 167 | a) The modified work must itself be a software library. 168 | 169 | b) You must cause the files modified to carry prominent notices 170 | stating that you changed the files and the date of any change. 171 | 172 | c) You must cause the whole of the work to be licensed at no 173 | charge to all third parties under the terms of this License. 
174 | 175 | d) If a facility in the modified Library refers to a function or a 176 | table of data to be supplied by an application program that uses 177 | the facility, other than as an argument passed when the facility 178 | is invoked, then you must make a good faith effort to ensure that, 179 | in the event an application does not supply such function or 180 | table, the facility still operates, and performs whatever part of 181 | its purpose remains meaningful. 182 | 183 | (For example, a function in a library to compute square roots has 184 | a purpose that is entirely well-defined independent of the 185 | application. Therefore, Subsection 2d requires that any 186 | application-supplied function or table used by this function must 187 | be optional: if the application does not supply it, the square 188 | root function must still compute square roots.) 189 | 190 | These requirements apply to the modified work as a whole. If 191 | identifiable sections of that work are not derived from the Library, 192 | and can be reasonably considered independent and separate works in 193 | themselves, then this License, and its terms, do not apply to those 194 | sections when you distribute them as separate works. But when you 195 | distribute the same sections as part of a whole which is a work based 196 | on the Library, the distribution of the whole must be on the terms of 197 | this License, whose permissions for other licensees extend to the 198 | entire whole, and thus to each and every part regardless of who wrote 199 | it. 200 | 201 | Thus, it is not the intent of this section to claim rights or contest 202 | your rights to work written entirely by you; rather, the intent is to 203 | exercise the right to control the distribution of derivative or 204 | collective works based on the Library. 
205 | 206 | In addition, mere aggregation of another work not based on the Library 207 | with the Library (or with a work based on the Library) on a volume of 208 | a storage or distribution medium does not bring the other work under 209 | the scope of this License. 210 | 211 | 3. You may opt to apply the terms of the ordinary GNU General Public 212 | License instead of this License to a given copy of the Library. To do 213 | this, you must alter all the notices that refer to this License, so 214 | that they refer to the ordinary GNU General Public License, version 2, 215 | instead of to this License. (If a newer version than version 2 of the 216 | ordinary GNU General Public License has appeared, then you can specify 217 | that version instead if you wish.) Do not make any other change in 218 | these notices. 219 | 220 | Once this change is made in a given copy, it is irreversible for 221 | that copy, so the ordinary GNU General Public License applies to all 222 | subsequent copies and derivative works made from that copy. 223 | 224 | This option is useful when you wish to copy part of the code of 225 | the Library into a program that is not a library. 226 | 227 | 4. You may copy and distribute the Library (or a portion or 228 | derivative of it, under Section 2) in object code or executable form 229 | under the terms of Sections 1 and 2 above provided that you accompany 230 | it with the complete corresponding machine-readable source code, which 231 | must be distributed under the terms of Sections 1 and 2 above on a 232 | medium customarily used for software interchange. 233 | 234 | If distribution of object code is made by offering access to copy 235 | from a designated place, then offering equivalent access to copy the 236 | source code from the same place satisfies the requirement to 237 | distribute the source code, even though third parties are not 238 | compelled to copy the source along with the object code. 239 | 240 | 5. 
A program that contains no derivative of any portion of the 241 | Library, but is designed to work with the Library by being compiled or 242 | linked with it, is called a "work that uses the Library". Such a 243 | work, in isolation, is not a derivative work of the Library, and 244 | therefore falls outside the scope of this License. 245 | 246 | However, linking a "work that uses the Library" with the Library 247 | creates an executable that is a derivative of the Library (because it 248 | contains portions of the Library), rather than a "work that uses the 249 | library". The executable is therefore covered by this License. 250 | Section 6 states terms for distribution of such executables. 251 | 252 | When a "work that uses the Library" uses material from a header file 253 | that is part of the Library, the object code for the work may be a 254 | derivative work of the Library even though the source code is not. 255 | Whether this is true is especially significant if the work can be 256 | linked without the Library, or if the work is itself a library. The 257 | threshold for this to be true is not precisely defined by law. 258 | 259 | If such an object file uses only numerical parameters, data 260 | structure layouts and accessors, and small macros and small inline 261 | functions (ten lines or less in length), then the use of the object 262 | file is unrestricted, regardless of whether it is legally a derivative 263 | work. (Executables containing this object code plus portions of the 264 | Library will still fall under Section 6.) 265 | 266 | Otherwise, if the work is a derivative of the Library, you may 267 | distribute the object code for the work under the terms of Section 6. 268 | Any executables containing that work also fall under Section 6, 269 | whether or not they are linked directly with the Library itself. 270 | 271 | 6. 
As an exception to the Sections above, you may also combine or 272 | link a "work that uses the Library" with the Library to produce a 273 | work containing portions of the Library, and distribute that work 274 | under terms of your choice, provided that the terms permit 275 | modification of the work for the customer's own use and reverse 276 | engineering for debugging such modifications. 277 | 278 | You must give prominent notice with each copy of the work that the 279 | Library is used in it and that the Library and its use are covered by 280 | this License. You must supply a copy of this License. If the work 281 | during execution displays copyright notices, you must include the 282 | copyright notice for the Library among them, as well as a reference 283 | directing the user to the copy of this License. Also, you must do one 284 | of these things: 285 | 286 | a) Accompany the work with the complete corresponding 287 | machine-readable source code for the Library including whatever 288 | changes were used in the work (which must be distributed under 289 | Sections 1 and 2 above); and, if the work is an executable linked 290 | with the Library, with the complete machine-readable "work that 291 | uses the Library", as object code and/or source code, so that the 292 | user can modify the Library and then relink to produce a modified 293 | executable containing the modified Library. (It is understood 294 | that the user who changes the contents of definitions files in the 295 | Library will not necessarily be able to recompile the application 296 | to use the modified definitions.) 297 | 298 | b) Use a suitable shared library mechanism for linking with the 299 | Library. 
A suitable mechanism is one that (1) uses at run time a 300 | copy of the library already present on the user's computer system, 301 | rather than copying library functions into the executable, and (2) 302 | will operate properly with a modified version of the library, if 303 | the user installs one, as long as the modified version is 304 | interface-compatible with the version that the work was made with. 305 | 306 | c) Accompany the work with a written offer, valid for at 307 | least three years, to give the same user the materials 308 | specified in Subsection 6a, above, for a charge no more 309 | than the cost of performing this distribution. 310 | 311 | d) If distribution of the work is made by offering access to copy 312 | from a designated place, offer equivalent access to copy the above 313 | specified materials from the same place. 314 | 315 | e) Verify that the user has already received a copy of these 316 | materials or that you have already sent this user a copy. 317 | 318 | For an executable, the required form of the "work that uses the 319 | Library" must include any data and utility programs needed for 320 | reproducing the executable from it. However, as a special exception, 321 | the materials to be distributed need not include anything that is 322 | normally distributed (in either source or binary form) with the major 323 | components (compiler, kernel, and so on) of the operating system on 324 | which the executable runs, unless that component itself accompanies 325 | the executable. 326 | 327 | It may happen that this requirement contradicts the license 328 | restrictions of other proprietary libraries that do not normally 329 | accompany the operating system. Such a contradiction means you cannot 330 | use both them and the Library together in an executable that you 331 | distribute. 332 | 333 | 7. 
You may place library facilities that are a work based on the 334 | Library side-by-side in a single library together with other library 335 | facilities not covered by this License, and distribute such a combined 336 | library, provided that the separate distribution of the work based on 337 | the Library and of the other library facilities is otherwise 338 | permitted, and provided that you do these two things: 339 | 340 | a) Accompany the combined library with a copy of the same work 341 | based on the Library, uncombined with any other library 342 | facilities. This must be distributed under the terms of the 343 | Sections above. 344 | 345 | b) Give prominent notice with the combined library of the fact 346 | that part of it is a work based on the Library, and explaining 347 | where to find the accompanying uncombined form of the same work. 348 | 349 | 8. You may not copy, modify, sublicense, link with, or distribute 350 | the Library except as expressly provided under this License. Any 351 | attempt otherwise to copy, modify, sublicense, link with, or 352 | distribute the Library is void, and will automatically terminate your 353 | rights under this License. However, parties who have received copies, 354 | or rights, from you under this License will not have their licenses 355 | terminated so long as such parties remain in full compliance. 356 | 357 | 9. You are not required to accept this License, since you have not 358 | signed it. However, nothing else grants you permission to modify or 359 | distribute the Library or its derivative works. These actions are 360 | prohibited by law if you do not accept this License. Therefore, by 361 | modifying or distributing the Library (or any work based on the 362 | Library), you indicate your acceptance of this License to do so, and 363 | all its terms and conditions for copying, distributing or modifying 364 | the Library or works based on it. 365 | 366 | 10. 
Each time you redistribute the Library (or any work based on the 367 | Library), the recipient automatically receives a license from the 368 | original licensor to copy, distribute, link with or modify the Library 369 | subject to these terms and conditions. You may not impose any further 370 | restrictions on the recipients' exercise of the rights granted herein. 371 | You are not responsible for enforcing compliance by third parties with 372 | this License. 373 | 374 | 11. If, as a consequence of a court judgment or allegation of patent 375 | infringement or for any other reason (not limited to patent issues), 376 | conditions are imposed on you (whether by court order, agreement or 377 | otherwise) that contradict the conditions of this License, they do not 378 | excuse you from the conditions of this License. If you cannot 379 | distribute so as to satisfy simultaneously your obligations under this 380 | License and any other pertinent obligations, then as a consequence you 381 | may not distribute the Library at all. For example, if a patent 382 | license would not permit royalty-free redistribution of the Library by 383 | all those who receive copies directly or indirectly through you, then 384 | the only way you could satisfy both it and this License would be to 385 | refrain entirely from distribution of the Library. 386 | 387 | If any portion of this section is held invalid or unenforceable under any 388 | particular circumstance, the balance of the section is intended to apply, 389 | and the section as a whole is intended to apply in other circumstances. 390 | 391 | It is not the purpose of this section to induce you to infringe any 392 | patents or other property right claims or to contest validity of any 393 | such claims; this section has the sole purpose of protecting the 394 | integrity of the free software distribution system which is 395 | implemented by public license practices. 
Many people have made 396 | generous contributions to the wide range of software distributed 397 | through that system in reliance on consistent application of that 398 | system; it is up to the author/donor to decide if he or she is willing 399 | to distribute software through any other system and a licensee cannot 400 | impose that choice. 401 | 402 | This section is intended to make thoroughly clear what is believed to 403 | be a consequence of the rest of this License. 404 | 405 | 12. If the distribution and/or use of the Library is restricted in 406 | certain countries either by patents or by copyrighted interfaces, the 407 | original copyright holder who places the Library under this License may add 408 | an explicit geographical distribution limitation excluding those countries, 409 | so that distribution is permitted only in or among countries not thus 410 | excluded. In such case, this License incorporates the limitation as if 411 | written in the body of this License. 412 | 413 | 13. The Free Software Foundation may publish revised and/or new 414 | versions of the Lesser General Public License from time to time. 415 | Such new versions will be similar in spirit to the present version, 416 | but may differ in detail to address new problems or concerns. 417 | 418 | Each version is given a distinguishing version number. If the Library 419 | specifies a version number of this License which applies to it and 420 | "any later version", you have the option of following the terms and 421 | conditions either of that version or of any later version published by 422 | the Free Software Foundation. If the Library does not specify a 423 | license version number, you may choose any version ever published by 424 | the Free Software Foundation. 425 | 426 | 14. If you wish to incorporate parts of the Library into other free 427 | programs whose distribution conditions are incompatible with these, 428 | write to the author to ask for permission. 
For software which is 429 | copyrighted by the Free Software Foundation, write to the Free 430 | Software Foundation; we sometimes make exceptions for this. Our 431 | decision will be guided by the two goals of preserving the free status 432 | of all derivatives of our free software and of promoting the sharing 433 | and reuse of software generally. 434 | 435 | NO WARRANTY 436 | 437 | 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO 438 | WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 439 | EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR 440 | OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY 441 | KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE 442 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 443 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE 444 | LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME 445 | THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 446 | 447 | 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN 448 | WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY 449 | AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU 450 | FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR 451 | CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE 452 | LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING 453 | RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A 454 | FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF 455 | SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH 456 | DAMAGES. 
457 | 458 | END OF TERMS AND CONDITIONS 459 | 460 | How to Apply These Terms to Your New Libraries 461 | 462 | If you develop a new library, and you want it to be of the greatest 463 | possible use to the public, we recommend making it free software that 464 | everyone can redistribute and change. You can do so by permitting 465 | redistribution under these terms (or, alternatively, under the terms of the 466 | ordinary General Public License). 467 | 468 | To apply these terms, attach the following notices to the library. It is 469 | safest to attach them to the start of each source file to most effectively 470 | convey the exclusion of warranty; and each file should have at least the 471 | "copyright" line and a pointer to where the full notice is found. 472 | 473 | 474 | Copyright (C) 475 | 476 | This library is free software; you can redistribute it and/or 477 | modify it under the terms of the GNU Lesser General Public 478 | License as published by the Free Software Foundation; either 479 | version 2.1 of the License, or (at your option) any later version. 480 | 481 | This library is distributed in the hope that it will be useful, 482 | but WITHOUT ANY WARRANTY; without even the implied warranty of 483 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 484 | Lesser General Public License for more details. 485 | 486 | You should have received a copy of the GNU Lesser General Public 487 | License along with this library; if not, write to the Free Software 488 | Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 489 | USA 490 | 491 | Also add information on how to contact you by electronic and paper mail. 492 | 493 | You should also get your employer (if you work as a programmer) or your 494 | school, if any, to sign a "copyright disclaimer" for the library, if 495 | necessary. 
Here is a sample; alter the names: 496 | 497 | Yoyodyne, Inc., hereby disclaims all copyright interest in the 498 | library `Frob' (a library for tweaking knobs) written by James Random 499 | Hacker. 500 | 501 | , 1 April 1990 502 | Ty Coon, President of Vice 503 | 504 | That's all there is to it! 505 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

DataToken

2 | 3 | ## Overview 4 |
5 | 6 |
7 | 8 | [中文版](./README_CN.md) 9 | 10 | This project implements a new decentralized data management and off-chain trusted computing middleware, DataToken SDK. It is developed by Ownership Labs and supported by the [LatticeX](https://github.com/LatticeX-Foundation) Foundation. Design philosophies can be found in the [grants](./docs/grants_cn.md) and [paper](./docs/white_paper.md). The SDK leverages the trusted features of blockchains to return data ownership to its owners while maintaining the computability of data. 11 | 12 | ### Motivation 13 | 14 | Our vision is to make data flows more transparent. To achieve this, we design a new data service specification for traceable computation and hierarchical aggregation. Data owners can declare a permitted list of trusted operators and related constraints in the data service terms. Data aggregators can define trusted, distributed computing workflows on multiple data assets, formalizing data in different domains into an aggregated data union. Data buyers can directly purchase aggregated datasets and confirm the origins of each data asset inside it. 15 | 16 | Specifically, only when the pre-declared constraints are satisfied will assets be authorized for aggregated computation. This process can be executed automatically without manual audits, ultimately enabling data assets to be defined once and sold multiple times. This design is consistent with the structure of real-world data flows, and the whole lifecycle of data sharing and utilization becomes more transparent, compliant and traceable. 
17 | 18 | ### System Design 19 | 20 | | Module | Description | 21 | | -------------------------- | ----------------------------------------------------------------------------------------------- | 22 | | [dt-contracts](https://github.com/ownership-labs/dt-contracts) | smart contracts for data token | 23 | | [DataToken](https://github.com/ownership-labs/DataToken) | access control for decentralized data and runtime for computation monetization | 24 | | [Compute-to-Data](https://github.com/ownership-labs/Compute-to-Data) | smart data grid and on-premise computing system | 25 | | [AuthComputa](https://github.com/ownership-labs/AuthComputa) | data science framework for constrained, authorized, privacy-preserving ML | 26 | 27 | 28 | ## SDK Guides 29 | 30 | ### highlights 31 | 32 | The repo provides several key services for data collaboration, including System module, Asset module, Job module, Tracer module and Verifier module. Different modules are designed for different participators: 33 | 34 | - System administrators can manage asset providers and trusted operators that are registered on the blockchain by using the System module; 35 | - Asset providers and aggregators can use the Asset module to publish datasets/computation/algorithms, and validate service agreements and then authorize the aggregation of data unions; 36 | - Demanders and solvers can use the Job module to create tasks and submit solutions (e.g., off-chain data collaboration). Asset providers can also quickly verify remote execution; 37 | - Regulatory parties can use the Tracer module to check the whole lifecycle of cross-domain data sharing and utilization, ensuring the user privacy and legality of data monetization. Also, the data traders can price data as assets based on their origins and historical market information. 38 | 39 | The definition of data unions and trusted workflow service specification can be found in the [AuthComputa](https://github.com/ownership-labs/AuthComputa) repository. 
40 | 41 | ### play with it 42 | 43 | You first need to deploy dt-contracts, refer to [Deployment Tutorial](https://github.com/ownership-labs/dt-contracts). Then set up the config.ini in the DataToken directory (e.g., artifacts_path and address_file), and modify the accounts in the test files, e.g., using the four private keys provided by ganache-cli. 44 | 45 | Run the following commands: 46 | ``` 47 | $ git clone https://github.com/ownership-labs/DataToken 48 | $ git clone https://github.com/ownership-labs/dt-contracts 49 | $ cd DataToken 50 | $ export PYTHONPATH=$PYTHONPATH:../DataToken 51 | $ pip install -r requirements.txt --no-deps 52 | $ python tests/test.py 53 | ``` 54 | 55 | When you run it multiple times or modify the constraint parameters, the command line will print out the whole lifecycle of data sharing and utilization. 56 |
57 | 58 |
59 | 60 | ### examples and tutorials 61 | 62 | We provide several use cases, including cross-site data collaboration (between enterprises) and edge federated learning (between users), see the [examples](. /examples). We also design a smart data grid for serving private machine learning of sensitive data assets, see the [Compute-to-Data](https://github.com/ownership-labs/Compute-to-Data). With DataToken combined, data owners can quickly define allowed AI services and the data grid will automatically verify the external data usage requests. Third-party scientists can start remote executions and get results on data they cannot see. In other words, data owners run the codes on-premise and thus monetize the computation rights of private data. 63 | -------------------------------------------------------------------------------- /README_CN.md: -------------------------------------------------------------------------------- 1 |

DataToken

2 | 3 | ## 概览 4 |
5 | 6 |
7 | 8 | [英文版](./README.md) 9 | 10 | 本项目为Ownership Labs开发的跨域分布式数据权限管理和链下可信计算中间件(DataToken SDK),由[LatticeX](https://github.com/LatticeX-Foundation)基金会提供支持,细节可查阅[Grants](./docs/grants_cn.md)和[论文](./docs/white_paper.md)。该SDK利用了区块链的多方对等共识和信息不可篡改等可信特征,实现了数据归属确权、数据服务授权和数据计算追溯。 11 | 12 | ### 核心理念 13 | 14 | 项目旨在将数据流动的链条透明化,由此设计了可追溯计算、可层次化聚合的数据服务规范。资产方可在数据服务条款中声明可信操作的许可列表;聚合方可在多个不同域的数据资产和算力资产上定义可信的分布式计算工作流,形成数据联合体;数据买方可以直接购买聚合数据,并确认其中各数据的来源。 15 | 16 | 当满足资产预先声明的操作规范时,聚合计算才会被授权,且该过程可自动执行,无需人为地校验外部操作,最终实现(数据)资产一次发布、多次出售。这样的设计符合现实世界中的数据流动结构,数据共享计算的全生命周期将变得更透明合规且具有可追溯性。 17 | 18 | ### 系统组成 19 | 20 | | 模块 | 描述 | 21 | | -------------------------- | ----------------------------------------------------------------------------------------------- | 22 | | [dt-contracts](https://github.com/ownership-labs/dt-contracts) | 数据通证颁发、可信算子发布、任务市场等合约 | 23 | | [DataToken](https://github.com/ownership-labs/DataToken) | 分布式数据访问控制、数据协作运行时 | 24 | | [Compute-to-Data](https://github.com/ownership-labs/Compute-to-Data) | 数据资产服务网格、本地计算系统 | 25 | | [AuthComputa](https://github.com/ownership-labs/AuthComputa) | 面向数据科学家的跨域数据计算框架 | 26 | 27 | ## SDK使用指南 28 | 29 | ### 功能特性 30 | 31 | 该仓库下封装了数据协作过程中的几个关键服务模块,包括系统管理模块、资产管理模块、任务工作模块、跨域追溯模块和服务验证模块。不同的业务角色可以使用不同的模块: 32 | 33 | - 系统管理员可通过系统模块来管理链上的资产提供方和可信算子模版; 34 | - 资产提供方和聚合方可通过资产模块来实现数据资产和数据联合体的发布、服务验证和授权聚合; 35 | - 需求方和求解方可通过任务工作模块来完成任务发布和计算求解,资产方也可快速验证远程计算; 36 | - 监管方可通过跨域追溯模块来确认资产的合理利用,交易方也可根据资产的来源和历史生命周期来定价。 37 | 38 | 关于数据联合体的定义和分布式可信计算的服务规范可参考[AuthComputa](https://github.com/ownership-labs/AuthComputa)仓库。 39 | 40 | ### 运行流程 41 | 42 | 在运行测试前,首先需要部署dt-contracts,参考[部署教程](https://github.com/ownership-labs/dt-contracts)。同时配置DataToken目录下的config.ini,包括artifacts_path和address_file。修改tests目录中测试文件的账户,例如ganache-cli提供的前四个私钥。 43 | 44 | 使用如下命令测试: 45 | ``` 46 | $ git clone https://github.com/ownership-labs/DataToken 47 | $ git clone https://github.com/ownership-labs/dt-contracts 48 | $ cd DataToken 49 | $ export PYTHONPATH=$PYTHONPATH:../DataToken 50 | $ pip install -r 
requirements.txt --no-deps 51 | $ python tests/test.py 52 | ``` 53 | 54 | 当你运行地足够多次或修改其中的约束参数,命令行将打印出数据资产共享利用的全流程: 55 |
56 | 57 |
58 | 59 | ### MVP用例 60 | 61 | 我们还提供了DataToken在实际场景中的用例,包括企业间数据协作和用户级边缘计算(查看[examples](./examples))。同时,我们针对隐私AI问题,设计了可追溯计算的私域数据服务网格,帮助资产方快速定义本地计算服务并自动校验外部计算请求,第三方科学家可在数据可用不可见的情况下完成远程的隐私AI计算,查看[Compute-to-Data](https://github.com/ownership-labs/Compute-to-Data)。 -------------------------------------------------------------------------------- /config.ini: -------------------------------------------------------------------------------- 1 | [keeper] 2 | artifacts_path = ../dt-contracts/artifacts 3 | address_file = ../dt-contracts/artifacts/address.json 4 | network_url = http://localhost:8545 5 | network_name = ganache 6 | ipfs_endpoint = /dns/localhost/tcp/5001/http -------------------------------------------------------------------------------- /datatoken/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MVP-Labs/data-token/fae7adc8945418590764f284ccd70f9dbb4a4382/datatoken/.DS_Store -------------------------------------------------------------------------------- /datatoken/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MVP-Labs/data-token/fae7adc8945418590764f284ccd70f9dbb4a4382/datatoken/__init__.py -------------------------------------------------------------------------------- /datatoken/cli/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MVP-Labs/data-token/fae7adc8945418590764f284ccd70f9dbb4a4382/datatoken/cli/__init__.py -------------------------------------------------------------------------------- /datatoken/config.py: -------------------------------------------------------------------------------- 1 | """Config module.""" 2 | # Copyright 2021 The dt-sdk Authors 3 | # SPDX-License-Identifier: LGPL-2.1-only 4 | 5 | import os 6 | import logging 7 | from os import getenv 8 | from pathlib import Path 9 | from configparser import 
NAME_ARTIFACTS_PATH = 'artifacts_path'
NAME_ADDRESS_FILE = 'address_file'
NAME_NETWORK_URL = 'network_url'
NAME_NETWORK = 'network_name'
NAME_IPFS_ENDPOINT = 'ipfs_endpoint'


class Config(ConfigParser):
    """Configuration holder for the keeper/network settings.

    Values are read from an INI file (section ``[keeper]``) and/or an
    in-memory dict, and exposed as typed properties.
    """

    def __init__(self, filename=None, options_dict=None):
        """
        Initialize Config class.

        :param filename: Path of the config file, str.
        :param options_dict: Python dict with the config, dict.
        :raises FileNotFoundError: when neither a config file nor a dict is given.
        """
        ConfigParser.__init__(self)

        self._keeper_section = 'keeper'
        self._logger = logging.getLogger('config')

        if not filename:
            filename = getenv('CONFIG_FILE', './config.ini')

        if not os.path.exists(filename) and not options_dict:
            # BUG FIX: message typo ("provider" -> "provide"); also no
            # placeholder was used, so a plain string is enough.
            raise FileNotFoundError('please provide the config file first')

        if os.path.exists(filename):
            self._logger.debug('Config: loading config file %s', filename)
            self.read(filename)

        # Values from options_dict override those read from the file.
        if options_dict:
            self._logger.debug('Config: loading from dict %s', options_dict)
            self.read_dict(options_dict)

    @property
    def network_url(self):
        """Get the url of the network."""
        return self.get(self._keeper_section, NAME_NETWORK_URL)

    @property
    def network_name(self):
        """Get the name of the network."""
        return self.get(self._keeper_section, NAME_NETWORK)

    @property
    def ipfs_endpoint(self):
        """Get the ipfs endpoint address."""
        return self.get(self._keeper_section, NAME_IPFS_ENDPOINT)

    @property
    def artifacts_path(self):
        """Get the contracts artifact file path.

        Falls back to ``~/.dt/artifacts`` when the configured path is
        empty or does not exist.
        """
        path = None
        _path_string = self.get(self._keeper_section, NAME_ARTIFACTS_PATH)
        if _path_string:
            path = Path(_path_string).expanduser().resolve()

        if path and os.path.exists(path):
            return path

        # BUG FIX: previously `os.path.exists(path)` was called even when
        # path was None (empty option), raising TypeError; fall back instead.
        return Path('~/.dt/artifacts').expanduser().resolve()

    @property
    def address_file(self):
        """Get the contracts address file path.

        Falls back to ``<artifacts_path>/address.json`` when not configured
        or when the configured file does not exist.
        """
        file_path = self.get(self._keeper_section, NAME_ADDRESS_FILE)
        if file_path:
            file_path = Path(file_path).expanduser().resolve()

        if not file_path or not os.path.exists(file_path):
            file_path = os.path.join(self.artifacts_path, 'address.json')

        return file_path

    @property
    def keeper_options(self):
        """Prepare the option dict for the dt-web3 keeper."""
        return {self._keeper_section: {NAME_NETWORK_URL: self.network_url,
                                       NAME_NETWORK: self.network_name,
                                       NAME_ARTIFACTS_PATH: self.artifacts_path,
                                       NAME_ADDRESS_FILE: self.address_file}}

    @property
    def web3(self):
        """Get the web3 provider of the network."""
        # Local import keeps this module importable without the web3 stack.
        from datatoken.web3.web3_provider import Web3Provider
        return Web3Provider.get_web3(network_url=self.network_url)
# Copyright 2018 Ocean Protocol Foundation

import copy
import json

from datatoken.core.dt_helper import PREFIX
from datatoken.core.metadata import Metadata
from datatoken.core.service import Service
from datatoken.core.utils import get_timestamp, calc_checksum


class DDO:
    """DDO class to create, import and export DDO objects."""

    def __init__(self, json_text=None, json_filename=None, dictionary=None):
        """
        Initialize a DDO.

        Sources are checked in order: json_text, then json_filename,
        then dictionary.
        """
        self._dt = None
        self._creator = None
        self._metadata = {}
        self._services = []
        self._proof = None

        self._asset_type = None
        self._child_dts = None

        if not json_text and json_filename:
            with open(json_filename, 'r') as file_handle:
                json_text = file_handle.read()

        if json_text:
            self.from_dict(json.loads(json_text))
        elif dictionary:
            self.from_dict(dictionary)

    @property
    def dt(self):
        """ Get the DT identifier."""
        return self._dt

    @property
    def creator(self):
        """ Get the creator address."""
        return self._creator

    @property
    def metadata(self):
        """Get the metadata service."""
        return self._metadata

    @property
    def services(self):
        """Get the list of services."""
        return self._services

    @property
    def asset_type(self):
        """Get the asset type."""
        return self._asset_type

    @property
    def child_dts(self):
        """Get the child dts."""
        return self._child_dts

    @property
    def is_cdt(self):
        """Check cdt or not."""
        return bool(self._child_dts)

    @property
    def proof(self):
        """Get the static proof, or None."""
        return self._proof

    def get_service_by_index(self, index):
        """
        Get service for a given index.

        :param index: Service id, str
        :return: Service, or None when no service carries that index
        """
        for service in self._services:
            if service.index == index:
                return service

        return None

    def assign_dt(self, dt: str):
        """
        Assign dt to the DDO.

        :param dt: identifier, must start with the dt prefix
        :return: the assigned dt, str
        """
        assert dt.startswith(PREFIX), \
            f'"dt" seems invalid, must start with {PREFIX} prefix.'
        self._dt = dt
        return dt

    def add_creator(self, creator_address: str):
        """
        Add creator.

        :param creator_address: str
        """
        self._creator = creator_address

    def add_metadata(self, value_dict, child_dts=None):
        """
        Add metadata to the DDO.

        :param value_dict: dict
        :param child_dts: optional list of child dt identifiers
        :raises AssertionError: when an Algorithm asset has no child dts
        """
        values = copy.deepcopy(value_dict) if value_dict else {}
        assert Metadata.validate(values), \
            f'values {values} seems invalid.'

        asset_type = values['main']['type']
        if asset_type == 'Algorithm' and not child_dts:
            raise AssertionError('Algorithm must be composable DT.')

        self._metadata = values
        self._asset_type = asset_type
        self._child_dts = child_dts

    def add_service(self, value_dict):
        """
        Add a service to the list of services on the DDO.

        :param value_dict: Python dict with service index, endpoint, descriptor, attributes.
        :raises AssertionError: on duplicate index, invalid values, or a
            second service on an Algorithm asset
        """
        assert self._asset_type, \
            f'asset type seems unknown, please add metadata first.'

        if self._asset_type == 'Algorithm':
            if len(self._services):
                raise AssertionError(
                    'Algorithm can only contain one service for termination.')

        values = copy.deepcopy(value_dict) if value_dict else {}

        _index, _endpoint, _descriptor, _attributes = Service.parse_dict(
            values)
        # Service indices must be unique within one DDO.
        if self.get_service_by_index(_index) is not None:
            raise AssertionError(f'service index already exists.')

        service = Service(_index, _endpoint, _descriptor, _attributes)
        if not service.validate(self._asset_type, self._child_dts):
            raise AssertionError(f'values {values} seems invalid.')

        self._services.append(service)

        return

    def create_proof(self):
        """create the proof for this template."""
        data = {
            'dt': self._dt,
            'creator': self._creator,
            'metadata': self._metadata,
            'child_dts': self._child_dts
        }

        if self._services:
            data['services'] = [service.to_dict()
                                for service in self._services]

        checksum = calc_checksum(data)

        self._proof = {
            'created': get_timestamp(),
            'checksum': checksum
        }

        return checksum

    def to_dict(self):
        """
        Return the DDO as a JSON dict.

        :return: dict
        """
        data = {
            'dt': self._dt,
            'creator': self._creator,
            'metadata': self._metadata,
            'child_dts': self._child_dts,
            'proof': self._proof
        }
        if self._services:
            data['services'] = [service.to_dict()
                                for service in self._services]

        return data

    def from_dict(self, value_dict):
        """Import a JSON dict into this DDO and verify its checksum."""
        values = copy.deepcopy(value_dict)

        dt = values.pop('dt')
        creator = values.pop('creator')
        metadata = values.pop('metadata')
        child_dts = values.pop('child_dts')
        proof = values.pop('proof')

        self.assign_dt(dt)
        self.add_creator(creator)
        self.add_metadata(metadata, child_dts)

        self._services = []
        # BUG FIX: to_dict() omits 'services' for service-less DDOs, so
        # default to an empty list instead of raising KeyError on round-trip.
        for value in values.pop('services', []):
            self.add_service(value)

        checksum = self.create_proof()

        if not isinstance(proof, dict) or proof.get(
                'checksum') is None or proof['checksum'] != checksum:
            raise AssertionError(f'wrong template checksum')

        self._proof = proof
PREFIX = 'dt:ownership:'


class DTHelper:
    """Helper for generating, parsing and converting asset dt identifiers."""

    @staticmethod
    def generate_new_dt():
        """Create a fresh random dt.

        Format: dt:ownership:<64 hex characters> (two concatenated uuid4 hex
        strings appended to the prefix).

        :return: Asset dt, str.
        """
        return f'{PREFIX}{uuid.uuid4().hex}{uuid.uuid4().hex}'

    @staticmethod
    def id_to_dt(dt_id):
        """Return an Ownership dt from given a hex id."""
        if isinstance(dt_id, bytes):
            dt_id = Web3.toHex(dt_id)

        if not isinstance(dt_id, str):
            raise TypeError("dt id must be a hex string or bytes")

        # strip any leading '0x' of the hex string
        dt_id = remove_0x_prefix(dt_id)

        # normalize the zero address to a single '0'
        if Web3.toBytes(hexstr=dt_id) == b'':
            dt_id = '0'
        return f'{PREFIX}{dt_id}'

    @staticmethod
    def dt_to_id(dt):
        """Return the hex id extracted from a dt string, or None."""
        parsed = DTHelper.dt_parse(dt)
        if parsed and parsed['id'] is not None:
            return parsed['id']
        return None

    @staticmethod
    def dt_to_id_bytes(dt):
        """Convert an Ownership dt to its corresponding hex id, in bytes.

        A dt:ownership:<hex> string yields <hex> decoded to bytes; bytes
        input is returned unchanged.
        """
        if isinstance(dt, bytes):
            return dt

        if not isinstance(dt, str):
            raise TypeError(
                f'Unknown dt format, expected str or bytes, got {dt} of type {type(dt)}'
            )

        if re.match('^[0x]?[0-9A-Za-z]+$', dt):
            raise ValueError(f'{dt} must be a dt not a hex string')

        parsed = DTHelper.dt_parse(dt)
        if not parsed:
            raise ValueError(f'{dt} is not a valid dt')
        if not parsed['id']:
            raise ValueError(f'{dt} is not a valid Ownership dt')
        return Web3.toBytes(hexstr=parsed['id'])

    @staticmethod
    def id_bytes_to_dt(id_bytes):
        """Convert a raw byte id back into a full dt string."""
        return DTHelper.id_to_dt(convert_to_string(id_bytes))

    @staticmethod
    def dt_parse(dt):
        """Split a dt string into its method and id parts.

        :param dt: Asset dt, str.
        :return: Python dictionary with the method and the id.
        """
        if not isinstance(dt, str):
            raise TypeError(
                f'Expecting dt of string type, got {dt} of {type(dt)} type')

        match = re.match('^dt:([a-z0-9]+):([a-zA-Z0-9-.]+)(.*)', dt)
        if not match:
            raise ValueError(f'dt {dt} does not seem to be valid.')

        return {
            'method': match.group(1),
            'id': match.group(2),
        }
class MetadataMain(object):
    """The main attributes that need to be included in the Asset Metadata."""
    KEY = 'main'
    VALUES_KEYS = {
        'type',
        'author',
        'name',
        'created',
        'license'
    }
    # type e.g., Dataset/Computa/Model/Algorithm/Operation
    # future: need to specify different usage properties for different type
    REQUIRED_VALUES_KEYS = {'type'}


class Metadata(object):
    """Validator for asset metadata dictionaries."""
    REQUIRED_SECTIONS = {MetadataMain.KEY}
    MAIN_SECTIONS = {
        MetadataMain.KEY: MetadataMain
    }

    @staticmethod
    def validate(metadata):
        """Validator of the metadata composition

        :param metadata: dict
        :return: bool
        """
        for section_key in Metadata.REQUIRED_SECTIONS:
            if section_key not in metadata:
                return False

            section_metadata = metadata[section_key]
            if not section_metadata or not isinstance(section_metadata, dict):
                return False

            # Every required key must be present with a non-None value.
            spec = Metadata.MAIN_SECTIONS[section_key]
            incomplete = any(
                subkey not in section_metadata
                or section_metadata[subkey] is None
                for subkey in spec.REQUIRED_VALUES_KEYS)
            if incomplete:
                return False

        return True
class OpTemplate:
    """OpTemplate class for describing trusted operations."""

    def __init__(self, dictionary=None):
        """Create an empty template, optionally loading it from a dict."""
        self._tid = None
        self._creator = None
        self._metadata = None
        self._operation = None
        self._params = None
        self._proof = None

        if dictionary:
            self.from_dict(dictionary)

    @property
    def tid(self):
        """Get the op tid."""
        return self._tid

    @property
    def creator(self):
        """Get the creator address."""
        return self._creator

    @property
    def metadata(self):
        """Get the op metadata."""
        return self._metadata

    @property
    def operation(self):
        """Get the op code."""
        return self._operation

    @property
    def params(self):
        """Get the op params."""
        return self._params

    @property
    def proof(self):
        """Get the static proof, or None."""
        return self._proof

    def assign_tid(self, tid: str):
        """Attach a tid; it must carry the dt prefix.

        :param tid: str
        """
        assert tid.startswith(PREFIX), \
            f'"tid" seems invalid, must start with {PREFIX} prefix.'
        self._tid = tid

    def add_creator(self, creator_address: str):
        """Record the creator address.

        :param creator_address: str
        """
        self._creator = creator_address

    def add_metadata(self, values: dict):
        """Attach validated metadata; only Operation-type metadata is accepted.

        :param values: dict
        """
        values = copy.deepcopy(values) if values else {}
        assert Metadata.validate(values), \
            f'values {values} seems invalid.'

        if values['main']['type'] != 'Operation':
            raise AssertionError('Template must be Operation type.')

        self._metadata = values

    def add_template(self, operation, params):
        """Attach the trusted code and its required parameters.

        :param operation: trusted code, str
        :param params: required parameters for the code, dict
        """
        if not self._metadata:
            raise AssertionError(f'please add metadata first')

        self._operation = operation
        self._params = params

    def create_proof(self):
        """Compute and store the checksum proof over all template fields."""
        checksum = calc_checksum({
            'tid': self._tid,
            'creator': self._creator,
            'metadata': self._metadata,
            'operation': self._operation,
            'params': self._params
        })

        self._proof = {
            'created': get_timestamp(),
            'checksum': checksum
        }

        return checksum

    def to_dict(self):
        """Return the template as a JSON dict."""
        return {
            'tid': self._tid,
            'creator': self._creator,
            'metadata': self._metadata,
            'operation': self._operation,
            'params': self._params,
            'proof': self._proof
        }

    def from_dict(self, value_dict):
        """Load this template from a JSON dict and verify its checksum."""
        values = copy.deepcopy(value_dict)

        tid = values.pop('tid')
        creator = values.pop('creator')
        metadata = values.pop('metadata')
        operation = values.pop('operation')
        params = values.pop('params')
        proof = values.pop('proof')

        self.assign_tid(tid)
        self.add_creator(creator)
        self.add_metadata(metadata)
        self.add_template(operation, params)

        checksum = self.create_proof()

        if not isinstance(proof, dict) or proof.get(
                'checksum') is None or proof['checksum'] != checksum:
            raise AssertionError(f'wrong template checksum')

        self._proof = proof
class Service:
    """Service class for storing the asset descriptor."""
    INDEX = 'index'
    ENDPOINT = 'endpoint'
    DESCRIPTOR = 'descriptor'
    ATTRIBUTES = 'attributes'

    def __init__(self, index, endpoint, descriptor, attributes):
        """
        :param index: unique service index within a DDO
        :param endpoint: service endpoint url (may be None for Algorithm)
        :param descriptor: dict with template/constraint or workflow info
        :param attributes: free-form extra attributes
        """
        self._index = index
        self._endpoint = endpoint
        self._descriptor = descriptor
        self._attributes = attributes

    @property
    def index(self):
        """ Get the service index."""
        return self._index

    @property
    def endpoint(self):
        """ Get the service endpoint."""
        return self._endpoint

    @property
    def descriptor(self):
        """ Get the service descriptor."""
        return self._descriptor

    @property
    def attributes(self):
        """ Get the service attributes."""
        return self._attributes

    def to_dict(self):
        """
        Return the service as a JSON dict.

        Descriptor values (and list elements) exposing a to_dict() method
        are serialized recursively.

        :return: dict
        """
        descriptor = {}
        for key, value in self._descriptor.items():
            # IDIOM FIX: the former `isinstance(value, object)` guard was
            # always True; the effective check is just hasattr(value, 'to_dict').
            if hasattr(value, 'to_dict'):
                value = value.to_dict()
            elif isinstance(value, list):
                value = [v.to_dict() if hasattr(
                    v, 'to_dict') else v for v in value]
            descriptor[key] = value

        values = {
            self.INDEX: self._index,
            self.ENDPOINT: self._endpoint,
            self.DESCRIPTOR: descriptor,
            self.ATTRIBUTES: self._attributes
        }

        return values

    @classmethod
    def parse_dict(cls, value_dict):
        """Read a service dict.

        :return: (index, endpoint, descriptor, attributes); missing keys
            yield None.
        """
        values = copy.deepcopy(value_dict)
        _index = values.pop(cls.INDEX, None)
        _endpoint = values.pop(cls.ENDPOINT, None)
        _descriptor = values.pop(cls.DESCRIPTOR, None)
        _attributes = values.pop(cls.ATTRIBUTES, None)

        return _index, _endpoint, _descriptor, _attributes

    def validate(self, asset_type, child_dts):
        """Validator of the service composition

        :param asset_type: str
        :param child_dts: list
        :return: bool
        """
        if not self._endpoint and asset_type != 'Algorithm':
            return False
        if not self._descriptor or self._index is None or not isinstance(self._descriptor, dict):
            return False

        if bool(child_dts):
            # Composable assets must describe a workflow covering exactly
            # the declared child dts.
            workflow = self._descriptor.get('workflow')
            if not isinstance(workflow, dict) or set(workflow.keys()) != set(child_dts):
                return False

            for agreement in workflow.values():
                if not isinstance(agreement, dict) or agreement.get('service') is None or not isinstance(
                        agreement.get('constraint'), dict):
                    return False
        else:
            # Leaf assets must reference a trusted op template and constraints.
            if not self._descriptor.get('template') or not isinstance(
                    self._descriptor.get('constraint'), dict):
                return False

        return True
def calc_checksum(seed):
    """Calculate the hash3_256."""

    def _ordered(mapping: dict):
        # Rebuild the mapping with keys in ascending order, recursing into
        # nested dicts and into dicts that appear directly inside list
        # values (deeper list nesting is intentionally left untouched, to
        # keep checksums identical to previously issued proofs).
        result = {}
        for key in sorted(mapping):
            value = mapping[key]
            if isinstance(value, dict):
                value = _ordered(value)
            elif isinstance(value, list):
                value = [_ordered(item) if isinstance(item, dict) else item
                         for item in value]
            result[key] = value
        return result

    payload = json.dumps(_ordered(seed)).replace(" ", "")
    return hashlib.sha3_256(payload.encode('utf-8')).hexdigest()
def validate_leaf_template(leaf_ddo, keeper_op_template):
    """
    Check whether the leaf ddo contains illegal services. In this case,
    leaf constraints must provide the same parameters of used templates.

    :param leaf_ddo: DDO object for a leaf asset
    :param keeper_op_template: keeper instance of the op-template smart contract
    :return: bool
    """
    for service in leaf_ddo.services:
        op_descriptor = service.descriptor

        tid = op_descriptor.get('template')
        constraint = op_descriptor.get('constraint')
        # Reject early when the template id is missing or the constraint is
        # not a dict; _check_params would otherwise raise on a None constraint.
        if not tid or not isinstance(constraint, dict):
            return False

        data, op = resolve_op(tid, keeper_op_template)
        if not data or not op:
            return False

        params = json.loads(op.params)
        if not _check_params(params, constraint):
            return False

    return True
def validate_service_agreement(cdt_ddo, required_ddo):
    """
    Check whether a father ddo satisfies service requirements of a child ddo.
    In this case, the low-level constraints must be fulfilled and satisfied.

    :param cdt_ddo: DDO object for a high-level composable asset
    :param required_ddo: child DDO that needs to be satisfied
    :return: bool
    """
    is_cdt = required_ddo.is_cdt
    # An Algorithm at the top of the composition terminates the chain; an
    # Algorithm can never appear as a child requirement.
    terminal = (cdt_ddo.asset_type == 'Algorithm')

    if required_ddo.asset_type == 'Algorithm':
        return False

    for service in cdt_ddo.services:
        # Robustness: a composable service without a workflow dict cannot
        # fulfil anything (was a KeyError before).
        workflow = service.descriptor.get('workflow')
        fulfilled = workflow.get(required_ddo.dt) if isinstance(workflow, dict) else None

        if not fulfilled:
            return False

        sid = fulfilled.get('service')
        constraint = fulfilled.get('constraint')

        child_service = required_ddo.get_service_by_index(sid)
        if not child_service:
            return False

        if is_cdt:
            # A composable child exposes its own workflow; collect the
            # per-child constraints it promises downstream.
            sub_constraint = dict()
            for key, value in child_service.descriptor['workflow'].items():
                sub_constraint[key] = value['constraint']
        else:
            sub_constraint = child_service.descriptor['constraint']

        if not _check_fulfills(sub_constraint, constraint, terminal):
            return False

    return True


def _check_params(params: dict, constraint: dict):
    """
    Check whether a leaf asset provides the required parameters when it
    uses a trusted op template.

    :param params: required parameters for an op template
    :param constraint: leaf constraints
    :return: bool
    """
    return set(params.keys()) == set(constraint.keys())


def _check_fulfills(required_constraint: dict, fulfill_constraint: dict, terminal=False):
    """
    Check whether the low-level constraints are satisfied.

    :param required_constraint: low-level requirements
    :param fulfill_constraint: high-level fulfills
    :param terminal: true when it is an Algorithm asset
    :return: bool
    """
    if set(required_constraint.keys()) != set(fulfill_constraint.keys()):
        return False

    for key, value in fulfill_constraint.items():
        required = required_constraint[key]
        if isinstance(value, dict):
            if not isinstance(required, dict):
                return False

            if required and set(value.keys()) != set(required.keys()):
                return False

            for sub_key, sub_value in value.items():
                # A terminal (Algorithm) agreement must pin every value.
                if terminal and sub_value is None:
                    return False

                sub_required = required.get(sub_key)
                if sub_required and sub_value != sub_required:
                    return False
        else:
            if (terminal and value is None) or (required and value != required):
                return False

    return True
21 | 22 | :param id: refers to the enterprise identifier 23 | :param name: refers to the enterprise name 24 | :param desc: refers to the enterprise description 25 | :param from_wallet: the system account 26 | :return 27 | """ 28 | tx_hash = self.send_transaction( 29 | 'registerEnterprise', 30 | (id, name, desc), 31 | from_wallet 32 | ) 33 | 34 | receipt = self.get_tx_receipt(tx_hash) 35 | 36 | if not bool(receipt and receipt.status == 1): 37 | raise AssertionError(f'transaction failed with tx id {tx_hash}.') 38 | 39 | topic_param = self.events.EnterpriseRegistered().processReceipt(receipt) 40 | error_code = topic_param[0]['args']['_code'] 41 | 42 | if error_code == ErrorCode.SUCCESS: 43 | logger.debug(f'sucessfully register enterprise {name} for id {id}') 44 | elif error_code == ErrorCode.ENTERPRISE_EXISTS: 45 | raise AssertionError(f'The enterprise already exists for id {id}') 46 | else: 47 | raise AssertionError(f'ERROR_NO_PERMISSION') 48 | 49 | def update_enterprise(self, id, name, desc, from_wallet): 50 | """ 51 | Update the enterprise on chain by the admin. 
52 | 53 | :param id: refers to the enterprise identifier 54 | :param name: refers to the enterprise name 55 | :param desc: refers to the enterprise description 56 | :param from_wallet: the system account 57 | :return 58 | """ 59 | tx_hash = self.send_transaction( 60 | 'updateEnterprise', 61 | (id, name, desc), 62 | from_wallet 63 | ) 64 | 65 | receipt = self.get_tx_receipt(tx_hash) 66 | 67 | if not bool(receipt and receipt.status == 1): 68 | raise AssertionError(f'transaction failed with tx id {tx_hash}.') 69 | 70 | topic_param = self.events.EnterpriseRegistered().processReceipt(receipt) 71 | error_code = topic_param[0]['args']['_code'] 72 | 73 | if error_code == ErrorCode.SUCCESS: 74 | logger.debug(f'sucessfully update enterprise {name} for id {id}') 75 | elif error_code == ErrorCode.ENTERPRISE_NOT_EXISTS: 76 | raise AssertionError(f'The enterprise do not exists for id {id}') 77 | else: 78 | raise AssertionError(f'ERROR_NO_PERMISSION') 79 | 80 | def add_provider(self, id, from_wallet): 81 | """ 82 | Add a new provider on chain by the admin. 83 | 84 | :param id: refers to the provider identifier 85 | :param from_wallet: the system account 86 | :return 87 | """ 88 | tx_hash = self.send_transaction( 89 | 'addProvider', (id,), from_wallet 90 | ) 91 | 92 | receipt = self.get_tx_receipt(tx_hash) 93 | 94 | if not bool(receipt and receipt.status == 1): 95 | raise AssertionError(f'transaction failed with tx id {tx_hash}.') 96 | 97 | topic_param = self.events.ProviderAdded().processReceipt(receipt) 98 | error_code = topic_param[0]['args']['_code'] 99 | 100 | if error_code == ErrorCode.SUCCESS: 101 | logger.debug(f'sucessfully add provide for id {id}') 102 | elif error_code == ErrorCode.PROVIDER_EXISTS: 103 | raise AssertionError(f'The provider already exists for id {id}') 104 | else: 105 | raise AssertionError(f'ERROR_NO_PERMISSION') 106 | 107 | def update_provider(self, id, from_wallet): 108 | """ 109 | Update the provider on chain by the admin. 
110 | 111 | :param id: refers to the provider identifier 112 | :param from_wallet: the system account 113 | :return 114 | """ 115 | tx_hash = self.send_transaction('updateProvider', (id,), from_wallet) 116 | 117 | receipt = self.get_tx_receipt(tx_hash) 118 | 119 | if not bool(receipt and receipt.status == 1): 120 | raise AssertionError(f'transaction failed with tx id {tx_hash}.') 121 | 122 | topic_param = self.events.ProviderAdded().processReceipt(receipt) 123 | error_code = topic_param[0]['args']['_code'] 124 | 125 | if error_code == ErrorCode.SUCCESS: 126 | logger.debug(f'sucessfully update provide for id {id}') 127 | elif error_code == ErrorCode.PROVIDER_NOT_EXISTS: 128 | raise AssertionError(f'The provider do not exists for id {id}') 129 | else: 130 | raise AssertionError(f'ERROR_NO_PERMISSION') 131 | 132 | def check_enterprise(self, id): 133 | """ 134 | Check enterprise role. 135 | 136 | :param id: refers to the enterprise identifier 137 | :return: bool 138 | """ 139 | return self.contract_concise.isEnterprise(id) 140 | 141 | def check_provider(self, id): 142 | """ 143 | Check provider role. 144 | 145 | :param id: refers to the provider identifier 146 | :return: bool 147 | """ 148 | return self.contract_concise.isProvider(id) 149 | 150 | def get_enterprise(self, id): 151 | """ 152 | Get the enterprise info. 153 | 154 | :param id: refers to the enterprise identifier 155 | :return: Enterprise struct 156 | """ 157 | return self.contract_concise.getEnterprisebyId(id) 158 | 159 | def get_issuer_names(self, idx): 160 | """ 161 | Get the list of names of issuer enterprises. 
class Role:
    """Access-control role identifiers checked by RoleController."""
    ROLE_ADMIN = 100
    ROLE_ENTERPRISE = 101
    ROLE_PROVIDER = 102


class Operation:
    """Permissioned operation codes checked by RoleController."""
    MODIFY_ADMIN = 200
    MODIFY_ENTERPRISE = 201
    MODIFY_PROVIDER = 202
    MODIFY_OP = 203
    MODIFY_ASSET = 204
    MODIFY_AUTHORIZE = 205
    MODIFY_TASK = 206


class ErrorCode:
    """Status codes emitted in contract events (the ``_code`` argument)."""
    SUCCESS = 0
    ERROR_NO_PERMISSION = 10000
    ROLE_EXISTS = 1001
    ENTERPRISE_EXISTS = 2001
    ENTERPRISE_NOT_EXISTS = 2002
    PROVIDER_EXISTS = 2003
    PROVIDER_NOT_EXISTS = 2004
    TEMPLATE_EXISTS = 3001
    TEMPLATE_NOT_EXISTS = 3002
    DT_EXISTS = 4001
    DT_NOT_EXISTS = 4002
    CDT_EXISTS = 4003
    CDT_NOT_EXISTS = 4004
    # NOTE(review): "GRATED" looks like a typo for "GRANTED", but the names
    # are kept as-is because other modules reference them by this spelling.
    DT_GRATED = 4005
    DT_NOT_GRATED = 4006
new data token on chain. 23 | 24 | :param dt: refers to data token identifier 25 | :param owner: refers to data token owner 26 | :param is_leaf: leaf dt or composable dt 27 | :param checksum: checksum associated with dt/metadata 28 | :param ipfs_path: refers to the metadata storage path 29 | :param from_wallet: issuer account 30 | :return 31 | """ 32 | tx_hash = self.send_transaction( 33 | 'mintDataToken', 34 | (dt, owner, is_leaf, checksum, ipfs_path), 35 | from_wallet 36 | ) 37 | 38 | receipt = self.get_tx_receipt(tx_hash) 39 | 40 | if not bool(receipt and receipt.status == 1): 41 | raise AssertionError(f'transaction failed with tx id {tx_hash}.') 42 | 43 | topic_param = self.events.DataTokenMinted().processReceipt(receipt) 44 | error_code = topic_param[0]['args']['_code'] 45 | 46 | if error_code == ErrorCode.SUCCESS: 47 | logger.debug(f'sucessfully mint data token for dt {dt}') 48 | elif error_code == ErrorCode.DT_EXISTS: 49 | logger.warning(f'The data token already exists for dt {dt}') 50 | else: 51 | raise AssertionError(f'ERROR_NO_PERMISSION') 52 | 53 | def start_compose_dt(self, cdt, child_dts, from_wallet): 54 | """ 55 | Activate cdt when all perms are ready. 
56 | 57 | :param cdt: refers to cdt identifier 58 | :param child_dts: associated with child_dts identifier 59 | :param from_wallet: aggregator account 60 | :return 61 | """ 62 | tx_hash = self.send_transaction( 63 | 'startComposeDT', 64 | (cdt, child_dts), 65 | from_wallet 66 | ) 67 | 68 | receipt = self.get_tx_receipt(tx_hash) 69 | 70 | if not bool(receipt and receipt.status == 1): 71 | raise AssertionError(f'transaction failed with tx id {tx_hash}.') 72 | 73 | topic_param = self.events.CDTMinted().processReceipt(receipt) 74 | error_code = topic_param[0]['args']['_code'] 75 | 76 | if error_code == ErrorCode.SUCCESS: 77 | logger.debug( 78 | f'sucessfully mint composable data token for cdt {cdt}') 79 | elif error_code == ErrorCode.CDT_EXISTS: 80 | logger.warning( 81 | f'The composable data token already exists for cdt {cdt}') 82 | else: 83 | raise AssertionError(f'ERROR_NO_PERMISSION') 84 | 85 | def grant_dt(self, dt, grantee, from_wallet): 86 | """ 87 | Grant one dt to other dt. 88 | 89 | :param dt: refers to data token identifier 90 | :param grantee: refers to granted dt identifier 91 | :param from_wallet: owner account 92 | :return 93 | """ 94 | tx_hash = self.send_transaction( 95 | 'grantPermission', 96 | (dt, grantee), 97 | from_wallet 98 | ) 99 | 100 | receipt = self.get_tx_receipt(tx_hash) 101 | 102 | if not bool(receipt and receipt.status == 1): 103 | raise AssertionError(f'transaction failed with tx id {tx_hash}.') 104 | 105 | topic_param = self.events.DataTokenGranted().processReceipt(receipt) 106 | error_code = topic_param[0]['args']['_code'] 107 | 108 | if error_code == ErrorCode.SUCCESS: 109 | logger.debug( 110 | f'sucessfully grant permission for pair of dt {dt} and cdt {grantee} ') 111 | elif error_code == ErrorCode.DT_GRATED: 112 | logger.warning(f'The permission already granted') 113 | elif error_code == ErrorCode.DT_NOT_EXISTS: 114 | raise AssertionError(f'error, some dt assets are not found') 115 | elif error_code == ErrorCode.CDT_NOT_EXISTS: 116 
    def check_dt_available(self, dt):
        """
        Get dt availability (read-only contract call).

        :param dt: refers to the dt identifier
        :return: bool
        """
        return self.contract_concise.isDTAvailable(dt)

    def check_cdt_available(self, cdt):
        """
        Get cdt availability (read-only contract call).

        :param cdt: refers to the cdt identifier
        :return: bool
        """
        return self.contract_concise.isCDTAvailable(cdt)

    def check_dt_perm(self, dt, grantee):
        """
        Check permission between a dt and a granted dt.

        :param dt: refers to data token identifier
        :param grantee: refers to granted dt identifier
        :return: bool
        """
        return self.contract_concise.getPermission(dt, grantee)

    def get_dt_owner(self, dt):
        """
        Get the owner for a data token.

        :param dt: refers to data token identifier
        :return: owner address
        """
        return self.contract_concise.getDTOwner(dt)

    def get_dt_register(self, dt):
        """
        Get the dt records.

        :param dt: refers to data token identifier
        :return: DataToken struct
        """
        return self.contract_concise.getDTRegister(dt)

    def blockNumberUpdated(self, dt):
        """
        Get the blockUpdated for a dt.

        NOTE(review): camelCase name kept for backward compatibility with
        existing callers.

        :param dt: refers to data token identifier
        :return: int blockUpdated
        """
        return self.contract_concise.getBlockNumberUpdated(dt)

    def check_clinks(self, cdt, child_dts):
        """
        Check permission for related parties of a Composable DT.

        :param cdt: refers to cdt identifier
        :param child_dts: refers to child_dts identifiers
        :return: bool
        """
        return self.contract_concise.CLinksCheck(cdt, child_dts)
188 | 189 | :return: int 190 | """ 191 | return self.contract_concise.getDTNum() 192 | 193 | def get_available_dts(self): 194 | """ 195 | Get all the available datatokens. 196 | 197 | :return: DataToken[] 198 | """ 199 | return self.contract_concise.getDTMap() 200 | 201 | ###################### 202 | def get_owner_assets(self, address): 203 | """ 204 | Get all assets for a given owner. 205 | 206 | :param address: refers to owner address 207 | :return: List Datatoken 208 | """ 209 | _filters = {'_owner': address, '_code': ErrorCode.SUCCESS} 210 | 211 | block_filter = EventFilter( 212 | DTFactory.DT_MINT_EVENT, 213 | getattr(self.events, DTFactory.DT_MINT_EVENT), 214 | from_block=0, 215 | to_block='latest', 216 | argument_filters=_filters 217 | ) 218 | 219 | log_items = block_filter.get_all_entries(max_tries=5) 220 | dt_list = [] 221 | for log_i in log_items: 222 | dt_list.append(log_i.args['_dt']) 223 | 224 | return dt_list 225 | 226 | def get_dt_grantees(self, dt): 227 | """ 228 | Get the granteed father for a dt. 
229 | 230 | :param dt: refers to the data token identifier 231 | :return: List granteed dts 232 | """ 233 | _filters = {'_dt': dt, '_code': ErrorCode.SUCCESS} 234 | 235 | block_filter = EventFilter( 236 | DTFactory.DT_GRANT_EVENT, 237 | getattr(self.events, DTFactory.DT_GRANT_EVENT), 238 | from_block=0, 239 | to_block='latest', 240 | argument_filters=_filters 241 | ) 242 | 243 | log_items = block_filter.get_all_entries(max_tries=5) 244 | grantee_list = [] 245 | for log_i in log_items: 246 | grantee_list.append(log_i.args['_grantee']) 247 | 248 | return grantee_list 249 | -------------------------------------------------------------------------------- /datatoken/model/keeper.py: -------------------------------------------------------------------------------- 1 | """Keeper module.""" 2 | # Copyright 2021 The DataToken Authors 3 | # SPDX-License-Identifier: LGPL-2.1-only 4 | 5 | import os 6 | import logging 7 | from os import getenv 8 | from configparser import ConfigParser 9 | 10 | from datatoken.web3.contract_handler import ContractHandler 11 | from datatoken.web3.web3_provider import Web3Provider 12 | from datatoken.model.role_controller import RoleController 13 | from datatoken.model.asset_provider import AssetProvider 14 | from datatoken.model.op_template import OpTemplate 15 | from datatoken.model.dt_factory import DTFactory 16 | from datatoken.model.task_market import TaskMarket 17 | 18 | logger = logging.getLogger('datatoken') 19 | 20 | 21 | class Keeper: 22 | """The entry point for accessing datatoken contracts.""" 23 | 24 | def __init__(self, options_dict=None): 25 | 26 | filename = getenv('CONFIG_FILE', './config.ini') 27 | if not os.path.exists(filename) and not options_dict: 28 | raise FileNotFoundError(f'please provider the config first') 29 | 30 | config_parser = ConfigParser() 31 | if os.path.exists(filename): 32 | config_parser.read(filename) 33 | if options_dict: 34 | config_parser.read_dict(options_dict) 35 | 36 | artifacts_path = 
config_parser.get('keeper', 'artifacts_path') 37 | network_url = config_parser.get('keeper', 'network_url') 38 | network_name = config_parser.get('keeper', 'network_name') 39 | address_file = config_parser.get('keeper', 'address_file') 40 | 41 | ContractHandler.set_artifacts_path(artifacts_path) 42 | addresses = ContractHandler.get_contracts_addresses( 43 | network_name, address_file) 44 | 45 | self._web3 = Web3Provider.get_web3(network_url=network_url) 46 | 47 | self.role_controller = RoleController( 48 | addresses.get(RoleController.CONTRACT_NAME)) 49 | self.asset_provider = AssetProvider( 50 | addresses.get(AssetProvider.CONTRACT_NAME)) 51 | self.op_template = OpTemplate(addresses.get(OpTemplate.CONTRACT_NAME)) 52 | self.dt_factory = DTFactory(addresses.get(DTFactory.CONTRACT_NAME)) 53 | self.task_market = TaskMarket(addresses.get(TaskMarket.CONTRACT_NAME)) 54 | 55 | logger.debug('Keeper instance initialized: ') 56 | 57 | @property 58 | def web3(self): 59 | return self._web3 60 | -------------------------------------------------------------------------------- /datatoken/model/op_template.py: -------------------------------------------------------------------------------- 1 | """Trusted Operation Template""" 2 | # Copyright 2021 The DataToken Authors 3 | # SPDX-License-Identifier: LGPL-2.1-only 4 | 5 | import logging 6 | 7 | from datatoken.web3.contract_base import ContractBase 8 | from datatoken.model.constants import ErrorCode 9 | 10 | logger = logging.getLogger(__name__) 11 | 12 | 13 | class OpTemplate(ContractBase): 14 | CONTRACT_NAME = 'OpTemplate' 15 | TEMPLATE_PUBLISH_EVENT = 'TemplatePublished' 16 | 17 | def publish_template(self, tid, name, checksum, ipfs_path, from_wallet): 18 | """ 19 | Publish an off-chain code template on chain. 
20 | 21 | :param tid: refers to the op template identifier 22 | :param name: refers to the op template name 23 | :param checksum: checksum associated with tid/metadata 24 | :param ipfs_path: referes to the metadata storage path 25 | :param from_wallet: publisher account 26 | :return 27 | """ 28 | tx_hash = self.send_transaction( 29 | 'publishTemplate', 30 | (tid, name, checksum, ipfs_path), 31 | from_wallet 32 | ) 33 | 34 | receipt = self.get_tx_receipt(tx_hash) 35 | 36 | if not bool(receipt and receipt.status == 1): 37 | raise AssertionError(f'transaction failed with tx id {tx_hash}.') 38 | 39 | topic_param = self.events.TemplatePublished().processReceipt(receipt) 40 | error_code = topic_param[0]['args']['_code'] 41 | 42 | if error_code == ErrorCode.SUCCESS: 43 | logger.debug( 44 | f'sucessfully publish op template {name} for tid {tid}') 45 | elif error_code == ErrorCode.TEMPLATE_EXISTS: 46 | raise AssertionError(f'The template already exists for tid {tid}') 47 | else: 48 | raise AssertionError(f'ERROR_NO_PERMISSION') 49 | 50 | def update_template(self, tid, name, checksum, ipfs_path, from_wallet): 51 | """ 52 | Update the op template that is already exists on chain. 
53 | 54 | :param tid: refers to the op template identifier 55 | :param name: refers to the op template name 56 | :param checksum: checksum associated with tid/metadata 57 | :param ipfs_path: referes to the metadata storage path 58 | :param from_wallet: publisher account 59 | :return 60 | """ 61 | tx_hash = self.send_transaction( 62 | 'updateTemplate', 63 | (tid, name, checksum, ipfs_path), 64 | from_wallet 65 | ) 66 | 67 | receipt = self.get_tx_receipt(tx_hash) 68 | 69 | if not bool(receipt and receipt.status == 1): 70 | raise AssertionError(f'transaction failed with tx id {tx_hash}.') 71 | 72 | topic_param = self.events.TemplatePublished().processReceipt(receipt) 73 | error_code = topic_param[0]['args']['_code'] 74 | 75 | if error_code == ErrorCode.SUCCESS: 76 | logger.debug( 77 | f'sucessfully update op template {name} for tid {tid}') 78 | elif error_code == ErrorCode.TEMPLATE_NOT_EXISTS: 79 | raise AssertionError(f'The template not exists for tid {tid}') 80 | else: 81 | raise AssertionError(f'ERROR_NO_PERMISSION') 82 | 83 | def is_template_exist(self, tid): 84 | """ 85 | Check template existence. 86 | 87 | :param tid: refers to the address identifier 88 | :return: bool 89 | """ 90 | return self.contract_concise.isTemplateExist(tid) 91 | 92 | def get_template(self, tid): 93 | """ 94 | Get the template records by id. 95 | 96 | :param tid: refers to the address identifier 97 | :return: Template struct 98 | """ 99 | return self.contract_concise.getTemplateById(tid) 100 | 101 | def blockNumberUpdated(self, tid): 102 | """ 103 | Get the blockUpdated for a template. 104 | 105 | :param tid: refers to the address identifier 106 | :return: int blockUpdated 107 | """ 108 | return self.contract_concise.getBlockNumberUpdated(tid) 109 | 110 | def get_template_num(self): 111 | """ 112 | Get the total numbers of op templates. 
    def check_role(self, id, role):
        """
        Check role for a given address (read-only contract call).

        :param id: refers to address identifier
        :param role: refers to the certain role (see constants.Role)
        :return: bool
        """
        return self.contract_concise.checkRole(id, role)

    def check_permission(self, id, operation):
        """
        Check operation permission for a given address (read-only call).

        :param id: refers to address identifier
        :param operation: refers to the certain operation (see constants.Operation)
        :return: bool
        """
        return self.contract_concise.checkPermission(id, operation)
40 | 41 | :param id: refers to address identifier 42 | :param role: refers to the certain role 43 | :return 44 | """ 45 | tx_hash = self.send_transaction( 46 | 'addRole', 47 | (id, role), 48 | from_wallet 49 | ) 50 | 51 | receipt = self.get_tx_receipt(tx_hash) 52 | 53 | if not bool(receipt and receipt.status == 1): 54 | raise AssertionError(f'transaction failed with tx id {tx_hash}.') 55 | 56 | topic_param = self.events.RoleAdded().processReceipt(receipt) 57 | error_code = topic_param[0]['args']['_code'] 58 | 59 | if error_code == ErrorCode.SUCCESS: 60 | logger.debug(f'sucessfully add role {role} for id {id}') 61 | elif error_code == ErrorCode.ROLE_EXISTS: 62 | raise AssertionError(f'The role {role} already exists for id {id}') 63 | else: 64 | raise AssertionError(f'ERROR_NO_PERMISSION') 65 | -------------------------------------------------------------------------------- /datatoken/model/task_market.py: -------------------------------------------------------------------------------- 1 | """Task Market for Data Collaboration""" 2 | # Copyright 2021 The DataToken Authors 3 | # SPDX-License-Identifier: LGPL-2.1-only 4 | 5 | import logging 6 | 7 | from datatoken.web3.contract_base import ContractBase 8 | from datatoken.web3.event_filter import EventFilter 9 | from datatoken.model.constants import ErrorCode 10 | 11 | logger = logging.getLogger(__name__) 12 | 13 | 14 | class TaskMarket(ContractBase): 15 | CONTRACT_NAME = 'TaskMarket' 16 | TASK_ADD_EVENT = 'TaskAdded' 17 | JOB_ADD_EVENT = 'JobAdded' 18 | 19 | def create_task(self, name, desc, from_wallet): 20 | """ 21 | Add a new task on chain. 
22 | 23 | :param name: refers to the task name 24 | :param desc: refers to the task description 25 | :param from_wallet: demander account 26 | :return: int task_id 27 | """ 28 | tx_hash = self.send_transaction( 29 | 'createTask', 30 | (name, desc), 31 | from_wallet 32 | ) 33 | receipt = self.get_tx_receipt(tx_hash) 34 | 35 | if not bool(receipt and receipt.status == 1): 36 | raise AssertionError(f'transaction failed with tx id {tx_hash}.') 37 | 38 | topic_param = self.events.TaskAdded().processReceipt(receipt) 39 | error_code = topic_param[0]['args']['_code'] 40 | 41 | if error_code == ErrorCode.SUCCESS: 42 | logger.debug(f'sucessfully create task for name {name}') 43 | else: 44 | raise AssertionError(f'ERROR_NO_PERMISSION') 45 | 46 | return topic_param[0]['args']['_taskId'] 47 | 48 | def add_job(self, cdt, task_id, from_wallet): 49 | """ 50 | Create a new job on chain with the algorithm cdt. 51 | 52 | :param cdt: refers to the composable data token identifier 53 | :param task_id: refers to the task id 54 | :param from_wallet: solver account 55 | :return: int job_id 56 | """ 57 | tx_hash = self.send_transaction( 58 | 'addJob', 59 | (cdt, task_id), 60 | from_wallet 61 | ) 62 | 63 | receipt = self.get_tx_receipt(tx_hash) 64 | 65 | if not bool(receipt and receipt.status == 1): 66 | raise AssertionError(f'transaction failed with tx id {tx_hash}.') 67 | 68 | topic_param = self.events.JobAdded().processReceipt(receipt) 69 | error_code = topic_param[0]['args']['_code'] 70 | 71 | if error_code == ErrorCode.SUCCESS: 72 | logger.debug(f'sucessfully add job for cdt {cdt}') 73 | else: 74 | raise AssertionError(f'ERROR_NO_PERMISSION') 75 | 76 | return topic_param[0]['args']['_jobId'] 77 | 78 | def get_task(self, task_id): 79 | """ 80 | Get task info. 81 | 82 | :param task_id: refers to the task id 83 | :return: Task struct 84 | """ 85 | return self.contract_concise.getTaskbyId(task_id) 86 | 87 | def get_job(self, job_id): 88 | """ 89 | Get job info. 
90 | 91 | :param job_id: refers to the job id 92 | :return: Job struct 93 | """ 94 | return self.contract_concise.getJobbyId(job_id) 95 | 96 | def get_task_num(self): 97 | """ 98 | Get the total numbers of tasks. 99 | 100 | :return: int 101 | """ 102 | return self.contract_concise.getTaskNum() 103 | 104 | def get_job_num(self): 105 | """ 106 | Get the total numbers of jobs. 107 | 108 | :return: int 109 | """ 110 | return self.contract_concise.getJobNum() 111 | 112 | ###################### 113 | def get_cdt_jobs(self, cdt): 114 | """ 115 | Get previous jobs for a given cdt. 116 | 117 | :param cdt: refers to the composable data token identifier 118 | :return: List Job 119 | """ 120 | _filters = {'_cdt': cdt, '_code': ErrorCode.SUCCESS} 121 | 122 | block_filter = EventFilter( 123 | TaskMarket.JOB_ADD_EVENT, 124 | getattr(self.events, TaskMarket.JOB_ADD_EVENT), 125 | from_block=0, 126 | to_block='latest', 127 | argument_filters=_filters 128 | ) 129 | 130 | log_items = block_filter.get_all_entries(max_tries=5) 131 | job_list = [] 132 | for log_i in log_items: 133 | _jobId = log_i.args['_jobId'] 134 | _taskId = log_i.args['_taskId'] 135 | _solver = log_i.args['_solver'] 136 | _task = self.get_task(_taskId) 137 | _demander = _task[0] 138 | _name = _task[1] 139 | _desc = _task[2] 140 | 141 | job_list.append( 142 | (_jobId, _solver, _taskId, _demander, _name, _desc)) 143 | 144 | return job_list 145 | -------------------------------------------------------------------------------- /datatoken/service/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MVP-Labs/data-token/fae7adc8945418590764f284ccd70f9dbb4a4382/datatoken/service/__init__.py -------------------------------------------------------------------------------- /datatoken/service/asset.py: -------------------------------------------------------------------------------- 1 | """Asset service module.""" 2 | # Copyright 2021 The DataToken Authors 
    def __init__(self, config):
        """Wire up keeper contract wrappers and helper services.

        :param config: configuration object exposing keeper_options
        """
        keeper = Keeper(config.keeper_options)

        # Contract wrappers used directly by this service.
        self.dt_factory = keeper.dt_factory
        self.asset_provider = keeper.asset_provider
        # Verification / provenance helpers share the same config.
        self.verifier = VerifierService(config)
        self.tracer = TracerService(config)

        self.config = config
34 | 35 | :param metadata: refers to the asset metadata 36 | :param services: list of asset services 37 | :param owner_address: refers to the asset owner 38 | :param child_dts: list of child asset identifiers 39 | :param verify: check the correctness of asset services 40 | :return ddo: DDO instance 41 | """ 42 | ddo = DDO() 43 | ddo.add_metadata(metadata, child_dts) 44 | ddo.add_creator(owner_address) 45 | 46 | for service in services: 47 | ddo.add_service(service) 48 | 49 | ddo.assign_dt(DTHelper.generate_new_dt()) 50 | ddo.create_proof() 51 | 52 | # make sure the generated ddo is under system constraits 53 | if verify and not self.verifier.verify_services(ddo): 54 | raise AssertionError(f'Service agreements are not satisfied') 55 | 56 | return ddo 57 | 58 | def publish_dt(self, ddo, issuer_wallet): 59 | """ 60 | Publish a ddo to the decentralized storage network and register its 61 | data token on the smart-contract chain. 62 | 63 | :param ddo: refers to the asset DDO document 64 | :param issuer_wallet: issuer account, enterprize now 65 | :return 66 | """ 67 | ipfs_client = IPFSProvider(self.config) 68 | ipfs_path = ipfs_client.add(ddo.to_dict()) 69 | 70 | dt = DTHelper.dt_to_id(ddo.dt) 71 | owner = ddo.creator 72 | isLeaf = not bool(ddo.child_dts) 73 | checksum = ddo.proof['checksum'] 74 | 75 | self.dt_factory.mint_dt(dt, owner, isLeaf, checksum, 76 | ipfs_path, issuer_wallet) 77 | 78 | return 79 | 80 | def grant_dt_perm(self, dt, grantee, owner_wallet): 81 | """ 82 | Grant one dt to other dt. 83 | 84 | :param dt: refers to data token identifier 85 | :param grantee: refers to granted dt identifier 86 | :param owner_wallet: owner account 87 | :return 88 | """ 89 | _dt = DTHelper.dt_to_id(dt) 90 | _grantee = DTHelper.dt_to_id(grantee) 91 | 92 | self.dt_factory.grant_dt(_dt, _grantee, owner_wallet) 93 | 94 | return 95 | 96 | def activate_cdt(self, cdt, child_dts, aggregator_wallet): 97 | """ 98 | Activate cdt when all perms are ready. 
99 | 100 | :param cdt: refers to cdt identifier 101 | :param child_dts: associated with child_dts identifier 102 | :param aggregator_wallet: aggregator account 103 | :return 104 | """ 105 | _cdt = DTHelper.dt_to_id(cdt) 106 | _child_dts = [DTHelper.dt_to_id(dt) for dt in child_dts] 107 | 108 | self.dt_factory.start_compose_dt(_cdt, _child_dts, aggregator_wallet) 109 | 110 | return 111 | 112 | def check_service_terms(self, cdt, dt, owner_address, signature): 113 | """ 114 | Check service agreements automatically when receiving a remote permission 115 | authorization request, used by Compute-to-Data. 116 | 117 | :param cdt: refers to cdt identifier provided by aggregator 118 | :param dt: refers to dt identifier owned by the provider grid 119 | :param owner_address: asset owner address 120 | :param signature: signed by aggregator, [consume_address, cdt] 121 | :return: bool 122 | """ 123 | if self.verifier.check_dt_perm(dt, cdt): 124 | return True 125 | 126 | if not self.verifier.check_dt_owner(dt, owner_address): 127 | return False 128 | 129 | data, cdt_ddo = resolve_asset(cdt, self.dt_factory) 130 | if not data or not cdt_ddo: 131 | return False 132 | 133 | consume_address = data[1] 134 | original_msg = f'{consume_address}{cdt}' 135 | if not self.verifier.verify_signature(consume_address, signature, original_msg): 136 | return False 137 | 138 | checksum = data[2] 139 | if not self.verifier.verify_ddo_integrity(cdt_ddo, checksum): 140 | return False 141 | 142 | if not self.verifier.verify_services(cdt_ddo, [dt], False): 143 | return False 144 | 145 | return True 146 | 147 | def get_dt_marketplace(self): 148 | """ 149 | Get all available dts in the marketplace. 
    def get_dt_details(self, dt):
        """
        Get the detailed information given a datatoken.

        :param dt: refers to dt identifier
        :return: tuple (dt_info dict, service list, union_data or None), or
            None when the dt cannot be resolved or fails the integrity check
        """
        data, ddo = resolve_asset(dt, self.dt_factory)
        if not data or not ddo:
            return None

        # Reject documents whose off-chain content no longer matches the
        # checksum anchored on chain.
        checksum = data[2]
        if not self.verifier.verify_ddo_integrity(ddo, checksum):
            return None

        owner = data[0]
        issuer = data[1]
        issuer_name = self.asset_provider.get_enterprise(issuer)[0]

        asset_name = ddo.metadata['main'].get('name')
        asset_desc = ddo.metadata['main'].get('desc')
        asset_type = ddo.metadata['main'].get('type')
        asset_fig = ddo.metadata['main'].get('fig')

        dt_info = {"name": asset_name, "owner": owner, "issuer": issuer_name,
                   "desc": asset_desc, "type": asset_type, "fig": asset_fig}

        # Composable tokens additionally expose their union structure as a
        # JSON-ready tree rooted at this dt.
        union_data = None
        if ddo.is_cdt:
            union_paths = self.tracer.trace_data_union(ddo, [ddo.dt])
            tree = self.tracer.tree_format(union_paths)
            union_data = self.tracer.tree_to_json(tree)
            # self.tracer.print_tree(tree, indent=[], final_node=True)

        service_lists = []
        for service in ddo.services:
            sid = service.index
            op_name = service.attributes.get('op_name')
            price = service.attributes['price']
            constrains = service.descriptor

            service_lists.append(
                {"sid": sid, "op": op_name, "price": price, "constrains": constrains})

        return (dt_info, service_lists, union_data)
43 | 44 | :param cdt: refers to the algorithm composable data token 45 | :param task_id: refers to the task id that be solved 46 | :param from_wallet: solver account 47 | :return: int job_id 48 | """ 49 | _id = DTHelper.dt_to_id(cdt) 50 | job_id = self.task_market.add_job(_id, task_id, solver_wallet) 51 | return job_id 52 | 53 | def check_remote_compute(self, cdt, dt, job_id, owner_address, signature): 54 | """ 55 | Check job status and resource permissions automatically when receiving an 56 | on-premise computation request, used by Compute-to-Data. 57 | 58 | :param cdt: refers to cdt identifier provided by solver 59 | :param dt: refers to dt identifier owned by the provider grid 60 | :param job_id: refers to job identifier in the task market 61 | :param owner_address: asset owner address 62 | :param signature: signed by solver, [solver_address, job_id] 63 | :return: bool 64 | """ 65 | if not self.verifier.verify_job_registered(job_id, cdt): 66 | return False 67 | 68 | if not self.verifier.check_dt_owner(dt, owner_address): 69 | return False 70 | 71 | data, cdt_ddo = resolve_asset(cdt, self.dt_factory) 72 | if not data or not cdt_ddo: 73 | return False 74 | 75 | if not self.verifier.check_asset_type(cdt_ddo, 'Algorithm'): 76 | return False 77 | 78 | solver_address = data[1] 79 | checksum = data[2] 80 | 81 | original_msg = f'{solver_address}{job_id}' 82 | if not self.verifier.verify_signature(solver_address, signature, original_msg): 83 | return False 84 | 85 | if not self.verifier.verify_ddo_integrity(cdt_ddo, checksum): 86 | return False 87 | 88 | if not self.verifier.verify_perms_ready(cdt_ddo, required_dt=dt): 89 | return False 90 | 91 | return True 92 | 93 | def fetch_exec_code(self, cdt, leaf_dt): 94 | """ 95 | Get the code template and its fulfiled arguments, given a father cdt and a 96 | leaf dt. The father ddo specifies which child service/template to use. 
    def __init__(self, config):
        """
        Initialize the system service.

        :param config: configuration object exposing ``keeper_options``,
            used to connect the keeper smart contracts
        """
        keeper = Keeper(config.keeper_options)

        # On-chain contract handles used by this service.
        self.asset_provider = keeper.asset_provider
        self.op_template = keeper.op_template
        self.verifier = VerifierService(config)

        self.config = config
32 | 33 | :param address: refers to the enterprise address 34 | :param name: refers to the enterprise name 35 | :param desc: refers to the enterprise description 36 | :param from_wallet: the system account 37 | :return 38 | """ 39 | if not self.verifier.check_enterprise(address): 40 | self.asset_provider.register_enterprise( 41 | address, name, desc, from_wallet) 42 | else: 43 | self.asset_provider.update_enterprise( 44 | address, name, desc, from_wallet) 45 | 46 | return 47 | 48 | def add_provider(self, address, from_wallet): 49 | """ 50 | Add a new provider on-chain. 51 | 52 | :param address: refers to the provider address 53 | :param from_wallet: the system account 54 | :return 55 | """ 56 | if not self.verifier.check_provider(address): 57 | self.asset_provider.add_provider(address, from_wallet) 58 | else: 59 | self.asset_provider.update_provider(address, from_wallet) 60 | 61 | return 62 | 63 | def publish_template(self, metadata, operation, params, from_wallet): 64 | """ 65 | Publish the op template on chain. 
66 | 67 | :param metadata: refers to the template metadata 68 | :param operation: refers to the code template 69 | :param params: refers to the code parameters 70 | :param from_wallet: the system account 71 | :return 72 | """ 73 | op = OpTemplate() 74 | 75 | op.add_metadata(metadata) 76 | op.add_template(operation, params) 77 | op.add_creator(from_wallet.address) 78 | op.assign_tid(DTHelper.generate_new_dt()) 79 | op.create_proof() 80 | 81 | ipfs_client = IPFSProvider(self.config) 82 | ipfs_path = ipfs_client.add(op.to_dict()) 83 | 84 | tid = DTHelper.dt_to_id(op.tid) 85 | name = metadata['main']['name'] 86 | checksum = op.proof['checksum'] 87 | 88 | if not self.verifier.check_op_exist(op.tid): 89 | self.op_template.publish_template( 90 | tid, name, checksum, ipfs_path, from_wallet) 91 | else: 92 | self.op_template.update_template( 93 | tid, name, checksum, ipfs_path, from_wallet) 94 | 95 | return op 96 | -------------------------------------------------------------------------------- /datatoken/service/tracer.py: -------------------------------------------------------------------------------- 1 | """Tracer service module.""" 2 | # Copyright 2021 The DataToken Authors 3 | # SPDX-License-Identifier: LGPL-2.1-only 4 | 5 | import logging 6 | 7 | from datatoken.core.dt_helper import DTHelper 8 | from datatoken.store.asset_resolve import resolve_asset 9 | from datatoken.model.keeper import Keeper 10 | from datatoken.service.verifier import VerifierService 11 | 12 | logger = logging.getLogger(__name__) 13 | 14 | 15 | class TracerService(object): 16 | """The entry point for accessing the tracer service.""" 17 | TERMINAL = 'Algorithm' 18 | 19 | def __init__(self, config): 20 | keeper = Keeper(config.keeper_options) 21 | 22 | self.asset_provider = keeper.asset_provider 23 | self.op_template = keeper.op_template 24 | self.dt_factory = keeper.dt_factory 25 | self.task_market = keeper.task_market 26 | self.verifier = VerifierService(config) 27 | 28 | self.config = config 29 | 
30 | def get_enterprise(self, id): 31 | """Get the enterprise info.""" 32 | return self.asset_provider.get_enterprise(id) 33 | 34 | def get_task(self, task_id): 35 | """Get task info.""" 36 | return self.task_market.get_task(task_id) 37 | 38 | def get_job(self, job_id): 39 | """Get job info.""" 40 | return self.task_market.get_job(job_id) 41 | 42 | def get_dt_owner(self, dt): 43 | """Get the owner for a data token.""" 44 | _dt = DTHelper.dt_to_id_bytes(dt) 45 | return self.dt_factory.get_dt_owner(_dt) 46 | 47 | def get_marketplace_stat(self): 48 | """Get the statistics information.""" 49 | dt_nums = self.dt_factory.get_dt_num() 50 | template_nums = self.op_template.get_template_num() 51 | task_nums = self.task_market.get_task_num() 52 | job_nums = self.task_market.get_job_num() 53 | 54 | stats = (dt_nums, template_nums, task_nums, job_nums) 55 | 56 | return stats 57 | 58 | def trace_owner_assets(self, address): 59 | """Get all assets for a given owner.""" 60 | return self.dt_factory.get_owner_assets(address) 61 | 62 | def trace_dt_grantees(self, dt): 63 | """Get the list of granteed father for a dt.""" 64 | _dt = DTHelper.dt_to_id_bytes(dt) 65 | return self.dt_factory.get_dt_grantees(_dt) 66 | 67 | def trace_cdt_jobs(self, cdt): 68 | """Get the list of previous jobs for a given cdt.""" 69 | return self.task_market.get_cdt_jobs(cdt) 70 | 71 | def trace_data_union(self, ddo, prefix): 72 | """ 73 | Trace the data union structure. 74 | 75 | :param ddo: metadata object. 76 | :param prefix: fixed prefix path, then find its subsequent paths. 
    def trace_dt_lifecycle(self, dt, prefix: list):
        """
        Trace the whole lifecycle for a dt using dfs recursive search. Only when an
        algorithm cdt is submitted for solving tasks, the terminal state is reached.

        :param dt: data token identifier. On the first call (empty prefix) this
            is a human-readable dt string; recursive calls receive the id-bytes
            form produced below.
        :param prefix: fixed prefix path, then find its subsequent paths.
        :return all_paths: a list of found prefix + subsequent paths
        """

        prefix = prefix.copy()
        if len(prefix):
            # Non-root level: annotate the path with the aggregator that owns
            # this (granted) dt. NOTE(review): the key "aggrement" is a
            # misspelling of "agreement" kept for consumer compatibility.
            owner = self.get_dt_owner(dt)
            owner_info = self.get_enterprise(owner)[0]
            prefix.append({"dt": DTHelper.id_bytes_to_dt(
                dt), "aggregator": owner_info, "aggrement": 0})
        else:
            # Root level: record the dt as given, then switch to the id-bytes
            # representation used by the on-chain lookups below.
            prefix.append({"dt": dt})
            dt = DTHelper.dt_to_id_bytes(dt)

        _, ddo = resolve_asset(dt, self.dt_factory)

        all_paths = []

        # Terminal state: an algorithm cdt ends each path with its jobs.
        if self.verifier.check_asset_type(ddo, self.TERMINAL):
            jobs = self.trace_cdt_jobs(dt)

            if len(jobs):
                for job in jobs:
                    job_id, solver, task_id, demander, task_name, task_desc = job
                    demander_info = self.get_enterprise(demander)[0]
                    solver_info = self.get_enterprise(solver)[0]

                    text = {"task_name": task_name, "task_desc": task_desc, "solver": solver_info,
                            "demander": demander_info, "task_id": task_id, "job_id": job_id}

                    new_path = prefix.copy()
                    new_path.append(text)
                    all_paths.append(new_path)

            return all_paths

        # Otherwise recurse into every father cdt this dt was granted to.
        grantees = self.trace_dt_grantees(dt)

        for cdt in grantees:

            path_lists = self.trace_dt_lifecycle(cdt, prefix)
            all_paths.extend(path_lists)

        return all_paths
159 | 160 | :param paths: a list of dt->...->dt-> [job, ..., job] authorization chains, with the same root dt 161 | :return: list 162 | """ 163 | if len(paths) == 0: 164 | print('Do not find any data linking path') 165 | return None 166 | 167 | job_list = [] 168 | root = paths[0][0] 169 | 170 | for path in paths: 171 | if path[0] != root: 172 | raise AssertionError(f'A tree can only contain one root') 173 | 174 | job_list.append(path[-1]) 175 | 176 | return job_list 177 | 178 | def tree_format(self, paths): 179 | """ 180 | Convert paths to a formated hierarchical tree using Node class. 181 | 182 | :param paths: a list of dt->...->dt->... authorization chains, with the same root dt 183 | :return: root Node instance 184 | """ 185 | if len(paths) == 0: 186 | print('Do not find any data linking path') 187 | return None 188 | 189 | root = paths[0][0] 190 | 191 | for path in paths: 192 | if path[0] != root: 193 | raise AssertionError(f'A tree can only contain one root') 194 | 195 | root_node = Node(text=root, level=0) 196 | for path in paths: 197 | tmp_node = root_node 198 | level = 1 199 | index = 0 200 | 201 | for path_value in path[1:]: 202 | child_node = tmp_node.get_child(text=path_value) 203 | if not child_node: 204 | child_node = Node(text=path_value, level=level) 205 | tmp_node.add_child(child_node) 206 | 207 | tmp_node = child_node 208 | level += 1 209 | index += 1 210 | 211 | return root_node 212 | 213 | def tree_to_json(self, node): 214 | 215 | data = {"values": node.text} 216 | 217 | if len(node.child_nodes): 218 | data["children"] = [] 219 | 220 | for n in node.child_nodes: 221 | data["children"].append(self.tree_to_json(n)) 222 | 223 | return data 224 | 225 | def print_tree(self, node, indent: list, final_node=True): 226 | """Recursively output the node text and its child node.""" 227 | for i in range(node.level): 228 | print(indent[i], end='') 229 | 230 | if final_node: 231 | print('└──', end='') 232 | else: 233 | print('├──', end='') 234 | 235 | 
class Node:
    """The Node class used for linking child and father dts.

    ``text`` holds the payload for this level (a dt identifier or an info
    dict), ``level`` the tree depth, and ``child_nodes`` the granted father
    nodes.
    """

    def __init__(self, text, level):
        self._text = text  # dt in this level
        self._level = level  # current tree depth
        self._child_nodes = []  # its granted father dt

    @property
    def text(self):
        return self._text

    @property
    def level(self):
        return self._level

    @property
    def child_nodes(self):
        return self._child_nodes

    def add_child(self, node):
        """Append a child node."""
        self._child_nodes.append(node)

    def get_child(self, text):
        """Return the first child whose text equals ``text``, else None."""
        for node in self._child_nodes:
            if node.text == text:
                return node
        return None

    def empty(self):
        """True when this node has no children."""
        return len(self._child_nodes) == 0

    def __str__(self):
        # The payload is not always a string (tree paths store dicts), so
        # coerce explicitly instead of returning it verbatim.
        return str(self._text)
15 | 16 | logger = logging.getLogger(__name__) 17 | 18 | 19 | class VerifierService(object): 20 | """The entry point for accessing the verifier service.""" 21 | 22 | def __init__(self, config): 23 | keeper = Keeper(config.keeper_options) 24 | 25 | self.role_controller = keeper.role_controller 26 | self.asset_provider = keeper.asset_provider 27 | self.op_template = keeper.op_template 28 | self.dt_factory = keeper.dt_factory 29 | self.task_market = keeper.task_market 30 | 31 | self.config = config 32 | 33 | def check_admin(self, address): 34 | """Check Admin role for a given address.""" 35 | return self.role_controller.check_role(address, Role.ROLE_ADMIN) 36 | 37 | def check_enterprise(self, id): 38 | """Check Enterprize role for a given address.""" 39 | return self.asset_provider.check_enterprise(id) 40 | 41 | def check_provider(self, id): 42 | """Check Provider role for a given address.""" 43 | return self.asset_provider.check_provider(id) 44 | 45 | def check_op_exist(self, tid): 46 | """Check template existence.""" 47 | _tid = DTHelper.dt_to_id(tid) 48 | return self.op_template.is_template_exist(_tid) 49 | 50 | def check_dt_owner(self, dt, owner_address): 51 | """Check dt owner.""" 52 | _dt = DTHelper.dt_to_id(dt) 53 | return self.dt_factory.get_dt_owner(_dt) == owner_address 54 | 55 | def check_dt_available(self, dt): 56 | """Check dt availability.""" 57 | _dt = DTHelper.dt_to_id(dt) 58 | return self.dt_factory.check_dt_available(_dt) 59 | 60 | def check_cdt_composed(self, cdt): 61 | """Check cdt composability.""" 62 | _cdt = DTHelper.dt_to_id(cdt) 63 | return self.dt_factory.check_cdt_available(_cdt) 64 | 65 | def check_dt_perm(self, dt, grantee): 66 | """Check granted permission.""" 67 | _dt = DTHelper.dt_to_id(dt) 68 | _grantee = DTHelper.dt_to_id(grantee) 69 | return self.dt_factory.check_dt_perm(_dt, _grantee) 70 | 71 | def check_asset_type(self, ddo, asset_type): 72 | """Check asset type for a given ddo.""" 73 | return ddo.asset_type == asset_type 74 | 75 | 
def verify_signature(self, signer_address, signature, original_msg): 76 | """Check the given address has signed on the given data""" 77 | address = personal_ec_recover(original_msg, signature) 78 | return address.lower() == signer_address.lower() 79 | 80 | def verify_ddo_integrity(self, ddo, checksum_evidence): 81 | """Check the equallty of the ddo checksum and its on-chain evidence.""" 82 | checksum_evidence = remove_0x_prefix( 83 | convert_to_string(checksum_evidence)) 84 | return ddo.proof['checksum'] == checksum_evidence 85 | 86 | def verify_services(self, ddo, wrt_dts=None, integrity_check=True): 87 | """ 88 | Ensure the service constraints are fulfilled. For a given leaf ddo, we check 89 | the parameter consistency of its constraints and used templates. For a given 90 | composable ddo, we first check the availability of its childs, and then check 91 | the fulfilled constraints for each workflow service. 92 | 93 | :param ddo: a candidate DDO object, composable or leaf 94 | :param wrt_dts: a list of dts to be fulfilled, all child dts if None 95 | :param integrity_check: verify child ddo integrity if True 96 | :return: bool 97 | """ 98 | if not ddo.is_cdt: 99 | return validate_leaf_template(ddo, self.op_template) 100 | 101 | if not wrt_dts: 102 | wrt_dts = ddo.child_dts 103 | 104 | for dt in wrt_dts: 105 | data, child_ddo = resolve_asset(dt, self.dt_factory) 106 | if not data or not child_ddo: 107 | return False 108 | 109 | if integrity_check and not self.verify_ddo_integrity(child_ddo, data[2]): 110 | return False 111 | 112 | if not child_ddo.is_cdt: 113 | if not validate_leaf_template(child_ddo, self.op_template): 114 | return False 115 | else: 116 | if not self.check_cdt_composed(child_ddo.dt) or ( 117 | not self.verify_perms_ready(child_ddo)): 118 | return False 119 | 120 | if not validate_service_agreement(ddo, child_ddo): 121 | return False 122 | 123 | return True 124 | 125 | def verify_job_registered(self, job_id, cdt): 126 | """Ensure the cdt is 
submitted to the market with a given job id.""" 127 | job = self.task_market.get_job(job_id) 128 | if not (job and job[2]): 129 | return False 130 | 131 | return DTHelper.dt_to_id_bytes(cdt) == job[2] 132 | 133 | def verify_perms_ready(self, cdt_ddo, required_dt=None): 134 | """ 135 | Ensure the given cdt has got all child permissions. 136 | 137 | :param cdt_ddo: DDO object for a cdt, previously activated on chain 138 | :param required_dt: dt identifier required to be the cdt child 139 | :return: bool 140 | """ 141 | child_dts = [] 142 | found = False 143 | 144 | for dt in cdt_ddo.child_dts: 145 | child_dts.append(DTHelper.dt_to_id(dt)) 146 | if dt == required_dt: 147 | found = True 148 | 149 | if required_dt and found == False: 150 | return False 151 | 152 | _cdt = DTHelper.dt_to_id(cdt_ddo.dt) 153 | 154 | return self.dt_factory.check_clinks(_cdt, child_dts) 155 | -------------------------------------------------------------------------------- /datatoken/store/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MVP-Labs/data-token/fae7adc8945418590764f284ccd70f9dbb4a4382/datatoken/store/__init__.py -------------------------------------------------------------------------------- /datatoken/store/asset_resolve.py: -------------------------------------------------------------------------------- 1 | """Asset resolve Lib.""" 2 | # Copyright 2021 The DataToken Authors 3 | # SPDX-License-Identifier: LGPL-2.1-only 4 | 5 | from datatoken.core.ddo import DDO 6 | from datatoken.core.dt_helper import DTHelper 7 | from datatoken.core.operator import OpTemplate 8 | from datatoken.store.ipfs_provider import IPFSProvider 9 | 10 | 11 | def resolve_asset(dt, keeper_dt_factory): 12 | """ 13 | Resolve an asset dt to its corresponding DDO. 
def resolve_asset_by_url(metadata_url):
    """Resolve a DDO directly from its ipfs storage url.

    :param metadata_url: ipfs cid of the asset document ('Qm...')
    :return: DDO instance, or None when the url is not an ipfs cid or
        nothing is stored under it
    """
    if not metadata_url.startswith('Qm'):
        return None

    document = IPFSProvider().get(metadata_url)
    if not document:
        return None

    ddo = DDO()
    ddo.from_dict(document)
    return ddo
class IPFSProvider:
    """Asset storage provider backed by an IPFS node."""

    def __init__(self, config=None):
        """Initialize the ipfs provider.

        :param config: optional config exposing ``ipfs_endpoint``; when
            omitted, the client connects to the default local daemon
        """
        if config:
            self.ipfs_client = ipfshttpclient.connect(config.ipfs_endpoint)
        else:
            self.ipfs_client = ipfshttpclient.connect()

    def add(self, json):
        """
        Add asset values to the storage.

        :param json: dict value
        :return hash: ipfs cid
        """
        # NOTE(review): the parameter and local shadow the builtins
        # ``json``/``hash``; kept as-is to preserve the keyword interface
        # for existing callers.
        hash = self.ipfs_client.add_json(json)
        return hash

    def get(self, hash):
        """
        Get asset values for a given cid.

        :param hash: ipfs cid
        :return: dict
        """
        return self.ipfs_client.get_json(hash)

    def close(self):
        """Disable the provider by closing the underlying client session."""
        self.ipfs_client.close()
41 | 42 | if private_key: 43 | password = None 44 | 45 | self.address = address 46 | self.password = password 47 | self._key_file = key_file 48 | if self._key_file and not encrypted_key: 49 | with open(self.key_file) as _file: 50 | encrypted_key = _file.read() 51 | self._encrypted_key = encrypted_key 52 | self._private_key = private_key 53 | 54 | if self.address is None and self._private_key is not None: 55 | self.address = private_key_to_address(private_key) 56 | 57 | assert self.address is not None 58 | 59 | @property 60 | def key_file(self): 61 | """Holds the key file path""" 62 | return ( 63 | os.path.expandvars(os.path.expanduser(self._key_file)) 64 | if self._key_file 65 | else None 66 | ) 67 | 68 | @property 69 | def private_key(self): 70 | """Holds the private key""" 71 | return self._private_key 72 | 73 | @property 74 | def key(self): 75 | """Returns the private key (if defined) or the encrypted key.""" 76 | if self._private_key: 77 | return self._private_key 78 | 79 | return self._encrypted_key -------------------------------------------------------------------------------- /datatoken/web3/constants.py: -------------------------------------------------------------------------------- 1 | 2 | """ 3 | This module holds following default values for Gas price, Gas limit and more. 
4 | """ 5 | # Copyright 2021 Ocean Protocol Foundation 6 | # SPDX-License-Identifier: Apache-2.0 7 | # 8 | 9 | ENV_GAS_PRICE = "GAS_PRICE" 10 | ENV_MAX_GAS_PRICE = "MAX_GAS_PRICE" 11 | 12 | GAS_LIMIT_DEFAULT = 1000000 13 | MIN_GAS_PRICE = 1000000000 14 | 15 | ZERO_ADDRESS = "0x0000000000000000000000000000000000000000" 16 | 17 | DEFAULT_NETWORK_NAME = "ganache" 18 | NETWORK_NAME_MAP = { 19 | 1: "Mainnet", 20 | 2: "Morden", 21 | 3: "Ropsten", 22 | 4: "Rinkeby", 23 | 42: "Kovan", 24 | 100: "xDai", 25 | 137: "Polygon", 26 | } -------------------------------------------------------------------------------- /datatoken/web3/contract_base.py: -------------------------------------------------------------------------------- 1 | """All contracts inherit from `ContractBase` class.""" 2 | # Modified from Ocean.py library. 3 | # Copyright 2021 Ocean Protocol Foundation 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | import logging 7 | import os 8 | from typing import Any, Dict, List, Optional 9 | 10 | import requests 11 | from enforce_typing import enforce_types 12 | from eth_typing import BlockIdentifier 13 | from hexbytes import HexBytes 14 | from web3 import Web3 15 | from web3.exceptions import MismatchedABI, ValidationError 16 | from web3._utils.events import get_event_data 17 | from web3._utils.filters import construct_event_filter_params 18 | from web3._utils.threads import Timeout 19 | from websockets import ConnectionClosed 20 | 21 | from datatoken.web3.constants import ENV_GAS_PRICE 22 | from datatoken.web3.contract_handler import ContractHandler 23 | from datatoken.web3.wallet import Wallet 24 | from datatoken.web3.web3_provider import Web3Provider 25 | from datatoken.web3.web3_overrides.contract import CustomContractFunction 26 | 27 | logger = logging.getLogger(__name__) 28 | 29 | 30 | @enforce_types 31 | class ContractBase(object): 32 | 33 | """Base class for all contract objects.""" 34 | 35 | CONTRACT_NAME = None 36 | 37 | def __init__(self, address: 
Optional[str], abi_path=None): 38 | """Initialises Contract Base object. 39 | The contract name attribute and `abi_path` are required. 40 | """ 41 | self.name = self.contract_name 42 | assert ( 43 | self.name 44 | ), "contract_name property needs to be implemented in subclasses." 45 | if not abi_path: 46 | abi_path = ContractHandler.artifacts_path 47 | 48 | assert abi_path, f"abi_path is required, got {abi_path}" 49 | 50 | self.contract_concise = ContractHandler.get_concise_contract(self.name, address) 51 | self.contract = ContractHandler.get(self.name, address) 52 | 53 | assert not address or ( 54 | self.contract.address == address and self.address == address 55 | ) 56 | assert self.contract_concise is not None 57 | 58 | def __str__(self): 59 | """Returns contract `name @ address.`""" 60 | return f"{self.contract_name} @ {self.address}" 61 | 62 | @classmethod 63 | def configured_address(cls, network, address_file): 64 | """Returns the contract addresses""" 65 | addresses = ContractHandler.get_contracts_addresses(network, address_file) 66 | return addresses.get(cls.CONTRACT_NAME) if addresses else None 67 | 68 | @property 69 | def contract_name(self) -> str: 70 | """Returns the contract name""" 71 | return self.CONTRACT_NAME 72 | 73 | @property 74 | def address(self) -> str: 75 | """Return the ethereum address of the solidity contract deployed in current network.""" 76 | return self.contract.address 77 | 78 | @property 79 | def events(self): 80 | """Expose the underlying contract's events.""" 81 | return self.contract.events 82 | 83 | @property 84 | def function_names(self) -> List[str]: 85 | """Returns the list of functions in the contract""" 86 | return list(self.contract.functions) 87 | 88 | @staticmethod 89 | def to_checksum_address(address: str): 90 | """ 91 | Validate the address provided. 
92 | :param address: Address, hex str 93 | :return: address, hex str 94 | """ 95 | return Web3.toChecksumAddress(address) 96 | 97 | @staticmethod 98 | def get_tx_receipt(tx_hash: str, timeout=20): 99 | """ 100 | Get the receipt of a tx. 101 | :param tx_hash: hash of the transaction 102 | :param timeout: int in seconds to wait for transaction receipt 103 | :return: Tx receipt 104 | """ 105 | try: 106 | Web3Provider.get_web3().eth.wait_for_transaction_receipt( 107 | HexBytes(tx_hash), timeout=timeout 108 | ) 109 | except ValueError as e: 110 | logger.error(f"Waiting for transaction receipt failed: {e}") 111 | return None 112 | except Timeout as e: 113 | logger.info(f"Waiting for transaction receipt may have timed out: {e}.") 114 | return None 115 | except ConnectionClosed as e: 116 | logger.info( 117 | f"ConnectionClosed error waiting for transaction receipt failed: {e}." 118 | ) 119 | raise 120 | except Exception as e: 121 | logger.info(f"Unknown error waiting for transaction receipt: {e}.") 122 | raise 123 | 124 | return Web3Provider.get_web3().eth.get_transaction_receipt(tx_hash) 125 | 126 | def is_tx_successful(self, tx_hash: str) -> bool: 127 | """Check if the transaction is successful. 128 | :param tx_hash: hash of the transaction 129 | :return: bool 130 | """ 131 | receipt = self.get_tx_receipt(tx_hash) 132 | return bool(receipt and receipt.status == 1) 133 | 134 | def get_event_signature(self, event_name): 135 | """ 136 | Return signature of event definition to use in the call to eth_getLogs. 137 | The event signature is used as topic0 (first topic) in the eth_getLogs arguments 138 | The signature reflects the event name and argument types. 139 | :param event_name: 140 | :return: 141 | """ 142 | try: 143 | e = getattr(self.events, event_name) 144 | except MismatchedABI: 145 | e = None 146 | 147 | if not e: 148 | raise ValueError( 149 | f"Event {event_name} not found in {self.CONTRACT_NAME} contract." 
150 | ) 151 | 152 | abi = e().abi 153 | types = [param["type"] for param in abi["inputs"]] 154 | sig_str = f'{event_name}({",".join(types)})' 155 | return Web3.keccak(text=sig_str).hex() 156 | 157 | def subscribe_to_event( 158 | self, 159 | event_name: str, 160 | timeout, 161 | event_filter, 162 | callback=None, 163 | timeout_callback=None, 164 | args=None, 165 | wait=False, 166 | from_block="latest", 167 | to_block="latest", 168 | ): 169 | """ 170 | Create a listener for the event `event_name` on this contract. 171 | :param event_name: name of the event to subscribe, str 172 | :param timeout: 173 | :param event_filter: 174 | :param callback: 175 | :param timeout_callback: 176 | :param args: 177 | :param wait: if true block the listener until get the event, bool 178 | :param from_block: int or None 179 | :param to_block: int or None 180 | :return: event if blocking is True and an event is received, otherwise returns None 181 | """ 182 | from datatoken.web3.event_listener import EventListener 183 | 184 | return EventListener( 185 | self.CONTRACT_NAME, 186 | event_name, 187 | args, 188 | filters=event_filter, 189 | from_block=from_block, 190 | to_block=to_block, 191 | ).listen_once( 192 | callback, timeout_callback=timeout_callback, timeout=timeout, blocking=wait 193 | ) 194 | 195 | def send_transaction( 196 | self, fn_name: str, fn_args, from_wallet: Wallet, transact: dict = None 197 | ) -> str: 198 | """Calls a smart contract function. 199 | Uses either `personal_sendTransaction` (if passphrase is available) or `ether_sendTransaction`. 200 | :param fn_name: str the smart contract function name 201 | :param fn_args: tuple arguments to pass to function above 202 | :param from_wallet: 203 | :param transact: dict arguments for the transaction such as from, gas, etc. 
204 | :return: hex str transaction hash 205 | """ 206 | contract_fn = getattr(self.contract.functions, fn_name)(*fn_args) 207 | contract_function = CustomContractFunction(contract_fn) 208 | _transact = { 209 | "from": from_wallet.address, 210 | "passphrase": from_wallet.password, 211 | "account_key": from_wallet.key, 212 | # 'gas': GAS_LIMIT_DEFAULT 213 | } 214 | 215 | gas_price = os.environ.get(ENV_GAS_PRICE, None) 216 | if gas_price: 217 | _transact["gasPrice"] = gas_price 218 | 219 | if transact: 220 | _transact.update(transact) 221 | 222 | return contract_function.transact(_transact).hex() 223 | 224 | def get_event_argument_names(self, event_name: str): 225 | """Finds the event arguments by `event_name`. 226 | :param event_name: str Name of the event to search in the `contract`. 227 | :return: `event.argument_names` if event is found or None 228 | """ 229 | event = getattr(self.contract.events, event_name, None) 230 | if event: 231 | return event().argument_names 232 | 233 | @classmethod 234 | def deploy(cls, web3, deployer_wallet: Wallet, abi_path: str = "", *args): 235 | """ 236 | Deploy the DataTokenTemplate and DTFactory contracts to the current network. 
237 | :param web3: 238 | :param abi_path: 239 | :param deployer_wallet: Wallet instance 240 | :return: smartcontract address of this contract 241 | """ 242 | if not abi_path: 243 | abi_path = ContractHandler.artifacts_path 244 | 245 | assert abi_path, f"abi_path is required, got {abi_path}" 246 | 247 | w3 = web3 248 | _json = ContractHandler.read_abi_from_file(cls.CONTRACT_NAME, abi_path) 249 | 250 | _contract = w3.eth.contract(abi=_json["abi"], bytecode=_json["bytecode"]) 251 | built_tx = _contract.constructor(*args).buildTransaction( 252 | {"from": deployer_wallet.address} 253 | ) 254 | 255 | if "gas" not in built_tx: 256 | built_tx["gas"] = web3.eth.estimate_gas(built_tx) 257 | 258 | raw_tx = deployer_wallet.sign_tx(built_tx) 259 | logging.debug( 260 | f"Sending raw tx to deploy contract {cls.CONTRACT_NAME}, signed tx hash: {raw_tx.hex()}" 261 | ) 262 | tx_hash = web3.eth.send_raw_transaction(raw_tx) 263 | 264 | return cls.get_tx_receipt(tx_hash, timeout=60).contractAddress 265 | 266 | def get_event_logs( 267 | self, event_name, from_block, to_block, filters, web3=None, chunk_size=1000 268 | ): 269 | """ 270 | Fetches the list of event logs between the given block numbers. 271 | :param event_name: str 272 | :param from_block: int 273 | :param to_block: int 274 | :param filters: 275 | :param web3: Wallet instance 276 | :param chunk_size: int 277 | :return: List of event logs. List will have the structure as below. 278 | ```Python 279 | [AttributeDict({ 280 | 'args': AttributeDict({}), 281 | 'event': 'LogNoArguments', 282 | 'logIndex': 0, 283 | 'transactionIndex': 0, 284 | 'transactionHash': HexBytes('...'), 285 | 'address': '0xF2E246BB76DF876Cef8b38ae84130F4F55De395b', 286 | 'blockHash': HexBytes('...'), 287 | 'blockNumber': 3 288 | }), 289 | AttributeDict(...), 290 | ... 
291 | ] 292 | ``` 293 | """ 294 | event = getattr(self.events, event_name) 295 | if not web3: 296 | web3 = Web3Provider.get_web3() 297 | 298 | chunk = chunk_size 299 | _from = from_block 300 | _to = _from + chunk - 1 301 | 302 | all_logs = [] 303 | error_count = 0 304 | _to = min(_to, to_block) 305 | while _from <= to_block: 306 | try: 307 | logs = self.getLogs( 308 | event, web3, argument_filters=filters, fromBlock=_from, toBlock=_to 309 | ) 310 | all_logs.extend(logs) 311 | _from = _to + 1 312 | _to = min(_from + chunk - 1, to_block) 313 | error_count = 0 314 | if (_from - from_block) % 1000 == 0: 315 | print( 316 | f" So far processed {len(all_logs)} Transfer events from {_from-from_block} blocks." 317 | ) 318 | except requests.exceptions.ReadTimeout as err: 319 | print(f"ReadTimeout ({_from}, {_to}): {err}") 320 | error_count += 1 321 | 322 | if error_count > 1: 323 | break 324 | 325 | return all_logs 326 | 327 | def getLogs( 328 | self, 329 | event, 330 | web3, 331 | argument_filters: Optional[Dict[str, Any]] = None, 332 | fromBlock: Optional[BlockIdentifier] = None, 333 | toBlock: Optional[BlockIdentifier] = None, 334 | blockHash: Optional[HexBytes] = None, 335 | ): 336 | """Get events for this contract instance using eth_getLogs API. 337 | This is a stateless method, as opposed to createFilter. 338 | It can be safely called against nodes which do not provide 339 | eth_newFilter API, like Infura nodes. 340 | If there are many events, 341 | like ``Transfer`` events for a popular token, 342 | the Ethereum node might be overloaded and timeout 343 | on the underlying JSON-RPC call. 
344 | Example - how to get all ERC-20 token transactions 345 | for the latest 10 blocks: 346 | ```python 347 | from = max(mycontract.web3.eth.block_number - 10, 1) 348 | to = mycontract.web3.eth.block_number 349 | events = mycontract.events.Transfer.getLogs(fromBlock=from, toBlock=to) 350 | for e in events: 351 | print(e["args"]["from"], 352 | e["args"]["to"], 353 | e["args"]["value"]) 354 | ``` 355 | The returned processed log values will look like: 356 | ```python 357 | ( 358 | AttributeDict({ 359 | 'args': AttributeDict({}), 360 | 'event': 'LogNoArguments', 361 | 'logIndex': 0, 362 | 'transactionIndex': 0, 363 | 'transactionHash': HexBytes('...'), 364 | 'address': '0xF2E246BB76DF876Cef8b38ae84130F4F55De395b', 365 | 'blockHash': HexBytes('...'), 366 | 'blockNumber': 3 367 | }), 368 | AttributeDict(...), 369 | ... 370 | ) 371 | ``` 372 | See also: :func:`web3.middleware.filter.local_filter_middleware`. 373 | :param argument_filters: 374 | :param fromBlock: block number or "latest", defaults to "latest" 375 | :param toBlock: block number or "latest". Defaults to "latest" 376 | :param blockHash: block hash. 
blockHash cannot be set at the 377 | same time as fromBlock or toBlock 378 | :yield: Tuple of :class:`AttributeDict` instances 379 | """ 380 | if not self.address: 381 | raise TypeError( 382 | "This method can be only called on " 383 | "an instated contract with an address" 384 | ) 385 | 386 | abi = event._get_event_abi() 387 | 388 | if argument_filters is None: 389 | argument_filters = dict() 390 | 391 | _filters = dict(**argument_filters) 392 | 393 | blkhash_set = blockHash is not None 394 | blknum_set = fromBlock is not None or toBlock is not None 395 | if blkhash_set and blknum_set: 396 | raise ValidationError( 397 | "blockHash cannot be set at the same" " time as fromBlock or toBlock" 398 | ) 399 | 400 | # Construct JSON-RPC raw filter presentation based on human readable Python descriptions 401 | # Namely, convert event names to their keccak signatures 402 | _, event_filter_params = construct_event_filter_params( 403 | abi, 404 | web3.codec, 405 | contract_address=self.address, 406 | argument_filters=_filters, 407 | fromBlock=fromBlock, 408 | toBlock=toBlock, 409 | ) 410 | 411 | if blockHash is not None: 412 | event_filter_params["blockHash"] = blockHash 413 | 414 | # Call JSON-RPC API 415 | logs = web3.eth.get_logs(event_filter_params) 416 | 417 | # Convert raw binary data to Python proxy objects as described by ABI 418 | return tuple(get_event_data(web3.codec, abi, entry) for entry in logs) -------------------------------------------------------------------------------- /datatoken/web3/contract_handler.py: -------------------------------------------------------------------------------- 1 | # Modified from Ocean.py library. 
2 | # Copyright 2021 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | import json 6 | import logging 7 | import os 8 | 9 | from datatoken.web3.web3_provider import Web3Provider 10 | from web3 import Web3 11 | from web3.contract import ConciseContract 12 | 13 | logger = logging.getLogger(__name__) 14 | 15 | 16 | class ContractHandler(object): 17 | """ 18 | Manages loading contracts and also keeps a cache of loaded contracts. 19 | """ 20 | 21 | _contracts = dict() 22 | 23 | artifacts_path = None 24 | network_alias = {"ganache": "development"} 25 | 26 | @staticmethod 27 | def get_contracts_addresses(network, address_file): 28 | if not address_file or not os.path.exists(address_file): 29 | return None 30 | with open(address_file) as f: 31 | addresses = json.load(f) 32 | 33 | network_addresses = addresses.get(network, None) 34 | if network_addresses is None and network in ContractHandler.network_alias: 35 | network_addresses = addresses.get( 36 | ContractHandler.network_alias[network], None 37 | ) 38 | 39 | return network_addresses 40 | 41 | @staticmethod 42 | def set_artifacts_path(artifacts_path): 43 | if artifacts_path and artifacts_path != ContractHandler.artifacts_path: 44 | ContractHandler.artifacts_path = artifacts_path 45 | ContractHandler._contracts.clear() 46 | 47 | @staticmethod 48 | def _get(name, address=None): 49 | """ 50 | Return the contract & its concise version, for a given name. 
51 | :param name: Contract name, str 52 | :param address: hex str -- address of contract 53 | :return: tuple of (contract, concise_contract) 54 | """ 55 | key = (name, address) if address else (name) 56 | result = ContractHandler._contracts.get(key) 57 | if result is None: 58 | ContractHandler._load(name, address) 59 | result = ContractHandler._contracts.get(key) 60 | assert result is not None 61 | 62 | ContractHandler._verifyContractsConsistency(name) 63 | return result 64 | 65 | @staticmethod 66 | def get(name, address=None): 67 | """ 68 | Return the Contract instance for a given name. 69 | :param name: Contract name, str 70 | :param address: hex str -- address of smart contract 71 | :return: Contract instance 72 | """ 73 | return ContractHandler._get(name, address)[0] 74 | 75 | @staticmethod 76 | def get_concise_contract(name, address=None): 77 | """ 78 | Return the Concise Contract instance for a given name. 79 | :param name: str -- Contract name 80 | :param address: hex str -- address of smart contract 81 | :return: Concise Contract instance 82 | """ 83 | return ContractHandler._get(name, address)[1] 84 | 85 | @staticmethod 86 | def _set(name, contract): 87 | assert contract.address is not None 88 | 89 | tup = (contract, ConciseContract(contract)) 90 | ContractHandler._contracts[(name, contract.address)] = tup 91 | ContractHandler._contracts[name] = tup 92 | 93 | ContractHandler._verifyContractsConsistency(name) 94 | 95 | @staticmethod 96 | def set(name, contract): 97 | """ 98 | Set a Contract instance for a contract name. 99 | :param name: Contract name, str 100 | :param contract: Contract instance 101 | """ 102 | ContractHandler._set(name, contract) 103 | 104 | @staticmethod 105 | def has(name, address=None): 106 | """ 107 | Check if a contract is the ContractHandler contracts. 
108 | :param name: Contract name, str 109 | :param address: hex str -- address of smart contract 110 | :return: True if the contract is there, bool 111 | """ 112 | if address: 113 | return (name, address) in ContractHandler._contracts 114 | return name in ContractHandler._contracts 115 | 116 | @staticmethod 117 | def _load(contract_name, address=None): 118 | """Retrieve the contract instance for `contract_name`. 119 | That instance represents the smart contract in the ethereum network. 120 | Handles two cases: 121 | 1. One deployment of contract, eg DTFactory. 'address' can be None, or specified 122 | 2. 1 deployments, eg DataTokenTemplate. 'address' must be specified. 123 | :param contract_name: str name of the solidity smart contract. 124 | :param address: hex str -- address of smart contract 125 | """ 126 | assert ( 127 | ContractHandler.artifacts_path is not None 128 | ), "artifacts_path should be already set." 129 | contract_definition = ContractHandler.read_abi_from_file( 130 | contract_name, ContractHandler.artifacts_path 131 | ) 132 | 133 | if not address and "address" in contract_definition: 134 | address = contract_definition.get("address") 135 | assert address, "Cannot find contract address in the abi file." 
136 | address = Web3.toChecksumAddress(address) 137 | assert address is not None, "address shouldn't be None at this point" 138 | 139 | abi = contract_definition["abi"] 140 | bytecode = contract_definition["bytecode"] 141 | contract = Web3Provider.get_web3().eth.contract( 142 | address=address, abi=abi, bytecode=bytecode 143 | ) 144 | if contract.address is None: # if web3 drops address, fix it 145 | contract.address = address 146 | assert contract.address is not None 147 | 148 | ContractHandler._set(contract_name, contract) 149 | 150 | ContractHandler._verifyContractsConsistency(contract_name) 151 | 152 | @staticmethod 153 | def read_abi_from_file(contract_name, abi_path): 154 | path = None 155 | contract_name = contract_name + ".json" 156 | names = os.listdir(abi_path) 157 | # :HACK: temporary workaround to handle an extra folder that contain the artifact files. 158 | if len(names) == 1 and names[0] == "*": 159 | abi_path = os.path.join(abi_path, "*") 160 | 161 | for name in os.listdir(abi_path): 162 | if name.lower() == contract_name.lower(): 163 | path = os.path.join(abi_path, contract_name) 164 | break 165 | 166 | if path: 167 | with open(path) as f: 168 | return json.loads(f.read()) 169 | 170 | return None 171 | 172 | @staticmethod 173 | def _verifyContractsConsistency(name): 174 | """ 175 | Raise an error if ContractHandler._contracts is inconsistent 176 | for the given contract name. 
logger = logging.getLogger(__name__)


class EventFilter:
    """Thin wrapper around a web3 event filter.

    Installs the underlying filter on construction and re-installs it on
    demand (nodes may drop filters), applying a configurable poll interval
    to every (re)created filter.
    """

    def __init__(
        self,
        event_name,
        event,
        argument_filters,
        from_block,
        to_block,
        poll_interval=None,
    ):
        """Initialises EventFilter."""
        self.event_name = event_name
        self.event = event
        self.argument_filters = argument_filters
        self.block_range = (from_block, to_block)
        self._filter = None
        # A falsy poll_interval (None or 0) falls back to 0.5 seconds.
        self._poll_interval = poll_interval if poll_interval else 0.5
        self._create_filter()

    @property
    def filter_id(self):
        """Id of the installed filter, or None before creation."""
        if not self._filter:
            return None
        return self._filter.filter_id

    def uninstall(self):
        """Remove the installed filter from the connected node."""
        Web3Provider.get_web3().eth.uninstall_filter(self._filter.filter_id)

    def set_poll_interval(self, interval):
        """Update the poll interval, also on the live filter if one exists."""
        self._poll_interval = interval
        if self._filter and self._poll_interval is not None:
            self._filter.poll_interval = self._poll_interval

    def recreate_filter(self):
        """Install a fresh filter with the same parameters."""
        self._create_filter()

    def _create_filter(self):
        # (Re)build the node-side filter from the stored parameters.
        start_block, end_block = self.block_range
        self._filter = self.event().createFilter(
            fromBlock=start_block,
            toBlock=end_block,
            argument_filters=self.argument_filters,
        )
        if self._poll_interval is not None:
            self._filter.poll_interval = self._poll_interval

    def get_new_entries(self, max_tries=1):
        """Return entries added since the previous poll."""
        return self._get_entries(self._filter.get_new_entries, max_tries=max_tries)

    def get_all_entries(self, max_tries=1):
        """Return every entry matching the filter."""
        return self._get_entries(self._filter.get_all_entries, max_tries=max_tries)

    def _get_entries(self, entries_getter, max_tries=1):
        # Poll up to `max_tries` times; recreate the filter when the node
        # reports it was dropped, otherwise propagate the error.
        attempt = 0
        while attempt < max_tries:
            try:
                logs = entries_getter()
                if logs:
                    logger.debug(
                        f"found event logs: event-name={self.event_name}, "
                        f"range={self.block_range}, "
                        f"logs={logs}"
                    )
                    return logs
            except ValueError as e:
                if "Filter not found" not in str(e):
                    raise
                logger.debug(
                    f"recreating filter (Filter not found): event={self.event_name}, "
                    f"arg-filter={self.argument_filters}, from/to={self.block_range}"
                )
                time.sleep(1)
                self._create_filter()

            attempt += 1
            if max_tries > 1 and attempt < max_tries:
                time.sleep(0.5)

        return []
2 | # Copyright 2021 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | import logging 5 | import time 6 | from datetime import datetime 7 | from threading import Thread 8 | 9 | from datatoken.web3.contract_handler import ContractHandler 10 | from datatoken.web3.event_filter import EventFilter 11 | 12 | logger = logging.getLogger(__name__) 13 | 14 | 15 | class EventListener(object): 16 | 17 | """Class representing an event listener.""" 18 | 19 | def __init__( 20 | self, 21 | contract_name, 22 | event_name, 23 | args=None, 24 | from_block=None, 25 | to_block=None, 26 | filters=None, 27 | ): 28 | """Initialises EventListener object.""" 29 | contract = ContractHandler.get(contract_name) 30 | self.event_name = event_name 31 | self.event = getattr(contract.events, event_name) 32 | self.filters = filters if filters else {} 33 | self.from_block = from_block if from_block is not None else "latest" 34 | self.to_block = to_block if to_block is not None else "latest" 35 | self.event_filter = self.make_event_filter() 36 | self.event_filter.poll_interval = 0.5 37 | self.timeout = 600 # seconds 38 | self.args = args 39 | 40 | def make_event_filter(self): 41 | """Create a new event filter.""" 42 | event_filter = EventFilter( 43 | self.event_name, 44 | self.event, 45 | self.filters, 46 | from_block=self.from_block, 47 | to_block=self.to_block, 48 | ) 49 | event_filter.set_poll_interval(0.5) 50 | return event_filter 51 | 52 | def listen_once( 53 | self, 54 | callback, 55 | timeout=None, 56 | timeout_callback=None, 57 | start_time=None, 58 | blocking=False, 59 | ): 60 | """Listens once for event. 
61 | :param callback: a callback function that takes one argument the event dict 62 | :param timeout: float timeout in seconds 63 | :param timeout_callback: a callback function when timeout expires 64 | :param start_time: float start time in seconds, defaults to current time and is used 65 | for calculating timeout 66 | :param blocking: bool blocks this call until the event is detected 67 | :return: event if blocking is True and an event is received, otherwise returns None 68 | """ 69 | if blocking: 70 | assert ( 71 | timeout is not None 72 | ), "`timeout` argument is required when `blocking` is True." 73 | 74 | events = [] 75 | original_callback = callback 76 | 77 | def _callback(event, *args): 78 | events.append(event) 79 | if original_callback: 80 | original_callback(event, *args) 81 | 82 | if blocking: 83 | callback = _callback 84 | 85 | # TODO Review where to close this threads. 86 | Thread( 87 | target=self.watch_one_event, 88 | args=( 89 | self.event_filter, 90 | callback, 91 | timeout_callback, 92 | timeout if timeout is not None else self.timeout, 93 | self.args, 94 | start_time, 95 | ), 96 | daemon=True, 97 | ).start() 98 | if blocking: 99 | while not events: 100 | time.sleep(0.2) 101 | 102 | return events 103 | 104 | return None 105 | 106 | @staticmethod 107 | def watch_one_event( 108 | event_filter, callback, timeout_callback, timeout, args, start_time=None 109 | ): 110 | """ 111 | Start to watch one event. 
@enforce_types
def sign_hash(msg_hash, wallet: Wallet) -> str:
    """
    Sign a message hash with the wallet's key.
    This mirrors `personal_sign`, which always prepends the
    `\\x19Ethereum Signed Message:\\n32` prefix before signing.
    :param msg_hash: 32-byte message hash to sign
    :param wallet: Wallet instance holding the signing key
    :return: hex str signature
    """
    s = wallet.sign(msg_hash)
    return s.signature.hex()


def send_ether(from_wallet: Wallet, to_address: str, ether_amount: int):
    """
    Transfer `ether_amount` ether from `from_wallet` to `to_address`.
    :param from_wallet: Wallet instance of the sender
    :param to_address: hex str recipient address (checksummed if necessary)
    :param ether_amount: int amount to send, denominated in ether
    :return: transaction receipt
    """
    w3 = Web3Provider.get_web3()
    if not w3.isChecksumAddress(to_address):
        to_address = w3.toChecksumAddress(to_address)

    tx = {
        "from": from_wallet.address,
        "to": to_address,
        "value": w3.toWei(ether_amount, "ether"),
    }
    # Dry-run estimation: raises early if the transfer cannot succeed; the
    # estimate itself is discarded in favor of a fixed gas limit below.
    _ = w3.eth.estimate_gas(tx)
    tx = {
        "from": from_wallet.address,
        "to": to_address,
        "value": w3.toWei(ether_amount, "ether"),
        "gas": 500000,
    }
    wallet = Wallet(w3, private_key=from_wallet.key, address=from_wallet.address)
    raw_tx = wallet.sign_tx(tx)
    tx_hash = w3.eth.send_raw_transaction(raw_tx)
    receipt = w3.eth.wait_for_transaction_receipt(tx_hash, timeout=30)
    return receipt


def cancel_or_replace_transaction(
    from_wallet, nonce_value, gas_price=None, gas_limit=None
):
    """
    Replace the pending transaction at `nonce_value` with a zero-value
    self-transfer, effectively cancelling it.
    :param from_wallet: Wallet instance whose pending tx should be replaced
    :param nonce_value: int nonce of the transaction to replace
    :param gas_price: optional int gas price for the replacement tx
    :param gas_limit: optional int gas limit; estimated when not provided
    :return: transaction receipt of the replacement transaction
    """
    w3 = Web3Provider.get_web3()
    tx = {"from": from_wallet.address, "to": from_wallet.address, "value": 0}
    gas = gas_limit if gas_limit is not None else w3.eth.estimate_gas(tx)
    # gas + 1 ensures the limit strictly covers the estimate.
    tx = {
        "from": from_wallet.address,
        "to": from_wallet.address,
        "value": 0,
        "gas": gas + 1,
    }

    wallet = Wallet(w3, private_key=from_wallet.key, address=from_wallet.address)
    raw_tx = wallet.sign_tx(tx, fixed_nonce=nonce_value, gas_price=gas_price)
    tx_hash = w3.eth.send_raw_transaction(raw_tx)
    receipt = w3.eth.wait_for_transaction_receipt(tx_hash, timeout=30)
    return receipt
2 | # Copyright 2021 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | import logging 6 | from collections import namedtuple 7 | from decimal import Decimal 8 | 9 | from enforce_typing import enforce_types 10 | from eth_keys import keys 11 | from eth_utils import big_endian_to_int, decode_hex 12 | from datatoken.web3.constants import DEFAULT_NETWORK_NAME, NETWORK_NAME_MAP 13 | from datatoken.web3.web3_provider import Web3Provider 14 | from datatoken.web3.web3_overrides.signature import SignatureFix 15 | 16 | Signature = namedtuple("Signature", ("v", "r", "s")) 17 | 18 | logger = logging.getLogger(__name__) 19 | 20 | 21 | def generate_multi_value_hash(types, values): 22 | """ 23 | Return the hash of the given list of values. 24 | This is equivalent to packing and hashing values in a solidity smart contract 25 | hence the use of `soliditySha3`. 26 | :param types: list of solidity types expressed as strings 27 | :param values: list of values matching the `types` list 28 | :return: bytes 29 | """ 30 | assert len(types) == len(values) 31 | return Web3Provider.get_web3().solidityKeccak(types, values) 32 | 33 | 34 | def prepare_prefixed_hash(msg_hash): 35 | """ 36 | :param msg_hash: 37 | :return: 38 | """ 39 | return generate_multi_value_hash( 40 | ["string", "bytes32"], ["\x19Ethereum Signed Message:\n32", msg_hash] 41 | ) 42 | 43 | 44 | def add_ethereum_prefix_and_hash_msg(text): 45 | """ 46 | This method of adding the ethereum prefix seems to be used in web3.personal.sign/ecRecover. 
47 | :param text: str any str to be signed / used in recovering address from a signature 48 | :return: hash of prefixed text according to the recommended ethereum prefix 49 | """ 50 | prefixed_msg = f"\x19Ethereum Signed Message:\n{len(text)}{text}" 51 | return Web3Provider.get_web3().keccak(text=prefixed_msg) 52 | 53 | 54 | def to_32byte_hex(web3, val): 55 | """ 56 | :param web3: 57 | :param val: 58 | :return: 59 | """ 60 | return web3.toBytes(val).rjust(32, b"\0") 61 | 62 | 63 | def split_signature(web3, signature): 64 | """ 65 | :param web3: 66 | :param signature: signed message hash, hex str 67 | :return: 68 | """ 69 | assert len(signature) == 65, ( 70 | f"invalid signature, " f"expecting bytes of length 65, got {len(signature)}" 71 | ) 72 | v = web3.toInt(signature[-1]) 73 | r = to_32byte_hex(web3, int.from_bytes(signature[:32], "big")) 74 | s = to_32byte_hex(web3, int.from_bytes(signature[32:64], "big")) 75 | if v != 27 and v != 28: 76 | v = 27 + v % 2 77 | 78 | return Signature(v, r, s) 79 | 80 | 81 | @enforce_types 82 | def private_key_to_address(private_key: str) -> str: 83 | return Web3Provider.get_web3().eth.account.from_key(private_key).address 84 | 85 | 86 | @enforce_types 87 | def private_key_to_public_key(private_key: str) -> str: 88 | private_key_bytes = decode_hex(private_key) 89 | private_key_object = keys.PrivateKey(private_key_bytes) 90 | return private_key_object.public_key 91 | 92 | 93 | @enforce_types 94 | def get_network_name(network_id: int = None) -> str: 95 | """ 96 | Return the network name based on the current ethereum network id. 97 | Return `ganache` for every network id that is not mapped. 
98 | :param network_id: Network id, int 99 | :return: Network name, str 100 | """ 101 | if not network_id: 102 | network_id = get_network_id() 103 | return NETWORK_NAME_MAP.get(network_id, DEFAULT_NETWORK_NAME).lower() 104 | 105 | 106 | @enforce_types 107 | def get_network_id() -> int: 108 | """ 109 | Return the ethereum network id calling the `web3.version.network` method. 110 | :return: Network id, int 111 | """ 112 | return int(Web3Provider.get_web3().net.version) 113 | 114 | 115 | @enforce_types 116 | def ec_recover(message, signed_message): 117 | """ 118 | This method does not prepend the message with the prefix `\x19Ethereum Signed Message:\n32`. 119 | The caller should add the prefix to the msg/hash before calling this if the signature was 120 | produced for an ethereum-prefixed message. 121 | :param message: 122 | :param signed_message: 123 | :return: 124 | """ 125 | w3 = Web3Provider.get_web3() 126 | v, r, s = split_signature(w3, w3.toBytes(hexstr=signed_message)) 127 | signature_object = SignatureFix(vrs=(v, big_endian_to_int(r), big_endian_to_int(s))) 128 | return w3.eth.account.recoverHash( 129 | message, signature=signature_object.to_hex_v_hacked() 130 | ) 131 | 132 | 133 | @enforce_types 134 | def personal_ec_recover(message, signed_message): 135 | prefixed_hash = add_ethereum_prefix_and_hash_msg(message) 136 | return ec_recover(prefixed_hash, signed_message) 137 | 138 | 139 | @enforce_types 140 | def get_ether_balance(address: str) -> int: 141 | """ 142 | Get balance of an ethereum address. 
143 | :param address: address, bytes32 144 | :return: balance, int 145 | """ 146 | return Web3Provider.get_web3().eth.get_balance(address, block_identifier="latest") 147 | 148 | 149 | def from_wei(wei_value: int) -> Decimal: 150 | return Web3Provider.get_web3().fromWei(wei_value, "ether") -------------------------------------------------------------------------------- /datatoken/web3/wallet.py: -------------------------------------------------------------------------------- 1 | # Modified from Ocean.py library. 2 | # Copyright 2021 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | import logging 6 | import os 7 | from typing import Optional 8 | 9 | from enforce_typing import enforce_types 10 | from datatoken.web3.constants import ENV_MAX_GAS_PRICE, MIN_GAS_PRICE 11 | from datatoken.web3.utils import private_key_to_address, private_key_to_public_key 12 | 13 | logger = logging.getLogger(__name__) 14 | 15 | @enforce_types 16 | class Wallet: 17 | 18 | """ 19 | The wallet is responsible for signing transactions and messages by using an account's 20 | private key. 21 | The private key is always read from the encrypted keyfile and is never saved in memory beyond 22 | the life span of the signing function. 23 | The use of this wallet allows DataToken tools to send rawTransactions which keeps the user 24 | key and password safe and they are never sent outside. Another advantage of this is that 25 | we can interact directly with remote network nodes without having to run a local parity 26 | node since we only send the raw transaction hash so the user info is safe. 27 | Usage: 28 | 1. 
`wallet = Wallet(ocean.web3, private_key=private_key)` 29 | """ 30 | 31 | _last_tx_count = dict() 32 | 33 | def __init__( 34 | self, 35 | web3, 36 | private_key: Optional[str] = None, 37 | encrypted_key: dict = None, 38 | password: Optional[str] = None, 39 | address: Optional[str] = None, 40 | ): 41 | """Initialises Wallet object.""" 42 | assert private_key or ( 43 | encrypted_key and password 44 | ), "private_key or encrypted_key and password is required." 45 | 46 | self._web3 = web3 47 | self._last_tx_count.clear() 48 | 49 | self._password = password 50 | self._address = address 51 | self._key = private_key 52 | if encrypted_key and not private_key: 53 | assert self._password 54 | self._key = self._web3.eth.account.decrypt(encrypted_key, self._password) 55 | if not isinstance(self._key, str): 56 | self._key = self._key.hex() 57 | 58 | if self._key: 59 | address = private_key_to_address(self._key) 60 | assert self._address is None or self._address == address 61 | self._address = address 62 | self._password = None 63 | 64 | assert self.private_key, ( 65 | "something is not right, private key is not available. " 66 | "please check the arguments are valid." 
67 | ) 68 | 69 | self._max_gas_price = os.getenv(ENV_MAX_GAS_PRICE, None) 70 | 71 | @property 72 | def web3(self): 73 | return self._web3 74 | 75 | @property 76 | def address(self): 77 | return self._address 78 | 79 | @property 80 | def password(self): 81 | return self._password 82 | 83 | @property 84 | def private_key(self): 85 | return self._key 86 | 87 | @property 88 | def key(self): 89 | return self._key 90 | 91 | @staticmethod 92 | def reset_tx_count(): 93 | Wallet._last_tx_count = dict() 94 | 95 | def __get_key(self): 96 | return self._key 97 | 98 | def validate(self): 99 | account = self._web3.eth.account.from_key(self._key) 100 | return account.address == self._address 101 | 102 | @staticmethod 103 | def _get_nonce(web3, address): 104 | # We cannot rely on `web3.eth.get_transaction_count` because when sending multiple 105 | # transactions in a row without wait in between the network may not get the chance to 106 | # update the transaction count for the account address in time. 107 | # So we have to manage this internally per account address. 
108 | if address not in Wallet._last_tx_count: 109 | Wallet._last_tx_count[address] = web3.eth.get_transaction_count(address) 110 | else: 111 | Wallet._last_tx_count[address] += 1 112 | 113 | return Wallet._last_tx_count[address] 114 | 115 | def sign_tx(self, tx, fixed_nonce=None, gas_price=None): 116 | account = self._web3.eth.account.from_key(self.private_key) 117 | if fixed_nonce is not None: 118 | nonce = fixed_nonce 119 | logger.debug( 120 | f"Signing transaction using a fixed nonce {fixed_nonce}, tx params are: {tx}" 121 | ) 122 | else: 123 | nonce = Wallet._get_nonce(self._web3, account.address) 124 | 125 | if not gas_price: 126 | gas_price = int(self._web3.eth.gas_price * 1.1) 127 | gas_price = max(gas_price, MIN_GAS_PRICE) 128 | 129 | if gas_price and self._max_gas_price: 130 | gas_price = min(gas_price, self._max_gas_price) 131 | 132 | logger.debug( 133 | f"`Wallet` signing tx: sender address: {account.address} nonce: {nonce}, " 134 | f"eth.gasPrice: {self._web3.eth.gas_price}" 135 | ) 136 | tx["gasPrice"] = gas_price 137 | tx["nonce"] = nonce 138 | signed_tx = self._web3.eth.account.sign_transaction(tx, self.private_key) 139 | logger.debug(f"Using gasPrice: {gas_price}") 140 | logger.debug(f"`Wallet` signed tx is {signed_tx}") 141 | return signed_tx.rawTransaction 142 | 143 | def sign(self, msg_hash): 144 | """Sign a transaction.""" 145 | account = self._web3.eth.account.from_key(self.private_key) 146 | return account.signHash(msg_hash) 147 | 148 | def keys_str(self): 149 | s = [] 150 | s += [f"address: {self.address}"] 151 | if self.private_key is not None: 152 | s += [f"private key: {self.private_key}"] 153 | s += [f"public key: {private_key_to_public_key(self.private_key)}"] 154 | s += [""] 155 | return "\n".join(s) -------------------------------------------------------------------------------- /datatoken/web3/web3_overrides/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/MVP-Labs/data-token/fae7adc8945418590764f284ccd70f9dbb4a4382/datatoken/web3/web3_overrides/__init__.py -------------------------------------------------------------------------------- /datatoken/web3/web3_overrides/contract.py: -------------------------------------------------------------------------------- 1 | # Modified from Ocean.py library. 2 | # Copyright 2018 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | import logging 6 | import time 7 | 8 | from web3.contract import prepare_transaction 9 | 10 | from datatoken.web3.wallet import Wallet 11 | 12 | 13 | class CustomContractFunction: 14 | 15 | def __init__(self, contract_function): 16 | self._contract_function = contract_function 17 | 18 | def transact(self, transaction=None): 19 | """ 20 | Customize calling smart contract transaction functions to use `personal_sendTransaction` 21 | instead of `eth_sendTransaction` and to estimate gas limit. This function 22 | is largely copied from web3 ContractFunction with important addition. 23 | 24 | Note: will fallback to `eth_sendTransaction` if `passphrase` is not provided in the 25 | `transaction` dict. 26 | 27 | :param transaction: dict which has the required transaction arguments per 28 | `personal_sendTransaction` requirements. 
29 | :return: hex str transaction hash 30 | """ 31 | if transaction is None: 32 | transact_transaction = {} 33 | else: 34 | transact_transaction = dict(**transaction) 35 | 36 | if 'data' in transact_transaction: 37 | raise ValueError("Cannot set data in transact transaction") 38 | 39 | cf = self._contract_function 40 | if cf.address is not None: 41 | transact_transaction.setdefault('to', cf.address) 42 | # if cf.web3.eth.defaultAccount is not empty: 43 | # transact_transaction.setdefault('from', cf.web3.eth.defaultAccount) 44 | 45 | if 'to' not in transact_transaction: 46 | if isinstance(self, type): 47 | raise ValueError( 48 | "When using `Contract.transact` from a contract factory you " 49 | "must provide a `to` address with the transaction" 50 | ) 51 | else: 52 | raise ValueError( 53 | "Please ensure that this contract instance has an address." 54 | ) 55 | 56 | if 'gas' not in transact_transaction: 57 | tx = transaction.copy() 58 | if 'passphrase' in tx: 59 | tx.pop('passphrase') 60 | if 'account_key' in tx: 61 | tx.pop('account_key') 62 | gas = cf.estimateGas(tx) 63 | transact_transaction['gas'] = gas 64 | 65 | return transact_with_contract_function( 66 | cf.address, 67 | cf.web3, 68 | cf.function_identifier, 69 | transact_transaction, 70 | cf.contract_abi, 71 | cf.abi, 72 | *cf.args, 73 | **cf.kwargs 74 | ) 75 | 76 | 77 | def transact_with_contract_function( 78 | address, 79 | web3, 80 | function_name=None, 81 | transaction=None, 82 | contract_abi=None, 83 | fn_abi=None, 84 | *args, 85 | **kwargs): 86 | """ 87 | Helper function for interacting with a contract function by sending a 88 | transaction. This is copied from web3 `transact_with_contract_function` 89 | so we can use `personal_sendTransaction` when possible. 
90 | """ 91 | transact_transaction = prepare_transaction( 92 | address, 93 | web3, 94 | fn_identifier=function_name, 95 | contract_abi=contract_abi, 96 | transaction=transaction, 97 | fn_abi=fn_abi, 98 | fn_args=args, 99 | fn_kwargs=kwargs, 100 | ) 101 | 102 | passphrase = None 103 | account_key = None 104 | if transaction and 'passphrase' in transaction: 105 | passphrase = transaction['passphrase'] 106 | transact_transaction.pop('passphrase') 107 | if 'account_key' in transaction: 108 | account_key = transaction['account_key'] 109 | transact_transaction.pop('account_key') 110 | 111 | if account_key: 112 | raw_tx = Wallet(web3, private_key=account_key).sign_tx( 113 | transact_transaction) 114 | logging.debug( 115 | f'sending raw tx: function: {function_name}, tx hash: {raw_tx.hex()}') 116 | txn_hash = web3.eth.sendRawTransaction(raw_tx) 117 | elif passphrase: 118 | txn_hash = web3.personal.sendTransaction( 119 | transact_transaction, passphrase) 120 | else: 121 | txn_hash = web3.eth.sendTransaction(transact_transaction) 122 | 123 | wait_for_tx(txn_hash, web3, 5) 124 | return txn_hash 125 | 126 | 127 | def wait_for_tx(tx_hash, web3, timeout=30): 128 | start = time.time() 129 | while True: 130 | try: 131 | web3.eth.waitForTransactionReceipt(tx_hash, timeout=timeout) 132 | break 133 | except Exception: 134 | time.sleep(0.2) 135 | 136 | if time.time() - start > timeout: 137 | break 138 | 139 | return 140 | -------------------------------------------------------------------------------- /datatoken/web3/web3_overrides/http_provider.py: -------------------------------------------------------------------------------- 1 | # Modified from Ocean.py library. 
2 | # Copyright 2018 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | from web3 import HTTPProvider 6 | 7 | from datatoken.web3.web3_overrides.request import make_post_request 8 | 9 | 10 | class CustomHTTPProvider(HTTPProvider): 11 | """ 12 | Override requests to control the connection pool to make it blocking. 13 | """ 14 | 15 | def make_request(self, method, params): 16 | self.logger.debug("Making request HTTP. URI: %s, Method: %s", 17 | self.endpoint_uri, method) 18 | request_data = self.encode_rpc_request(method, params) 19 | raw_response = make_post_request( 20 | self.endpoint_uri, 21 | request_data, 22 | **self.get_request_kwargs() 23 | ) 24 | response = self.decode_rpc_response(raw_response) 25 | self.logger.debug("Getting response HTTP. URI: %s, " 26 | "Method: %s, Response: %s", 27 | self.endpoint_uri, method, response) 28 | return response 29 | -------------------------------------------------------------------------------- /datatoken/web3/web3_overrides/request.py: -------------------------------------------------------------------------------- 1 | # Modified from Web3 python library. 
2 | # SPDX-License-Identifier: MIT 3 | 4 | import lru 5 | import requests 6 | from requests.adapters import HTTPAdapter 7 | from web3._utils.caching import generate_cache_key 8 | 9 | def _remove_session(key, session): 10 | session.close() 11 | 12 | 13 | _session_cache = lru.LRU(8, callback=_remove_session) 14 | 15 | 16 | def _get_session(*args, **kwargs): 17 | cache_key = generate_cache_key((args, kwargs)) 18 | if cache_key not in _session_cache: 19 | # This is the main change from original Web3 `_get_session` 20 | session = requests.sessions.Session() 21 | session.mount('http://', HTTPAdapter(pool_connections=25, 22 | pool_maxsize=25, pool_block=True)) 23 | session.mount('https://', 24 | HTTPAdapter(pool_connections=25, pool_maxsize=25, pool_block=True)) 25 | _session_cache[cache_key] = session 26 | return _session_cache[cache_key] 27 | 28 | 29 | def make_post_request(endpoint_uri, data, *args, **kwargs): 30 | kwargs.setdefault('timeout', 10) 31 | session = _get_session(endpoint_uri) 32 | response = session.post(endpoint_uri, data=data, *args, **kwargs) 33 | response.raise_for_status() 34 | 35 | return response.content 36 | -------------------------------------------------------------------------------- /datatoken/web3/web3_overrides/signature.py: -------------------------------------------------------------------------------- 1 | # Modified from Ocean.py library. 
2 | # Copyright 2021 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | import codecs 6 | 7 | from enforce_typing import enforce_types 8 | from eth_keys.datatypes import Signature 9 | from eth_keys.utils.numeric import int_to_byte 10 | from eth_keys.utils.padding import pad32 11 | from eth_utils import int_to_big_endian 12 | 13 | 14 | @enforce_types 15 | class SignatureFix(Signature): 16 | 17 | """ 18 | Hack the Signature class to allow rebuilding of signature with a 19 | v value of 27 or 28 instead of 0 or 1 20 | """ 21 | 22 | def __init__(self, signature_bytes=None, vrs=None, backend=None) -> None: 23 | """Initialises SignatureFix object.""" 24 | v, r, s = vrs 25 | if v == 27 or v == 28: 26 | v -= 27 27 | 28 | vrs = (v, r, s) 29 | Signature.__init__(self, signature_bytes, vrs, backend) 30 | 31 | def to_hex_v_hacked(self) -> str: 32 | # Need the 'type: ignore' comment below because of 33 | # https://github.com/python/typeshed/issues/300 34 | return "0x" + codecs.decode( 35 | codecs.encode(self.to_bytes_v_hacked(), "hex"), "ascii" 36 | ) # type: ignore 37 | 38 | def to_bytes_v_hacked(self) -> bytes: 39 | v = self.v 40 | if v == 0 or v == 1: 41 | v += 27 42 | vb = int_to_byte(v) 43 | rb = pad32(int_to_big_endian(self.r)) 44 | sb = pad32(int_to_big_endian(self.s)) 45 | # FIXME: Enable type checking once we have type annotations in eth_utils 46 | return b"".join((rb, sb, vb)) # type: ignore -------------------------------------------------------------------------------- /datatoken/web3/web3_provider.py: -------------------------------------------------------------------------------- 1 | # Modified from Ocean.py library. 
2 | # Copyright 2021 Ocean Protocol Foundation 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | from web3 import Web3 6 | from datatoken.web3.web3_overrides.http_provider import CustomHTTPProvider 7 | 8 | class Web3Provider(object): 9 | 10 | """Provides the Web3 instance.""" 11 | 12 | _web3 = None 13 | 14 | @staticmethod 15 | def init_web3(network_url=None, provider=None): 16 | """One of `network_url` or `provider` is required. 17 | If `provider` is given, `network_url` will be ignored. 18 | :param network_url: 19 | :param provider: 20 | :return: 21 | """ 22 | if not provider: 23 | assert network_url, "network_url or a provider instance is required." 24 | provider = CustomHTTPProvider(network_url) 25 | 26 | Web3Provider._web3 = Web3(provider) 27 | 28 | # Reset attributes to avoid lint issue about no attribute 29 | Web3Provider._web3.eth = getattr(Web3Provider._web3, "eth") 30 | Web3Provider._web3.net = getattr(Web3Provider._web3, "net") 31 | Web3Provider._web3.version = getattr(Web3Provider._web3, "version") 32 | Web3Provider._web3.parity = getattr(Web3Provider._web3, "parity") 33 | Web3Provider._web3.testing = getattr(Web3Provider._web3, "testing") 34 | 35 | @staticmethod 36 | def get_web3(network_url=None, provider=None): 37 | """Return the web3 instance to interact with the ethereum client.""" 38 | if Web3Provider._web3 is None: 39 | Web3Provider.init_web3(network_url, provider) 40 | return Web3Provider._web3 41 | 42 | @staticmethod 43 | def set_web3(web3): 44 | """Set web3 instance.""" 45 | Web3Provider._web3 = web3 -------------------------------------------------------------------------------- /docs/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MVP-Labs/data-token/fae7adc8945418590764f284ccd70f9dbb4a4382/docs/.DS_Store -------------------------------------------------------------------------------- /docs/figures/1.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/MVP-Labs/data-token/fae7adc8945418590764f284ccd70f9dbb4a4382/docs/figures/1.png -------------------------------------------------------------------------------- /docs/figures/2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MVP-Labs/data-token/fae7adc8945418590764f284ccd70f9dbb4a4382/docs/figures/2.png -------------------------------------------------------------------------------- /docs/figures/test.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MVP-Labs/data-token/fae7adc8945418590764f284ccd70f9dbb4a4382/docs/figures/test.png -------------------------------------------------------------------------------- /docs/figures/tree.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MVP-Labs/data-token/fae7adc8945418590764f284ccd70f9dbb4a4382/docs/figures/tree.png -------------------------------------------------------------------------------- /docs/grants_cn.md: -------------------------------------------------------------------------------- 1 | # DataToken 2 | 3 | 联系方式:qblee@zju.edu.cn 4 | 5 | 本项目为Ownership Labs团队开发的Compute-to-Data项目,入选了Platon的Grants计划项目。本项目将基于PlatON和Rosetta,实现一套跨域的分布式数据管理和远程计算中间件(DataToken SDK),以确保数据共享利用的全流程可以被精准追踪和审计。 6 | 7 | ## 项目目标 8 | 9 | 随着大数据技术的高速发展,数据已然成为众多公司的重要资产和商业引擎。但数据流通存在壁垒,传统的IT技术栈无法同时满足数据跨域计算、用户隐私保护和数据监管审计三者。而区块链的多方对等共识、信息不可篡改等特性, 使其在数据确权、数据授权、数据溯源等方面有巨大的应用潜力。为了实现数据资产不出私域下的安全可信流动,本项目将基于区块链和密码学,实现一套跨域的分布式数据管理和远程计算中间件(DataToken),并确保数据共享利用的全流程可以被精准追踪和审计。 10 | 11 | ## 项目价值 12 | 13 | 当前业务部门间、机构间相互隔离,跨域数据共享难的原因在于,一旦数据资产离开私域,将面临第三方的数据沉淀收集甚至转卖等风险,难以保证所有者的核心利益。同时,数据集中通常包含了许多用户数据,监管机构对规范用户数据共享的要求正日益增长,用户隐私保护和数据主权的法规也正在不断设立。当前的隐私计算方案实现了原始数据不出私域下的多方数据协作,但并未确保私域数据的融合计算经过严格的权限管理,难以满足监管对数据应用主体和数据托管主体的合规要求,同时用户仍不具备其数据使用全流程的知情权。 14 
| 15 | 本项目将提供简单易用的跨域数据融合计算SDK,实现数据不动、算法动的分布式计算服务规范(compute-to-data)。在企业/用户完全控制其数据资产的前提下,允许数据需求方将可信的远程算法发送到数据侧进行本地计算,同时确保数据操作全流程可追踪可审计。数据所有主体将“预先”获知其数据资产将被谁、被如何处理分发,从而对数据的本地计算权进行自主授权,数据应用主体也可以更合规地利用跨域数据资产来进行数据协同分析。 16 | 17 | 不同于Ocean Protocol的面向单域数据的远程计算方案,本项目提出的DataToken SDK 可以在多个数据源(及其用户子数据)和多个算力上定义可信的分布式计算工作流,能够追踪数据跨域融合计算的全过程。相信只有通过可信全面的数据共享使用信息,市场的价格发现机制才能为数据进行合理的定价,从而构建万亿美元数据市场。 18 | 19 | ## 核心原理 20 | 21 | ### 问题描述 22 | 在本项目的设定中,一个多方数据协作任务的参与主体包括数据源方、算力提供方、算法提供方和数据应用方。联合计算问题通常可以用下图的嵌套结构来表示:一个数据源可由多个子数据源构成,一个远程算法可应用在多个数据源和多个计算力上,一个任务也可以由多个算法阶段构成。 23 | 24 |
25 | 26 |
27 | 28 | 数据、算力、算法等都可以被理解为资产,用数据Token(DataToken, DT)来进行唯一标识,不同资产具有不同的元信息(metadata),用分布式文档对象DDO来表示。链上维护了一份DT注册表,可以快速定位链下DDO的IPFS存储位置: 29 | 30 | ``` 31 | DT标识符在链上注册为:{DT, owner_address, storage_path, proof} 32 | DDO在链下存储为:{DT,proof,services: [type, endpoint, child_dts, supported_ops, workflows, extra_params]} 33 | ``` 34 | 35 | 其中proof=hash(metadata),type区分数据、算力、算法资源,endpoint为服务端点,extra_params里可包含名称、描述、提供方、价格等信息。child_dts=[{0: dt_0}, {1: dt_1}, ..., {n: dt_n}],为空则表示底层资源。一个企业数据源DDO可以用这样的结构来表示,其中包含多个用户数据构成的DT列表。 36 | 37 | ### 远程计算规范 38 | 39 | 为了实现跨域的分布式计算,资产DDO的数据结构不仅包含多个子DT标识符,还应包含对这些资源的操作代码哈希。以两家银⾏联合建模为例,第三⽅科技公司提供算法: 40 | 41 |
42 | 43 |
44 | 45 | 其中,op_1、op_2为秘密共享操作,op_3、op_4为SS碎片上的联合AI操作。算法DDO中包含了对四个资源的工作流存证,表示私域数据库中的原始数据经过秘密共享后,发送到联邦域算力上进行联合建模。 46 | 47 | 例如金融科技公司C为两家银行A、B提供客户画像和联合风控服务。假设银行的原始客户数据位于私域网络的数据库中,可以通过秘密共享SS将客户数据加密传递到银行间的联邦域网络,在密文基础上进行MPC联合建模。 48 | 49 | 资产所有者应能快速设置DDO中的远程计算规范,如支持的操作、分布式工作流等,为此首先要规范化操作代码,形成一系列可信的远程代码模版。操作代码也可以用其哈希来唯一标识并上链,代码脚本存储在IPFS当中,这个步骤可以由系统管理员来执行。这样,DDO中的supported_ops和workflows就可以用统一的代码标识符来进行配置: 50 | 51 | ``` 52 | supported_ops: [{0:op_0},...,{m:op_m}] 53 | workflows: [set_ops:[{0:dt_0_op},...,{n:dt_n_op}],configs] 54 | ``` 55 | 56 | 当child_dts列表为空时,workflows也应该为空。此时supported_ops表示该底层资产支持的本地代码操作(的链上标识符集合),例如支持联邦学习的移动端用户数据。当child_dts列表不为空时,supported_ops表示该资产的所有子dt一致支持的操作;同时可在workflow中定义更为复杂的分布式工作流,set_ops中包含了对各个子dt的具体操作,configs中可附带运行参数或指明工作流的计算顺序。 57 | 58 | ### 任务工作市场 59 | 高层资产在执行实际计算前,需要获取低层资产的使用授权。而低层资产通常会验证高层资产DDO中的操作代码是否符合自己的支持条款,满足即在链上授权。在复杂的实际问题中,可以使用层次化的代理结构,例如,dt_1授权给dt_2,dt_2授权给dt_3,可以认为dt_3所有者获取了dt_1资产的本地操作权限。通过这种方式,算法提供方可以在企业数据源下的用户数据本地执行计算。 60 | 61 | 值得一提的是,在多方数据协作任务中,获取授权不意味着可以立即发起远程计算,否则低层资产无法追踪其全流程使用情况,只有所有参与方都授权后才可执行实际计算。我们设计了一个链上的任务工作市场,作为算法DT的终止状态,当一可信机构将该DT存证到任务工作中后,意味着有人为该远程计算担保,并已取得所有授权。低层资产也可以获知其将如何被使用、以及被谁使用(具体操作是,在链上查询自己是否授权过该算法DT或其子DT的...的子DT)。 62 | 63 | ## MVP交互流程 64 | DataToken SDK允许在分布式资源上进行可信可追溯的计算,并将提供三个场景MVP:1)企业间数据协作;2)用户级边缘计算;3)可追溯的隐私AI。 65 | 66 | ### 企业间数据协作 67 | 68 | 考虑简单的两方纵向联邦学习,例如金融科技公司C为两家银行A、B提供联合风控服务。假设银行的原始客户数据位于私域网络的数据库中(安全性等级高),在保证数据安全且操作可审计的情况下,可以通过密码学方案(如秘密共享SS)将客户数据加密传递到银行间的联邦域网络(安全性等级稍低),在密文基础上进一步实现MPC联合建模。在这里,银行A、B同时作为数据源方和算力提供方,金融科技公司C则是算法提供方和数据应用方。MVP流程如下: 69 | 70 | 1. 合约部署方为系统管理员,添加A、B、C的机构名和账户关系,同时将SS和MPC代码哈希注册到链上,脚本存储在IPFS中; 71 | 2. 银行A、B用DT/DDO描述私域数据和联邦域算力的服务条款,前者应支持SS操作,后者应支持MPC操作,生成四个dt标识符和四个ddo。将ddo存储在IPFS中,将dt、storage_path和proof注册到链上; 72 | 3. 公司C对数据/算力DT进行组合,填充child_dts以及对各资源的代码哈希,生成算法dt后存IPFS并上链; 73 | 4. 银行A、B分别验证算法dt是否满足资源使用条款,验证通过后,在链上将数据/算力dt授权给算法dt; 74 | 5. 公司C在链上任务市场新建一个任务,并将算法dt提交到该任务的工作下; 75 | 6. 
公司C远程操作银行A、B的数据/算力。各银行需要核实该算法dt是否取得授权、所有者签名、任务工作状态等。通过的话,从IPFS下载代码脚本,并执行本地计算。 76 | 77 | ### 用户级边缘计算 78 | 考虑简单的移动端横向联邦学习,例如智慧医疗公司B希望在穿戴式设备提供商A的所有用户数据上进行心脏病预测模型训练。用户数据都在移动设备本地,用u_1,u_2,...,u_n表示。在这里,提供商A是数据源方,包含了诸多用户数据,公司B则是算法提供方和数据应用方。计算过程都在数据私域完成,没有显示的算力提供方,即设备商A运行梯度聚合(如FedAverage),用户设备运行横向模型计算(EdgeComp)。MVP流程如下: 79 | 80 | 1. 合约部署方为系统管理员,添加A、B的机构名和账户关系,同时将EdgeComp和FedAverage代码哈希注册到链上,脚本存储在IPFS中; 81 | 2. 设备商A为其生态用户添加身份,设置一个链上用户注册表,用于控制资产发布; 82 | 3. n个用户和设备商B都注册数据资产。用户ddo中应支持EdgeComp操作,设备商DDO中应对n个用户dt进行组合,并定义工作流, workflows=[set_ops:[{0:ec},...,{n:ec},{n+1(self):fa}]],即在用户设备上运行横向模型,在设备商的私域聚合梯度; 83 | 4. 医疗公司B注册算法资产,其中包含设备商dt,并定义工作流,workflows=[set_ops:[{0:self}]],表示该算法直接使用数据源B的工作流; 84 | 5. 用户验证设备商的数据源dt,设备商A则验证算法dt,均判断满足资源使用条款后,在链上完成dt间的授权关系; 85 | 6. 医疗公司B在链上任务市场新建一个任务,并将算法dt提交到该任务的工作下; 86 | 7. 医疗公司B向设备商A发起远程操作请求。设备商A核实算法,通过的话,首先告知所有用户,随后运行FedAverage; 87 | 8. 用户接收设备商A的消息,具有数据利用的知情权(可根据链上的任务工作市场和dt授权关系信息进行追溯),随后运行EdgeComp。 88 | 89 | 在以上的过程中,设备商A和医疗公司B都不接触到原始的用户数据,只拿到中间数据或结果数据。各方都知道自己的资源被如何使用,用户也在确认授权后才执行本地计算。 90 | 91 | ### 可追溯计算的隐私AI 92 | 本项目在Rosetta基础上实现可追溯计算的隐私AI,rtt-tracer。该MVP将结合DataToken组件进行开发,实现安全多方计算下的联合模型。除了DT全功能外,还涉及简单的资产服务部署和联合计算功能: 93 | 94 | 1. 基于rtt的联合模型,提供用户信用违约等样例,在链上注册为可信OP模版; 95 | 2. 
基于flask来部署资产服务,包括私域数据的本地计算和联邦域算力的代理计算。 96 | 97 | 在这个过程中,数据应用方会向多个资产方的flask服务发送实际的授权请求和远程计算请求。资产方验证请求后,下载可信rtt代码进行本地计算或联合计算。监管机构也可以查询资产使用情况。 98 | 99 | ### 里程碑与展望 100 | 完整的项目将基于Platon区块链和Rosetta隐私计算框架,涉及dt-contracts、dt-web3、dt-asset、dt-sdk、rtt-tracer等5个代码仓库。关键里程碑可以分为M1-M4几个阶段: 101 | 102 | - M1—链上合约(dt-contracts)和链下交互过程(dt-web3):前者主要包括管理员和机构注册、DT上链和授权、可信OP注册、任务工作市场;后者主要包括迁移Ocean的keeper-py-lib、链上合约对应的链下实例类 103 | - M2—链下元数据管理和计算协议工具集(dt-asset):主要包括DT标识符生成、DDO元数据管理、创建计算服务工作流、IPFS存储连接器 104 | - M3—多方数据协作的业务层开发工具(dt-sdk):主要包括几个业务参与方的功能封装,即系统管理员、数据提供方、算力提供方、算法提供方,同时完成MVP1和MVP2 105 | - M4,可追溯计算的隐私AI(rtt-tracer):完成MVP3 106 | 107 | 总结来说,该项目更多是对多方数据协作的全流程进行资产化上链。后续将跟Rosetta进行深度绑定,形成一系列可信的分布式计算OP模版,并提供一站式多方数据协作门户平台,用可视化的交互界面来进行数据协作和数据追溯。 108 | 109 | 110 | 111 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | enforce_typing==1.0.0.post1 2 | web3==5.19.0 3 | ipfshttpclient==0.8.0a2 -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MVP-Labs/data-token/fae7adc8945418590764f284ccd70f9dbb4a4382/setup.cfg -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MVP-Labs/data-token/fae7adc8945418590764f284ccd70f9dbb4a4382/setup.py -------------------------------------------------------------------------------- /tests/template/add_op.py: -------------------------------------------------------------------------------- 1 | import json 2 | import argparse 3 | 4 | if __name__ == '__main__': 5 | print('hello world, data token') 6 | 7 | parser = argparse.ArgumentParser() 8 | parser.add_argument('--config', type=str, required=True) 9 | 10 | args = 
parser.parse_args() 11 | op_args = json.load(open(args.config)) 12 | 13 | print(op_args['arg1'] + op_args['arg2']) 14 | -------------------------------------------------------------------------------- /tests/template/args.json: -------------------------------------------------------------------------------- 1 | { 2 | "arg1": {}, 3 | "arg2": {} 4 | } -------------------------------------------------------------------------------- /tests/test.py: -------------------------------------------------------------------------------- 1 | # """Demo""" 2 | 3 | from datatoken.config import Config 4 | from datatoken.web3.wallet import Wallet 5 | from datatoken.web3.utils import add_ethereum_prefix_and_hash_msg 6 | from datatoken.service.system import SystemService 7 | from datatoken.service.asset import AssetService 8 | from datatoken.service.job import JobService 9 | from datatoken.service.tracer import TracerService 10 | 11 | 12 | config = Config(filename='./config.ini') 13 | 14 | system_account = Wallet( 15 | config.web3, private_key='0xd5b87119980bc80944760f1027d7643dc9bdfff8307cae1e831ff7f74f11ebd3') 16 | org1_account = Wallet( 17 | config.web3, private_key='0xaca737275831497429a47bcd5766950a69a0fa8a1511a8cf656005de1c11546e') 18 | org2_account = Wallet( 19 | config.web3, private_key='0xc68daf21bb748605396992aaf95a28eba74b5ec53706ce251e35957baccf7e80') 20 | org3_account = Wallet( 21 | config.web3, private_key='0x858dc470755f747d50053b2e8e3bfca78d5fd9f75ef5a63398d4e8390792e026') 22 | 23 | # print(system_account.address) 24 | # print(org1_account.address) 25 | # print(org2_account.address) 26 | # print(org3_account.address) 27 | 28 | system_service = SystemService(config) 29 | asset_service = AssetService(config) 30 | job_service = JobService(config) 31 | tracer_service = TracerService(config) 32 | 33 | ############ 34 | system_service.register_enterprise( 35 | org1_account.address, 'org1', 'test_org1', system_account) 36 | system_service.add_provider(org1_account.address, 
system_account) 37 | 38 | system_service.register_enterprise( 39 | org2_account.address, 'org2', 'test_org2', system_account) 40 | system_service.add_provider(org2_account.address, system_account) 41 | 42 | system_service.register_enterprise( 43 | org3_account.address, 'org3', 'test_org3', system_account) 44 | system_service.add_provider(org3_account.address, system_account) 45 | 46 | 47 | metadata = {'main': {'name': 'add_op', 48 | 'desc': 'test add op', 'type': 'Operation'}} 49 | with open('./tests/template/add_op.py', 'r') as f: 50 | operation = f.read() 51 | with open('./tests/template/args.json', 'r') as f: 52 | params = f.read() 53 | 54 | op1 = system_service.publish_template( 55 | metadata, operation, params, system_account) 56 | 57 | ############ 58 | metadata = {'main': {'name': 'leaf data1', 59 | 'desc': 'test leaf1', 'type': 'Dataset'}} 60 | service = { 61 | 'index': 'sid0_for_dt1', 62 | 'endpoint': 'ip:port', 63 | 'descriptor': { 64 | 'template': op1.tid, 65 | 'constraint': { 66 | 'arg1': 1, 67 | 'arg2': {} 68 | } 69 | }, 70 | 'attributes': { 71 | 'price': 10 72 | } 73 | } 74 | 75 | ddo1 = asset_service.generate_ddo( 76 | metadata, [service], org1_account.address, verify=True) 77 | asset_service.publish_dt(ddo1, org1_account) 78 | 79 | metadata = {'main': {'name': 'leaf data2', 80 | 'desc': 'test leaf2', 'type': 'Dataset'}} 81 | service = { 82 | 'index': 'sid0_for_dt2', 83 | 'endpoint': 'ip:port', 84 | 'descriptor': { 85 | 'template': op1.tid, 86 | 'constraint': { 87 | 'arg1': {}, 88 | 'arg2': 2 89 | } 90 | }, 91 | 'attributes': { 92 | 'price': 10 93 | } 94 | } 95 | 96 | ddo2 = asset_service.generate_ddo( 97 | metadata, [service], org2_account.address, verify=True) 98 | asset_service.publish_dt(ddo2, org2_account) 99 | 100 | metadata = {'main': {'type': 'Dataset', 101 | 'desc': 'test union1', 'name': 'data union1'}} 102 | child_dts = [ 103 | ddo1.dt, 104 | ddo2.dt 105 | ] 106 | service = { 107 | 'index': 'sid0_for_cdt1', 108 | 'endpoint': 'ip:port', 109 
| 'descriptor': { 110 | 'workflow': { 111 | ddo1.dt: { 112 | 'service': 'sid0_for_dt1', 113 | 'constraint': { 114 | 'arg1': 1, 115 | 'arg2': 3 116 | } 117 | }, 118 | ddo2.dt: { 119 | 'service': 'sid0_for_dt2', 120 | 'constraint': { 121 | 'arg1': {}, 122 | 'arg2': 2 123 | } 124 | } 125 | } 126 | }, 127 | 'attributes': { 128 | 'price': 20, 129 | 'op_name': "federated" 130 | } 131 | } 132 | 133 | service1 = { 134 | 'index': 'sid1_for_cdt2', 135 | 'endpoint': 'ip:port', 136 | 'descriptor': { 137 | 'workflow': { 138 | ddo1.dt: { 139 | 'service': 'sid0_for_dt1', 140 | 'constraint': { 141 | 'arg1': 1, 142 | 'arg2': 3 143 | } 144 | }, 145 | ddo2.dt: { 146 | 'service': 'sid0_for_dt2', 147 | 'constraint': { 148 | 'arg1': 2, 149 | 'arg2': 2 150 | } 151 | } 152 | } 153 | }, 154 | 'attributes': { 155 | 'price': 30, 156 | 'op_name': "download" 157 | } 158 | } 159 | 160 | ddo3 = asset_service.generate_ddo( 161 | metadata, [service, service1], org3_account.address, child_dts=child_dts, verify=True) 162 | asset_service.publish_dt(ddo3, org3_account) 163 | 164 | msg = f'{org3_account.address}{ddo3.dt}' 165 | msg_hash = add_ethereum_prefix_and_hash_msg(msg) 166 | signature = org3_account.sign(msg_hash).signature.hex() 167 | 168 | print(asset_service.check_service_terms( 169 | ddo3.dt, ddo1.dt, org1_account.address, signature)) 170 | print(asset_service.check_service_terms( 171 | ddo3.dt, ddo2.dt, org2_account.address, signature)) 172 | 173 | asset_service.grant_dt_perm(ddo1.dt, ddo3.dt, org1_account) 174 | asset_service.grant_dt_perm(ddo2.dt, ddo3.dt, org2_account) 175 | asset_service.activate_cdt(ddo3.dt, ddo3.child_dts, org3_account) 176 | 177 | metadata = {'main': {'type': 'Algorithm', 178 | 'name': 'algorithm1', 'desc': 'test algo1'}} 179 | child_dts = [ 180 | ddo3.dt, 181 | ] 182 | service1 = { 183 | 'index': 'sid0_for_cdt2', 184 | 'endpoint': 'ip:port', 185 | 'descriptor': { 186 | 'workflow': { 187 | ddo3.dt: { 188 | 'service': 'sid0_for_cdt1', 189 | 'constraint': { 190 | 
ddo1.dt: { 191 | 'arg1': 1, 192 | 'arg2': 3, 193 | }, 194 | ddo2.dt: { 195 | 'arg1': 1, 196 | 'arg2': 2 197 | } 198 | } 199 | } 200 | } 201 | }, 202 | 'attributes': { 203 | 'price': 30 204 | } 205 | } 206 | 207 | ddo4 = asset_service.generate_ddo( 208 | metadata, [service1], org3_account.address, child_dts=child_dts, verify=True) 209 | asset_service.publish_dt(ddo4, org3_account) 210 | 211 | msg = f'{org3_account.address}{ddo4.dt}' 212 | msg_hash = add_ethereum_prefix_and_hash_msg(msg) 213 | signature = org3_account.sign(msg_hash).signature.hex() 214 | 215 | print(asset_service.check_service_terms( 216 | ddo4.dt, ddo3.dt, org3_account.address, signature)) 217 | 218 | asset_service.grant_dt_perm(ddo3.dt, ddo4.dt, org3_account) 219 | asset_service.activate_cdt(ddo4.dt, ddo4.child_dts, org3_account) 220 | 221 | metadata = {'main': {'type': 'Algorithm', 222 | 'name': 'algorithm2', 'desc': 'test algo2'}} 223 | child_dts = [ 224 | ddo3.dt, 225 | ] 226 | service1 = { 227 | 'index': 'sid0_for_cdt3', 228 | 'endpoint': 'ip:port', 229 | 'descriptor': { 230 | 'workflow': { 231 | ddo3.dt: { 232 | 'service': 'sid0_for_cdt1', 233 | 'constraint': { 234 | ddo1.dt: { 235 | 'arg1': 1, 236 | 'arg2': 3, 237 | }, 238 | ddo2.dt: { 239 | 'arg1': 4, 240 | 'arg2': 2 241 | } 242 | } 243 | } 244 | } 245 | }, 246 | 'attributes': { 247 | 'price': 30 248 | } 249 | } 250 | 251 | ddo5 = asset_service.generate_ddo( 252 | metadata, [service1], org3_account.address, child_dts=child_dts, verify=True) 253 | asset_service.publish_dt(ddo5, org3_account) 254 | 255 | msg = f'{org3_account.address}{ddo5.dt}' 256 | msg_hash = add_ethereum_prefix_and_hash_msg(msg) 257 | signature = org3_account.sign(msg_hash).signature.hex() 258 | 259 | print(asset_service.check_service_terms( 260 | ddo5.dt, ddo3.dt, org3_account.address, signature)) 261 | 262 | asset_service.grant_dt_perm(ddo3.dt, ddo5.dt, org3_account) 263 | asset_service.activate_cdt(ddo5.dt, ddo5.child_dts, org3_account) 264 | 265 | 266 | task_id = 
job_service.create_task('test', 'test_task', org3_account) 267 | job_id = job_service.add_job(task_id, ddo4.dt, org3_account) 268 | job_id = job_service.add_job(task_id, ddo4.dt, org3_account) 269 | 270 | msg = f'{org3_account.address}{job_id}' 271 | msg_hash = add_ethereum_prefix_and_hash_msg(msg) 272 | signature = org3_account.sign(msg_hash).signature.hex() 273 | 274 | print(job_service.check_remote_compute(ddo4.dt, ddo3.dt, 275 | job_id, org3_account.address, signature)) 276 | 277 | job_id = job_service.add_job(task_id, ddo5.dt, org3_account) 278 | 279 | found = tracer_service.trace_dt_lifecycle(ddo1.dt, prefix=[]) 280 | job_list = tracer_service.job_list_format(found) 281 | print(job_list) 282 | tree = tracer_service.tree_format(found) 283 | if tree: 284 | tracer_service.print_tree(tree, indent=[], final_node=True) 285 | 286 | 287 | print(tracer_service.get_marketplace_stat()) 288 | print(asset_service.get_dt_details(ddo4.dt)) 289 | print(asset_service.get_dt_marketplace()) -------------------------------------------------------------------------------- /tests/test_web3.py: -------------------------------------------------------------------------------- 1 | # """Demo""" 2 | 3 | from datatoken.web3.wallet import Wallet 4 | from datatoken.model.keeper import Keeper 5 | from datatoken.model.constants import Role, Operation 6 | 7 | 8 | keeper = Keeper() 9 | 10 | system = Wallet( 11 | keeper.web3, private_key='0xd5b87119980bc80944760f1027d7643dc9bdfff8307cae1e831ff7f74f11ebd3') 12 | org1 = Wallet( 13 | keeper.web3, private_key='0xaca737275831497429a47bcd5766950a69a0fa8a1511a8cf656005de1c11546e') 14 | user1 = Wallet( 15 | keeper.web3, private_key='0xc68daf21bb748605396992aaf95a28eba74b5ec53706ce251e35957baccf7e80') 16 | user2 = Wallet( 17 | keeper.web3, private_key='0x858dc470755f747d50053b2e8e3bfca78d5fd9f75ef5a63398d4e8390792e026') 18 | 19 | ##### 20 | print('add role') 21 | if not keeper.role_controller.check_role( 22 | org1.address, Role.ROLE_ENTERPRISE): 23 | 
keeper.role_controller.add_role( 24 | org1.address, Role.ROLE_ENTERPRISE, system) 25 | 26 | print(keeper.role_controller.check_role( 27 | org1.address, Role.ROLE_ENTERPRISE)) 28 | 29 | print(keeper.role_controller.check_permission( 30 | org1.address, Operation.MODIFY_ASSET)) 31 | 32 | if not keeper.role_controller.check_role( 33 | user1.address, Role.ROLE_PROVIDER): 34 | keeper.role_controller.add_role( 35 | user1.address, Role.ROLE_PROVIDER, system) 36 | 37 | print(keeper.role_controller.check_role( 38 | user1.address, Role.ROLE_PROVIDER)) 39 | print(keeper.role_controller.check_permission( 40 | user1.address, Operation.MODIFY_AUTHORIZE)) 41 | 42 | ##### 43 | print('register enterprise') 44 | if not keeper.asset_provider.check_enterprise(org1.address): 45 | keeper.asset_provider.register_enterprise( 46 | org1.address, 'org1', 'test_org1', system) 47 | 48 | print(keeper.asset_provider.check_enterprise(org1.address)) 49 | print(keeper.asset_provider.get_enterprise(org1.address)) 50 | 51 | keeper.asset_provider.update_enterprise( 52 | org1.address, 'org1', 'test_org1_update', system) 53 | print(keeper.asset_provider.get_enterprise(org1.address)) 54 | 55 | ##### 56 | print('add provider') 57 | if not keeper.asset_provider.check_provider(user1.address): 58 | keeper.asset_provider.add_provider(user1.address, system) 59 | if not keeper.asset_provider.check_provider(user2.address): 60 | keeper.asset_provider.add_provider(user2.address, system) 61 | if not keeper.asset_provider.check_provider(org1.address): 62 | keeper.asset_provider.add_provider(org1.address, system) 63 | 64 | print(keeper.asset_provider.check_provider(user1.address)) 65 | 66 | ##### 67 | print('publish template') 68 | tid = '0x7465737400000000000000000000000000000000000000000000000000000000' 69 | checksum_test = '0x7465737400000000000000000000000000000000000000000000000000000000' 70 | 71 | if not keeper.op_template.is_template_exist(tid): 72 | keeper.op_template.publish_template( 73 | tid, 'op1', 
checksum_test, 'ipfs_path_url1', system) 74 | 75 | print(keeper.op_template.is_template_exist(tid)) 76 | print(keeper.op_template.get_template(tid)) 77 | 78 | print('update template') 79 | keeper.op_template.update_template( 80 | tid, 'op2', checksum_test, 'ipfs_path_url2', system) 81 | print(keeper.op_template.get_template(tid)) 82 | 83 | # ##### 84 | print('mint dts') 85 | dt1 = '0x7465737400000000000000000000000000000000000000000000000000000011' 86 | dt2 = '0x7465737400000000000000000000000000000000000000000000000000000022' 87 | dt3 = '0x7465737400000000000000000000000000000000000000000000000000000033' 88 | dt4 = '0x7465737400000000000000000000000000000000000000000000000000000044' 89 | checksum_test = '0x7465737400000000000000000000000000000000000000000000000000000000' 90 | 91 | keeper.dt_factory.mint_dt(dt1, user1.address, 92 | True, checksum_test, 'ipfs_path_url1', org1) 93 | print(keeper.dt_factory.check_dt_available(dt1)) 94 | print(keeper.dt_factory.get_dt_register(dt1)) 95 | 96 | keeper.dt_factory.mint_dt(dt2, user2.address, 97 | True, checksum_test, 'ipfs_path_url2', org1) 98 | print(keeper.dt_factory.check_dt_available(dt2)) 99 | print(keeper.dt_factory.get_dt_register(dt2)) 100 | 101 | keeper.dt_factory.mint_dt(dt3, org1.address, 102 | False, checksum_test, 'ipfs_path_url3', org1) 103 | print(keeper.dt_factory.check_dt_available(dt3)) 104 | print(keeper.dt_factory.get_dt_register(dt3)) 105 | 106 | keeper.dt_factory.mint_dt(dt4, org1.address, 107 | False, checksum_test, 'ipfs_path_url4', org1) 108 | 109 | print(keeper.dt_factory.get_owner_assets(org1.address)) 110 | 111 | ##### 112 | print('grant asset') 113 | keeper.dt_factory.grant_dt(dt1, dt3, user1) 114 | keeper.dt_factory.grant_dt(dt2, dt3, user2) 115 | keeper.dt_factory.grant_dt(dt1, dt4, user1) 116 | print(keeper.dt_factory.check_dt_perm(dt1, dt2)) 117 | print(keeper.dt_factory.check_dt_perm(dt1, dt3)) 118 | print(keeper.dt_factory.check_dt_perm(dt2, dt3)) 119 | 
print(keeper.dt_factory.check_dt_perm(dt1, dt4)) 120 | 121 | print(keeper.dt_factory.get_dt_grantees(keeper.web3.toBytes(hexstr=dt1))) 122 | 123 | print('mint composable dt, succeed') 124 | print(keeper.dt_factory.check_cdt_available(dt3)) 125 | keeper.dt_factory.start_compose_dt(dt3, [dt1, dt2], org1) 126 | print(keeper.dt_factory.check_cdt_available(dt3)) 127 | 128 | ##### 129 | print('create task and job') 130 | task_id = keeper.task_market.create_task('test', 'test_task', org1) 131 | print(task_id) 132 | print(keeper.task_market.get_task(task_id)) 133 | job_id = keeper.task_market.add_job(dt3, task_id, org1) 134 | print(job_id) 135 | print(keeper.task_market.get_job(job_id)) 136 | 137 | job_id = keeper.task_market.add_job(dt3, task_id, org1) 138 | print(job_id) 139 | print(keeper.task_market.get_job(job_id)) 140 | 141 | print(keeper.task_market.get_cdt_jobs(keeper.web3.toBytes(hexstr=dt3))) 142 | 143 | ##### 144 | print('marketplace info') 145 | print(keeper.dt_factory.get_dt_num()) 146 | print(keeper.op_template.get_template_num()) 147 | print(keeper.task_market.get_job_num()) 148 | print(keeper.task_market.get_task_num()) 149 | 150 | dt_idx, owners, issuers, checksums, isLeafs, ipfsPaths, _ = keeper.dt_factory.get_available_dts() 151 | print(dt_idx) 152 | 153 | print(keeper.asset_provider.get_issuer_names(issuers)) 154 | 155 | ##### 156 | print() 157 | print('successfully finished') 158 | --------------------------------------------------------------------------------