├── .github
│   ├── CODE_OF_CONDUCT.md
│   ├── CONTRIBUTING.md
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   ├── config.yml
│   │   └── feature_request.md
│   ├── SECURITY.md
│   ├── SUPPORT.md
│   ├── pull_request_template.md
│   └── workflows
│       ├── audit.yml
│       ├── build-and-test.yml
│       ├── clippy.yml
│       └── format.yml
├── .gitignore
├── .rustfmt.toml
├── Cargo.toml
├── LICENSE
├── README.md
├── documentation
│   ├── .gitignore
│   ├── EXTERNAL_DOCS_CONFIG
│   ├── EXTERNAL_DOCS_DROPDOWN_CONFIG
│   ├── README.md
│   ├── config.json
│   ├── docs
│   │   ├── contribute.md
│   │   ├── getting_started.md
│   │   ├── libraries
│   │   │   ├── c
│   │   │   │   ├── api_reference.md
│   │   │   │   ├── examples.md
│   │   │   │   └── getting_started.md
│   │   │   ├── overview.md
│   │   │   ├── rust
│   │   │   │   ├── api_reference.md
│   │   │   │   ├── examples.md
│   │   │   │   └── getting_started.md
│   │   │   └── wasm
│   │   │       ├── api_reference.md
│   │   │       ├── examples.md
│   │   │       └── getting_started.md
│   │   ├── overview.md
│   │   ├── specs.md
│   │   ├── theme
│   │   │   ├── book.js
│   │   │   ├── css
│   │   │   │   ├── chrome.css
│   │   │   │   ├── custom
│   │   │   │   │   └── header.css
│   │   │   │   ├── general.css
│   │   │   │   ├── print.css
│   │   │   │   └── variables.css
│   │   │   ├── favicon.png
│   │   │   ├── header.hbs
│   │   │   ├── highlight.css
│   │   │   ├── highlight.js
│   │   │   └── index.hbs
│   │   ├── troubleshooting.md
│   │   └── welcome.md
│   ├── docusaurus.config.js
│   ├── package-lock.json
│   ├── package.json
│   ├── sidebars.js
│   └── static
│       ├── .nojekyll
│       ├── css
│       │   └── custom.css
│       └── img
│           ├── bg-2ab9b09901d67717ad0179ee92d7a3c1.svg
│           ├── bg.svg
│           ├── iota_logo.svg
│           ├── libraries.png
│           ├── libraries
│           │   ├── accounts_addresses.svg
│           │   └── screenshot_faucet.png
│           ├── logo
│           │   ├── Logo_Swirl_Dark.png
│           │   └── favicon.ico
│           ├── overview
│           │   └── layered_overview.svg
│           └── specs
│               └── erdIOTA.svg
├── lets
│   ├── Cargo.toml
│   ├── README.md
│   ├── benches
│   │   └── tangle_clients.rs
│   └── src
│       ├── address.rs
│       ├── error.rs
│       ├── id
│       │   ├── did
│       │   │   ├── data_wrapper.rs
│       │   │   ├── did.rs
│       │   │   ├── mod.rs
│       │   │   └── url_info.rs
│       │   ├── ed25519.rs
│       │   ├── identifier.rs
│       │   ├── identity.rs
│       │   ├── mod.rs
│       │   ├── permission.rs
│       │   └── psk.rs
│       ├── lib.rs
│       ├── message
│       │   ├── content.rs
│       │   ├── hdf.rs
│       │   ├── message.rs
│       │   ├── mod.rs
│       │   ├── pcf.rs
│       │   ├── preparsed.rs
│       │   ├── topic.rs
│       │   ├── transport.rs
│       │   └── version.rs
│       └── transport
│           ├── bucket.rs
│           ├── mod.rs
│           ├── tangle.rs
│           └── utangle.rs
├── specification
│   └── Streams_Specification_1_0A.pdf
├── spongos
│   ├── Cargo.toml
│   ├── README.md
│   └── src
│       ├── core
│       │   ├── mod.rs
│       │   ├── prng.rs
│       │   ├── prp
│       │   │   ├── keccak.rs
│       │   │   └── mod.rs
│       │   ├── spongos.rs
│       │   └── tests.rs
│       ├── ddml
│       │   ├── commands
│       │   │   ├── mod.rs
│       │   │   ├── sizeof
│       │   │   │   ├── absorb.rs
│       │   │   │   ├── absorb_external.rs
│       │   │   │   ├── commit.rs
│       │   │   │   ├── dump.rs
│       │   │   │   ├── ed25519.rs
│       │   │   │   ├── fork.rs
│       │   │   │   ├── join.rs
│       │   │   │   ├── mask.rs
│       │   │   │   ├── mod.rs
│       │   │   │   ├── repeated.rs
│       │   │   │   ├── skip.rs
│       │   │   │   ├── squeeze.rs
│       │   │   │   └── x25519.rs
│       │   │   ├── test.rs
│       │   │   ├── unwrap
│       │   │   │   ├── absorb.rs
│       │   │   │   ├── absorb_external.rs
│       │   │   │   ├── commit.rs
│       │   │   │   ├── dump.rs
│       │   │   │   ├── ed25519.rs
│       │   │   │   ├── fork.rs
│       │   │   │   ├── guard.rs
│       │   │   │   ├── join.rs
│       │   │   │   ├── mask.rs
│       │   │   │   ├── mod.rs
│       │   │   │   ├── repeated.rs
│       │   │   │   ├── skip.rs
│       │   │   │   ├── squeeze.rs
│       │   │   │   └── x25519.rs
│       │   │   └── wrap
│       │   │       ├── absorb.rs
│       │   │       ├── absorb_external.rs
│       │   │       ├── commit.rs
│       │   │       ├── dump.rs
│       │   │       ├── ed25519.rs
│       │   │       ├── fork.rs
│       │   │       ├── guard.rs
│       │   │       ├── join.rs
│       │   │       ├── mask.rs
│       │   │       ├── mod.rs
│       │   │       ├── repeated.rs
│       │   │       ├── skip.rs
│       │   │       ├── squeeze.rs
│       │   │       └── x25519.rs
│       │   ├── io.rs
│       │   ├── mod.rs
│       │   ├── modifiers.rs
│       │   └── types
│       │       ├── bytes.rs
│       │       ├── mac.rs
│       │       ├── maybe.rs
│       │       ├── mod.rs
│       │       ├── nbytes.rs
│       │       ├── size.rs
│       │       └── uint.rs
│       ├── error.rs
│       └── lib.rs
├── streams.png
└── streams
    ├── Cargo.toml
    ├── README.md
    ├── examples
    │   └── full-example
    │       ├── example.env
    │       ├── main.rs
    │       └── scenarios
    │           ├── basic.rs
    │           ├── did.rs
    │           ├── filter.rs
    │           ├── lean.rs
    │           ├── mod.rs
    │           └── utils.rs
    └── src
        ├── api
        │   ├── cursor_store.rs
        │   ├── message.rs
        │   ├── message_builder.rs
        │   ├── messages.rs
        │   ├── mod.rs
        │   ├── selector.rs
        │   ├── send_response.rs
        │   ├── user.rs
        │   └── user_builder.rs
        ├── error.rs
        ├── lib.rs
        └── message
            ├── announcement.rs
            ├── branch_announcement.rs
            ├── keyload.rs
            ├── message_types.rs
            ├── mod.rs
            ├── signed_packet.rs
            ├── subscription.rs
            ├── tagged_packet.rs
            └── unsubscription.rs
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Report a bug in Streams
3 | about: Create a report to help us improve
4 | title: ""
5 | labels: bug
6 | ---
7 |
8 | ## Bug description
9 |
10 | Briefly describe the bug.
11 |
12 | ## Rust version
13 |
14 | Which version of Rust are you running?
15 |
16 | - Rust version:
17 |
18 | ## Hardware specification
19 |
20 | What hardware are you using?
21 |
22 | - Operating system:
23 | - RAM:
24 | - Cores:
25 | - Device:
26 |
27 | ## Steps to reproduce the bug
28 |
29 | Explain how the maintainer can reproduce the bug.
30 |
31 | 1.
32 | 2.
33 | 3.
34 |
35 | ## Expected behaviour
36 |
37 | Describe what you expect to happen.
38 |
39 | ## Actual behaviour
40 |
41 | Describe what actually happens.
42 |
43 | ## Errors
44 |
45 | Paste any errors that you see, including logs, error messages, or screenshots.
46 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | blank_issues_enabled: false
2 | contact_links:
3 | - name: Discord
4 | url: https://discord.iota.org/
5 | about: Please ask and answer questions here.
6 | - name: Security vulnerabilities
7 | url: security@iota.org
8 | about: Please report security vulnerabilities here.
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Request a feature or enhancement for Streams
3 | about: Request a feature or enhancement
4 | ---
5 |
6 | ## Description
7 |
8 | Briefly describe the feature or enhancement that you are requesting.
9 |
10 | ## Motivation
11 |
12 | Explain why this feature is needed.
13 |
14 | ## Requirements
15 |
16 | Write a list of what you want this feature to do.
17 |
18 | 1.
19 | 2.
20 | 3.
21 |
22 | ## Open questions (optional)
23 |
24 | Use this section to ask any questions that are related to the feature.
25 |
26 | ## Are you planning to do it yourself in a pull request?
27 |
28 | Yes/No.
29 |
--------------------------------------------------------------------------------
/.github/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Responsible disclosure policy
2 |
3 | At the IOTA Foundation, we consider the security of our systems a top priority. But no matter how much effort we put into system security, there can still be vulnerabilities present. If you've discovered a vulnerability, please follow the guidelines below to report it to our security team:
4 |
5 |
6 | E-mail your findings to security@iota.org. If the report contains highly sensitive information, please consider encrypting your findings using our security@iota.org (1F211CB6E8A158722F9053D3E27A933040CF05B9) PGP key.
7 |
8 | Please follow these rules when testing/reporting vulnerabilities:
9 |
10 | - Do not take advantage of the vulnerability you have discovered, for example by downloading more data than is necessary to demonstrate the vulnerability.
11 | - Do not read, modify, or delete data that you don't own.
12 | - Do not disclose the problem to third parties until it has been resolved.
13 | - The scope of the program is limited to technical vulnerabilities in the IOTA Foundation's web applications and open-source software packages distributed through GitHub. Please do not test physical security or attempt phishing attacks against our employees, and so on.
14 | - Out of concern for the availability of our services to all users, please do not attempt to carry out DoS attacks, leverage black hat SEO techniques, spam people, or do other similarly questionable things. We also discourage the use of any vulnerability testing tools that automatically generate significant volumes of traffic.
15 |
16 | What we promise:
17 |
18 | - We will respond to your report within 3 business days with our evaluation of the report and an expected resolution date.
19 | - If you have followed the instructions above, we will not take any legal action against you in regard to the report.
20 | - We will keep you informed during all stages of resolving the problem.
21 | - To show our appreciation for your effort and cooperation during the report, we will list your name and a link to a personal website/social network profile on the page below so that the public can know you've helped keep the IOTA Foundation secure.
22 |
23 | We sincerely appreciate the efforts of security researchers in keeping our community safe.
24 |
--------------------------------------------------------------------------------
/.github/SUPPORT.md:
--------------------------------------------------------------------------------
1 | # Community resources
2 |
3 | If you have a general or technical question, you can use one of the following resources instead of submitting an issue:
4 |
5 | - [**Developer documentation:**](https://wiki.iota.org/) For official information about developing with IOTA technology
6 | - [**Discord:**](https://discord.iota.org/) For real-time chats with the developers and community members
7 | - [**IOTA cafe:**](https://iota.cafe/) For technical discussions with the Research and Development Department at the IOTA Foundation
8 | - [**StackExchange:**](https://iota.stackexchange.com/) For technical and troubleshooting questions
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | # Description of change
2 |
3 | Please write a summary of your changes and why you made them. Be sure to reference any related issues by adding `fixes # (issue)`.
4 |
5 | ## Type of change
6 |
7 | Choose a type of change, and delete any options that are not relevant.
8 |
9 | - Bug fix (a non-breaking change which fixes an issue)
10 | - Enhancement (a non-breaking change which adds functionality)
11 | - Breaking change (fix or feature that would cause existing functionality to not work as expected)
12 | - Documentation Fix
13 |
14 | ## How the change has been tested
15 |
16 | Describe the tests that you ran to verify your changes.
17 |
18 | Make sure to provide instructions for the maintainer as well as any relevant configurations.
19 |
20 | ## Change checklist
21 |
22 | Add an `x` to the boxes that are relevant to your changes, and delete any items that are not.
23 |
24 | - [ ] I have followed the contribution guidelines for this project
25 | - [ ] I have performed a self-review of my own code
26 | - [ ] I have commented my code, particularly in hard-to-understand areas
27 | - [ ] I have made corresponding changes to the documentation
28 | - [ ] I have added tests that prove my fix is effective or that my feature works
29 | - [ ] New and existing unit tests pass locally with my changes
30 |
--------------------------------------------------------------------------------
/.github/workflows/audit.yml:
--------------------------------------------------------------------------------
1 | name: Audit
2 |
3 | on:
4 | schedule:
5 | - cron: "0 0 * * *"
6 | push:
7 | branches:
8 | - master
9 | - develop
10 | - v2.0-dev
11 | pull_request:
12 | branches:
13 | - master
14 | - develop
15 | - v2.0-dev
16 |
17 | jobs:
18 | audit:
19 | runs-on: ubuntu-latest
20 | steps:
21 | - uses: actions/checkout@v2
22 | - uses: actions-rs/audit-check@v1
23 | with:
24 | token: ${{ secrets.GITHUB_TOKEN }}
25 |
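The advisory scan that this workflow performs through `actions-rs/audit-check` can also be reproduced locally. A minimal sketch, assuming `cargo-audit` is not yet installed:

```bash
# Install the RustSec auditing tool, generate a lockfile if one is not present,
# then scan the dependency tree for known advisories
cargo install cargo-audit
cargo generate-lockfile
cargo audit
```
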
--------------------------------------------------------------------------------
/.github/workflows/build-and-test.yml:
--------------------------------------------------------------------------------
1 | name: Build and run tests
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 | - develop
8 | - v2.0-dev
9 | pull_request:
10 | branches:
11 | - master
12 | - develop
13 | - v2.0-dev
14 | paths-ignore:
15 | - "specification/**"
16 | - "docs/**"
17 |
18 | jobs:
19 | crate:
20 | runs-on: ${{ matrix.os }}
21 | strategy:
22 | fail-fast: false
23 | matrix:
24 | os: [ubuntu-latest, macos-latest, windows-latest]
25 |
26 | steps:
27 | - uses: actions/checkout@v2
28 |
29 | - name: Install toolchain
30 | uses: actions-rs/toolchain@v1
31 | with:
32 | toolchain: stable
33 | override: true
34 |
35 | - name: Get current date
36 | run: echo "CURRENT_DATE=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
37 | if: matrix.os == 'macos-latest' || matrix.os == 'ubuntu-latest'
38 |
39 | - name: Get current date
40 | if: matrix.os == 'windows-latest'
41 | run: echo "CURRENT_DATE=$(Get-Date -Format "yyyy-MM-dd")" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
42 |
43 | - name: Install required packages (Ubuntu)
44 | if: matrix.os == 'ubuntu-latest'
45 | run: |
46 | sudo apt-get update
47 | sudo apt-get install libudev-dev libusb-1.0-0-dev
48 |
49 | - name: Cache cargo
50 | uses: actions/cache@v2
51 | with:
52 | path: |
53 | ~/.cargo/bin/
54 | ~/.cargo/registry/index/
55 | ~/.cargo/registry/cache/
56 | ~/.cargo/git/db/
57 | target/
58 | key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
59 | - name: Run tests
60 | timeout-minutes: 40
61 | uses: actions-rs/cargo@v1
62 | with:
63 | command: test
64 | args: --all-features --release
65 | - name: Run example (Ubuntu only)
66 | if: matrix.os == 'ubuntu-latest'
67 | uses: actions-rs/cargo@v1
68 | with:
69 | command: run
70 | args: --release --example full-example
71 |
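The test and example steps above map to plain cargo invocations, so the same checks can be run locally from the workspace root (a sketch mirroring the workflow arguments):

```bash
# Mirror the 'Run tests' and 'Run example (Ubuntu only)' steps of this workflow
cargo test --all-features --release
cargo run --release --example full-example
```
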
--------------------------------------------------------------------------------
/.github/workflows/clippy.yml:
--------------------------------------------------------------------------------
1 | name: Clippy
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 | - develop
8 | - v2.0-dev
9 | pull_request:
10 | branches:
11 | - master
12 | - develop
13 | - v2.0-dev
14 | paths-ignore:
15 | - "docs/**"
16 | - "specification/**"
17 |
18 | jobs:
19 | clippy:
20 | runs-on: ubuntu-latest
21 | strategy:
22 | fail-fast: false
23 |
24 | steps:
25 | - uses: actions/checkout@v2
26 | - name: Install clippy with stable toolchain
27 | uses: actions-rs/toolchain@v1
28 | with:
29 | profile: minimal
30 | toolchain: stable
31 | override: true
32 | components: clippy
33 | - uses: actions-rs/clippy-check@v1
34 | with:
35 | token: ${{ secrets.GITHUB_TOKEN }}
36 | args: --all-features
37 | name: clippy check
38 |
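Locally, the equivalent lint pass is a single cargo command (a sketch using the same `--all-features` argument as the workflow):

```bash
# Run clippy across the workspace with every feature enabled
cargo clippy --all-features
```
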
--------------------------------------------------------------------------------
/.github/workflows/format.yml:
--------------------------------------------------------------------------------
1 | name: Format
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 | - develop
8 | - v2.0-dev
9 | pull_request:
10 | branches:
11 | - master
12 | - develop
13 | - v2.0-dev
14 | paths-ignore:
15 | - "docs/**"
16 | - "specification/**"
17 |
18 | jobs:
19 | format:
20 | runs-on: ubuntu-latest
21 | strategy:
22 | fail-fast: false
23 | steps:
24 | - uses: actions/checkout@v2
25 | - name: Install rustfmt with nightly toolchain
26 | uses: actions-rs/toolchain@v1
27 | with:
28 | profile: minimal
29 | toolchain: nightly
30 | override: true
31 | components: rustfmt
32 | - uses: actions-rs/cargo@v1
33 | with:
34 | command: fmt
35 | args: --check
36 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /target
2 | /debug
3 | /act
4 | **/target/
5 | **/*.rs.bk
6 | *.lock
7 | *.rs~
8 | *.toml~
9 | *.md~
10 | book
11 | .vscode
12 | .project
13 | .env
14 | .DS_Store
--------------------------------------------------------------------------------
/.rustfmt.toml:
--------------------------------------------------------------------------------
1 | unstable_features = true
2 | imports_granularity = "crate"
3 | max_width = 120
4 | comment_width = 100 # make sure examples in docs fit when rendered
5 | wrap_comments = true
6 | format_code_in_doc_comments = true
7 | format_macro_bodies = true
8 | format_macro_matchers = true
9 | normalize_comments = true
10 | normalize_doc_attributes = true
11 | use_field_init_shorthand = true
12 | version = "Two"
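Because `unstable_features = true` is set, this configuration only takes full effect with a nightly `rustfmt`, which is also why the Format workflow installs the nightly toolchain. A local check might look like this (a sketch, assuming a nightly toolchain is installed):

```bash
# Verify formatting without rewriting files, using nightly rustfmt
cargo +nightly fmt --all -- --check
```
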
--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
1 | [workspace]
2 |
3 | members = [
4 | "spongos",
5 | "lets",
6 | "streams",
7 | ]
8 |
9 | resolver = "2"
10 |
11 | [profile.dev]
12 | incremental = true
13 |
--------------------------------------------------------------------------------
/documentation/.gitignore:
--------------------------------------------------------------------------------
1 | # Dependencies
2 | /node_modules
3 |
4 | # Production
5 | /build
6 |
7 | # Generated files
8 | .docusaurus
9 | .cache-loader
10 |
11 | # Misc
12 | .DS_Store
13 | .env.local
14 | .env.development.local
15 | .env.test.local
16 | .env.production.local
17 |
18 | npm-debug.log*
19 | yarn-debug.log*
20 | yarn-error.log*
21 |
22 | local-wiki
23 |
--------------------------------------------------------------------------------
/documentation/EXTERNAL_DOCS_CONFIG:
--------------------------------------------------------------------------------
1 | [
2 | "@docusaurus/plugin-content-docs",
3 | {
4 | id: "streams",
5 | path: "external/streams/documentation/docs",
6 | routeBasePath: "streams",
7 | sidebarPath: require.resolve("./external/streams/documentation/sidebars.js"),
8 | }
9 | ],
10 |
--------------------------------------------------------------------------------
/documentation/EXTERNAL_DOCS_DROPDOWN_CONFIG:
--------------------------------------------------------------------------------
1 | {
2 | label: "Streams",
3 | to: "streams/welcome",
4 | className: "icon-streams",
5 | activeBaseRegex: 'streams/.*'
6 | },
7 |
--------------------------------------------------------------------------------
/documentation/README.md:
--------------------------------------------------------------------------------
1 | # Documentation
2 |
3 | The documentation is built using [Docusaurus 2](https://docusaurus.io/). The deployment is done through a centralized build from [IOTA WIKI](https://github.com/iota-community/iota-wiki). To run a local instance the [IOTA WIKI CLI](https://github.com/iota-community/iota-wiki-cli) is used.
4 |
5 | ## Prerequisites
6 |
7 | - [Node.js v14.14+](https://nodejs.org/en/)
8 | - [yarn](https://yarnpkg.com/getting-started/install)
9 | - `IOTA WIKI CLI` installed with npm
10 |
11 | ## Installation
12 |
13 | ```console
14 | npm i
15 | npm run setup
16 | ```
17 |
18 | This command checks out a local copy of the wiki and creates links to the content.
19 |
20 | ## Local Development
21 |
22 | ```console
23 | npm start
24 | ```
25 |
26 | This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server.
27 |
28 | ## Tear Down
29 |
30 | ```console
31 | npm run clean
32 | ```
33 |
34 | This command deletes the local wiki and local links.
35 |
36 |
37 | ## Including .md files
38 |
39 | ```console
40 | {@import <path/to/file.md>}
41 | ```
42 |
43 | Example:
44 |
45 | ```console
46 | {@import ../../../../bindings/wasm/docs/api-reference.md}
47 | ```
--------------------------------------------------------------------------------
/documentation/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "repoName": "streams",
3 | "contentFolder": "documentation",
4 | "localWikiFolder": "local-wiki",
5 | "excludeList": ["node_modules", "target", ".git/", "local-wiki"]
6 | }
7 |
--------------------------------------------------------------------------------
/documentation/docs/contribute.md:
--------------------------------------------------------------------------------
1 | ---
2 | description: Contribute to the IOTA Streams library by joining the IOTA Libraries Initiative, contributing to the official GitHub repository, or sharing your knowledge on Discord.
3 | image: /img/logo/iota_mark_light.png
4 | keywords:
5 | - join
6 | - documentation
7 | - project
8 | - contribute
9 | - discord
10 | - GitHub
11 | ---
12 | # Contribute to the Project
13 |
14 | **Thanks for thinking about contributing to the project! We have the following ways that you can contribute.**
15 |
16 | ## Join the IOTA Libraries Initiative
17 |
18 | The [IOTA Libraries Initiative](https://github.com/iota-community/X-Team_IOTA_Streams) is a collaborative effort to help improve the developer experience through:
19 |
20 | - Quality assurance and review
21 | - Documentation
22 | - Code samples
23 |
24 | If you'd like to get involved, join the #experience channel on [Discord](https://discord.iota.org).
25 |
26 | ## Contribute to the project's GitHub repository
27 |
28 | All the code is open source and hosted on [GitHub](https://github.com/iotaledger/streams) where you can do the following:
29 |
30 | - Report a bug
31 | - Suggest a new feature
32 | - Contribute to the documentation
33 |
34 | ## Contribute to the documentation
35 |
36 | This documentation is also open source and hosted on GitHub.
37 |
38 | If you want to contribute new documentation or fix an error, see the [contribution guidelines](https://github.com/iotaledger/documentation/blob/develop/.github/CONTRIBUTING.md).
39 |
40 | ## Share your knowledge
41 |
42 | Helping others is an important part of any open source ecosystem.
43 |
44 | By sharing your knowledge with others, you can provide a lot of value to the community and maybe inspire someone else to learn and contribute.
45 |
46 | Take a look at what discussions are going on in the #clients-discussion channel on [Discord](https://discord.iota.org).
47 |
48 | Thanks :heart:
--------------------------------------------------------------------------------
/documentation/docs/getting_started.md:
--------------------------------------------------------------------------------
1 | ---
2 | description: Choose your binding and get started with the Streams Library.
3 | image: /img/logo/iota_mark_light.png
4 | keywords:
5 | - rust
6 | - wasm
7 | - c
8 | - IDE
9 | ---
10 | # Getting Started
11 |
12 | To check out our examples and write and test your own code, you will need an IDE or a code editor of your choice and a stable internet connection. You also need to set up your environment by following the instructions for one of the languages: [Rust](./libraries/rust/getting_started.md), [Wasm](./libraries/wasm/getting_started.md) or [C](./libraries/c/getting_started.md).
13 |
14 | We assume that you already know the basics of the programming language that you chose. Companies and communities behind each language provide their own documentation for beginners: [Rust](https://www.rust-lang.org/learn/get-started), [JavaScript](https://www.w3schools.com/js/) and [Node.js](https://nodejs.org/en/docs/guides/). You could start with that first, or refer to these guides as you read through our Streams documentation. If you have never programmed in your life, MIT has published an open [introductory course to programming](https://ocw.mit.edu/courses/intro-programming/#general). Check it out!
--------------------------------------------------------------------------------
/documentation/docs/libraries/c/getting_started.md:
--------------------------------------------------------------------------------
1 | ---
2 | description: Getting started with the official IOTA Client Library C binding.
3 | image: /img/logo/iota_mark_light.png
4 | keywords:
5 | - C
6 | - cmake
7 | - std
8 | ---
9 | # Getting Started
10 | The C bindings allow you to build a Streams API which can be pulled into other languages.
11 | The streams instance underlying the bindings is built with the `sync-client` flag to
12 | ensure a compatible client interface using the `iota.rs iota-client` crate.
13 |
14 | Before building anything you'll need to make sure you have `cmake` installed on your
15 | machine.
16 |
17 | To build the library, first make sure you're in the c directory:
18 | ```
19 | cd bindings/c
20 | ```
21 | Update the flags in the `CMakeLists.txt` (or pass them on the command line, as shown below) and run ```cmake .``` to
22 | prepare the installation files.
23 |
24 | #### Options for CMakeLists.txt
25 | - `NO_STD`: Enable no_std build, without iota_client (when ON, `SYNC_CLIENT` isn't supported)
26 | - `SYNC_CLIENT`: Enable sync transport via iota_client, otherwise it's going to be Bucket which can only be used for tests
27 | - `STATIC`: Build static library when ON, otherwise dynamic library
28 | - `RELEASE`: Build in release or debug mode (when ON, builds release, when OFF, build debug)
29 |
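As an alternative to editing `CMakeLists.txt` directly, the options above can usually be set on the command line. A sketch, assuming the flags are exposed as regular CMake cache variables (the chosen values are only illustrative):

```bash
# Configure a release build with the sync client enabled, then prepare the build files
cmake -DSYNC_CLIENT=ON -DRELEASE=ON -DSTATIC=OFF -DNO_STD=OFF .
```
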
30 | To build the library run:
31 | ```bash
32 | make
33 | ```
34 |
35 | This generates a binary library to be included into a project. This can be either:
36 | - `iota_streams_c_static`
37 | - `iota_streams_c.so` (Unix)
38 | - `iota_streams_c.dll` (Windows)
39 |
40 | An example of the header file can be found in `include/channels.h`.
41 |
42 | ### Starting a Channel
43 | Once the package has been built, you can pull it into a script file like so:
44 | ```c
45 | #include "iota_streams/channels.h"
46 | #include <stdio.h>
47 |
48 | int main()
49 | {
50 | uint8_t multi_branching = 0;
51 | char seed[] = "Some unique seed";
52 | char const encoding[] = "utf-8";
53 | const size_t size = 1024;
54 | char const *url = "https://chrysalis-nodes.iota.org";
55 |
56 | transport_t *tsp = tsp_client_new_from_url(url);
57 | // Author constructor requires: (seed, encoding, payload size, multi branching, transport client)
58 | author_t *auth = auth_new(seed, encoding, size, multi_branching, tsp);
59 | address_t const *ann_link = auth_send_announce(auth);
60 | printf("Announcement message sent");
61 |
62 | char const *ann_address_inst_str = get_address_inst_str(ann_link);
63 | char const *ann_address_id_str = get_address_id_str(ann_link);
64 | // Link used by subscribers to attach to instance
65 | printf("Link: %s:%s\n", ann_address_inst_str, ann_address_id_str);
66 |
67 | // Clean up
68 | drop_str(ann_address_inst_str);
69 | drop_str(ann_address_id_str);
70 | drop_address(ann_link);
71 | auth_drop(auth);
72 | tsp_drop(tsp);
73 | }
74 | ```
75 |
--------------------------------------------------------------------------------
/documentation/docs/libraries/overview.md:
--------------------------------------------------------------------------------
1 | ---
2 | description: The `streams` library is written in Rust. You can also find bindings written for wasm and C.
3 | image: /img/overview/layered_overview.svg
4 | keywords:
5 | - bindings
6 | - library
7 | - rust
8 | - wasm
9 | - c
10 | ---
11 | # IOTA Streams libraries
12 |
13 | The `streams` library is currently available in the following languages:
14 |
15 | - [Rust](rust/getting_started)
16 | - [Wasm](wasm/getting_started)
17 | - [C](c/getting_started)
18 |
19 | ## Getting Started
20 |
21 | The recommended approach to start your interactions with IOTA is to use a developer network. A public API load balancer is provided here: [api.lb-0.h.chrysalis-devnet.iota.cafe](https://api.lb-0.h.chrysalis-devnet.iota.cafe)
22 |
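Before pointing any example at the devnet, you can check that the endpoint is reachable. A minimal sketch, assuming the standard Chrysalis REST API route `/api/v1/info`:

```bash
# Query basic node information from the devnet load balancer
curl https://api.lb-0.h.chrysalis-devnet.iota.cafe/api/v1/info
```
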
23 | The test network explorer is available at the [IOTA Tangle explorer](https://explorer.iota.org/devnet/).
--------------------------------------------------------------------------------
/documentation/docs/libraries/rust/api_reference.md:
--------------------------------------------------------------------------------
1 | # API Reference
2 |
3 | Users are broken down into two types: `Author` and `Subscriber`. An `Author` is the user
4 | that generates the channel, accepts subscription requests, and can grant and restrict access.
5 | A `Subscriber` is an instance that can attach to a channel to read from and write to it,
6 | depending on the access privileges they've been granted.
7 |
8 | You can generate the API reference locally with:
9 | ```
10 | cargo doc --open
11 | ```
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/documentation/docs/libraries/rust/examples.md:
--------------------------------------------------------------------------------
1 | ---
2 | description: Official IOTA Streams Rust API examples.
3 | image: /img/logo/iota_mark_light.png
4 | keywords:
5 | - api
6 | - Rust
7 | - examples
8 | ---
9 | # Examples
10 | A list of example implementations can be found [here](https://github.com/iotaledger/streams-examples).
11 |
12 | Additionally, there are a couple of local examples present [here](https://github.com/iotaledger/streams/tree/develop/examples).
13 | To run these examples, simply update the `.env` file with the node URL you would like
14 | to use, and run the command:
15 | ```
16 | cargo run --release
17 | ```
18 |
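In this repository, the bundled `full-example` can also be run directly from the workspace root. A sketch, assuming a `.env` file has been prepared from `streams/examples/full-example/example.env`:

```bash
# Run the workspace's full example in release mode (same command as the CI workflow)
cargo run --release --example full-example
```
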
--------------------------------------------------------------------------------
/documentation/docs/libraries/wasm/examples.md:
--------------------------------------------------------------------------------
1 | ---
2 | description: Official IOTA Streams Wasm API examples.
3 | image: /img/logo/iota_mark_light.png
4 | keywords:
5 | - api
6 | - wasm
7 | - js
8 | - javascript
9 | - node
10 | - examples
11 | ---
12 | # Examples
13 | An overview example of the available API tools can be found [here](../../../../bindings/wasm/examples/node.js).
14 | The general API is simply an abstraction over the Rust library, so the examples found
15 | [here](../rust/examples.md) still apply (with some minor modifications; see the [API reference](api_reference.md)).
16 |
17 | ## Core Functionality
18 |
19 | ### Author Generation
20 | Create an Author and generate a new channel:
21 | ```javascript
22 | let node = "https://chrysalis-nodes.iota.org/";
23 | let options = new streams.SendOptions(node, 3, true, 1);
24 | let multi_branching = false;
25 | let auth = new streams.Author("Unique Seed", options, multi_branching);
26 |
27 | let response = await auth.clone().send_announce();
28 | let ann_link = response.get_link();
29 | // Link used by subscribers to attach to instance
30 | console.log("Announced at: ", ann_link.to_string());
31 | ```
32 |
33 | ### Subscriber Generation
34 | Create a Subscriber and attach to a channel:
35 | ```javascript
36 | let node = "https://chrysalis-nodes.iota.org/";
37 | let options = new streams.SendOptions(node, 3, true, 1);
38 | let sub = new streams.Subscriber("Unique Seed", options);
39 |
40 | let ann_link = streams.Address.from_str("AnnouncementLink:Here");
41 | await sub.clone().receive_announcement();
42 | ```
43 |
44 | ### Subscription
45 | Subscriber sends a subscription message:
46 | ```javascript
47 | let response = sub.clone().send_subscribe(ann_link);
48 | let sub_link = response.get_link();
49 | // Link to be provided to the Author for subscription
50 | console.log("Subscription link: ", sub_link.to_string());
51 | ```
52 | Author accepts and processes subscription:
53 | ```javascript
54 | let sub_link = streams.Address.from_str("SubLink:Here");
55 | await author.clone().receive_subscribe(sub_link);
56 | ```
57 |
58 | ### Keyload
59 | Author sends a keyload for all participants in the channel:
60 | ```javascript
61 | let response = author.clone().send_keyload_for_everyone(ann_link);
62 | let keyload_link = response.get_link();
63 | // Keyload message can now act as starting point for a protected branch
64 | console.log("Keyload link for everyone: ", keyload_link.to_string());
65 | ```
66 | Author sends a keyload for just one subscriber in the channel:
67 | ```javascript
68 | let response = author.clone().send_keyload(ann_link, [], ["SubA_PublicKey"]);
69 | let sub_A_keyload_link = response.get_link();
70 | // Keyload message can now act as starting point for a protected branch
71 | console.log("Keyload link for SubA: ", sub_A_keyload_link.to_string());
72 | ```
73 |
74 | ### Sending Messages
75 | Messages are required to be linked to a previous message that the user had access to.
76 | In a single-branch implementation this means the latest message in the branch; in
77 | multi-branch implementations, this can mean any message in a branch that they have had
78 | access to.
79 |
80 | *Note: In a multi-publisher implementation (i.e. multiple publishers in a single branch),
81 | it is required that each publisher make sure to sync their state before publishing to ensure
82 | that the instance stays in sync with the other publishers*
83 |
84 | ```javascript
85 | await sub.clone().syncState();
86 | let masked_payload = to_bytes("Masked Payload"); // Payloads must be converted to bytes
87 | let public_payload = to_bytes("Public Payload");
88 |
89 | let response = subA.clone().send_signed_packet(
90 | sub_A_keyload_link,
91 | public_payload,
92 | masked_payload
93 | );
94 | let msg_link = response.get_link();
95 | console.log("New message sent by Sub A at: ", msg_link.to_string());
96 | ```
97 |
98 | ### Message Fetching
99 | #### Forward
100 | When new messages are available to retrieve from the channel, you can fetch the next
101 | message sent by each publisher like so:
102 | ```javascript
103 | let next_msgs = await sub.clone().fetchNextMsgs();
104 |
105 | for (const msg of next_msgs) {
106 | console.log("Found a message...");
107 | console.log(
108 | "Public: ",
109 | from_bytes(msg.get_message().get_public_payload()),
110 | "\tMasked: ",
111 | from_bytes(msg.get_message().get_masked_payload())
112 | );
113 | }
114 | ```
115 |
116 | If no new messages are present, the returned array will be empty.
117 |
118 | You can also fetch all previous messages:
119 |
120 | #### Backwards
121 | ```javascript
122 | let num_messages = 10;
123 | let prev_msgs = sub.clone().fetch_prev_msgs(latest_msg_link, num_messages);
124 |
125 | for (var i = 0; i < prev_msgs.length; i++) {
126 | console.log("Found a message...");
127 | console.log(
128 | "Public: ",
129 | from_bytes(prev_msgs[i].get_message().get_public_payload()),
130 | "\tMasked: ",
131 | from_bytes(prev_msgs[i].get_message().get_masked_payload())
132 | );
133 | }
134 | ```
135 |
--------------------------------------------------------------------------------
/documentation/docs/libraries/wasm/getting_started.md:
--------------------------------------------------------------------------------
1 | # Getting Started
2 | Before building anything you'll need to make sure you have `npm` installed on your
3 | machine.
4 |
5 | ### Install the library
6 | To install the library, you could run:
7 |
8 | ```npm i @iota/streams```
9 |
10 |
11 | ### Starting a Channel
12 | Once the package has been built, you can pull it into a script file like so:
13 | ```javascript
14 | const streams = require("@iota/streams/node");
15 |
16 | let node = "https://chrysalis-nodes.iota.org/";
17 |
18 | // Options include: (node-url, local pow)
19 | let options = new streams.SendOptions(node, true);
20 |
21 | let author = new streams.Author("Unique Seed Here", options.clone(), streams.ChannelType.MultiBranch );
22 |
23 | // Response formatting: {link, sequence link, msg }
24 | let response = await author.clone().send_announce();
25 |
26 | let ann_link = response.link;
27 |
28 | console.log("Channel Announcement at: ", ann_link.toString());
29 | ```
30 |
--------------------------------------------------------------------------------
/documentation/docs/specs.md:
--------------------------------------------------------------------------------
1 | ---
2 | description: "The Streams framework is intended to be a secure message verification and protection protocol
3 | for sending data over a given transport layer"
4 | image: /img/logo/wallet_light.png
5 | keywords:
6 | - Rust
7 | - streams
8 | - spec
9 | ---
10 | # Specifications document
11 |
12 | The specification document is kept separate from these docs and can be found here:
13 | [Spec.pdf](https://github.com/iotaledger/streams/blob/develop/specification/Streams_Specification_1_0A.pdf)
14 |
--------------------------------------------------------------------------------
/documentation/docs/theme/css/custom/header.css:
--------------------------------------------------------------------------------
1 | .logo-wrapper {
2 | /* transform: translateX(-50%); */
3 | margin-top: 60px;
4 | margin-bottom: -50px;
5 |
6 | }
7 |
8 | #logo {
9 | display: block;
10 | margin: auto;
11 | height: 30px;
12 | }
--------------------------------------------------------------------------------
/documentation/docs/theme/css/general.css:
--------------------------------------------------------------------------------
1 | /* Base styles and content styles */
2 |
3 | @import 'variables.css';
4 |
5 | :root {
6 | /* Browser default font-size is 16px, this way 1 rem = 10px */
7 | font-size: 62.5%;
8 | }
9 |
10 | html {
11 | font-family: "Open Sans", sans-serif;
12 | color: var(--fg);
13 | background-color: var(--bg);
14 | text-size-adjust: none;
15 | }
16 |
17 | body {
18 | margin: 0;
19 | font-size: 1.6rem;
20 | overflow-x: hidden;
21 | }
22 |
23 | code {
24 | font-family: "Source Code Pro", Consolas, "Ubuntu Mono", Menlo, "DejaVu Sans Mono", monospace, monospace !important;
25 | font-size: 0.875em; /* please adjust the ace font size accordingly in editor.js */
26 | }
27 |
28 | /* Don't change font size in headers. */
29 | h1 code, h2 code, h3 code, h4 code, h5 code, h6 code {
30 | font-size: unset;
31 | }
32 |
33 | .left { float: left; }
34 | .right { float: right; }
35 | .boring { opacity: 0.6; }
36 | .hide-boring .boring { display: none; }
37 | .hidden { display: none; }
38 |
39 | h2, h3 { margin-top: 2.5em; }
40 | h4, h5 { margin-top: 2em; }
41 |
42 | .header + .header h3,
43 | .header + .header h4,
44 | .header + .header h5 {
45 | margin-top: 1em;
46 | }
47 |
48 | h1 a.header:target::before,
49 | h2 a.header:target::before,
50 | h3 a.header:target::before,
51 | h4 a.header:target::before {
52 | display: inline-block;
53 | content: "»";
54 | margin-left: -30px;
55 | width: 30px;
56 | }
57 |
58 | h1 a.header:target,
59 | h2 a.header:target,
60 | h3 a.header:target,
61 | h4 a.header:target {
62 | scroll-margin-top: calc(var(--menu-bar-height) + 0.5em);
63 | }
64 |
65 | .page {
66 | outline: 0;
67 | padding: 0 var(--page-padding);
68 | margin-top: calc(0px - var(--menu-bar-height)); /* Compensate for the #menu-bar-hover-placeholder */
69 | }
70 | .page-wrapper {
71 | box-sizing: border-box;
72 | }
73 | .js:not(.sidebar-resizing) .page-wrapper {
74 | transition: margin-left 0.3s ease, transform 0.3s ease; /* Animation: slide away */
75 | }
76 |
77 | .content {
78 | overflow-y: auto;
79 | padding: 0 15px;
80 | padding-bottom: 50px;
81 | }
82 | .content main {
83 | margin-left: auto;
84 | margin-right: auto;
85 | max-width: var(--content-max-width);
86 | }
87 | .content p { line-height: 1.45em; }
88 | .content ol { line-height: 1.45em; }
89 | .content ul { line-height: 1.45em; }
90 | .content a { text-decoration: none; }
91 | .content a:hover { text-decoration: underline; }
92 | .content img { max-width: 100%; }
93 | .content .header:link,
94 | .content .header:visited {
95 | color: var(--fg);
96 | }
97 | .content .header:link,
98 | .content .header:visited:hover {
99 | text-decoration: none;
100 | }
101 |
102 | table {
103 | margin: 0 auto;
104 | border-collapse: collapse;
105 | }
106 | table td {
107 | padding: 3px 20px;
108 | border: 1px var(--table-border-color) solid;
109 | }
110 | table thead {
111 | background: var(--table-header-bg);
112 | }
113 | table thead td {
114 | font-weight: 700;
115 | border: none;
116 | }
117 | table thead th {
118 | padding: 3px 20px;
119 | }
120 | table thead tr {
121 | border: 1px var(--table-header-bg) solid;
122 | }
123 | /* Alternate background colors for rows */
124 | table tbody tr:nth-child(2n) {
125 | background: var(--table-alternate-bg);
126 | }
127 |
128 |
129 | blockquote {
130 | margin: 20px 0;
131 | padding: 0 20px;
132 | color: var(--fg);
133 | background-color: var(--quote-bg);
134 | border-top: .1em solid var(--quote-border);
135 | border-bottom: .1em solid var(--quote-border);
136 | }
137 |
138 |
139 | :not(.footnote-definition) + .footnote-definition,
140 | .footnote-definition + :not(.footnote-definition) {
141 | margin-top: 2em;
142 | }
143 | .footnote-definition {
144 | font-size: 0.9em;
145 | margin: 0.5em 0;
146 | }
147 | .footnote-definition p {
148 | display: inline;
149 | }
150 |
151 | .tooltiptext {
152 | position: absolute;
153 | visibility: hidden;
154 | color: #fff;
155 | background-color: #333;
156 | transform: translateX(-50%); /* Center by moving tooltip 50% of its width left */
157 | left: -8px; /* Half of the width of the icon */
158 | top: -35px;
159 | font-size: 0.8em;
160 | text-align: center;
161 | border-radius: 6px;
162 | padding: 5px 8px;
163 | margin: 5px;
164 | z-index: 1000;
165 | }
166 | .tooltipped .tooltiptext {
167 | visibility: visible;
168 | }
169 |
--------------------------------------------------------------------------------
/documentation/docs/theme/css/print.css:
--------------------------------------------------------------------------------
1 |
2 | #sidebar,
3 | #menu-bar,
4 | .nav-chapters,
5 | .mobile-nav-chapters {
6 | display: none;
7 | }
8 |
9 | #page-wrapper.page-wrapper {
10 | transform: none;
11 | margin-left: 0px;
12 | overflow-y: initial;
13 | }
14 |
15 | #content {
16 | max-width: none;
17 | margin: 0;
18 | padding: 0;
19 | }
20 |
21 | .page {
22 | overflow-y: initial;
23 | }
24 |
25 | code {
26 | background-color: #666666;
27 | border-radius: 5px;
28 |
29 | /* Force background to be printed in Chrome */
30 | -webkit-print-color-adjust: exact;
31 | }
32 |
33 | pre > .buttons {
34 | z-index: 2;
35 | }
36 |
37 | a, a:visited, a:active, a:hover {
38 | color: #4183c4;
39 | text-decoration: none;
40 | }
41 |
42 | h1, h2, h3, h4, h5, h6 {
43 | page-break-inside: avoid;
44 | page-break-after: avoid;
45 | }
46 |
47 | pre, code {
48 | page-break-inside: avoid;
49 | white-space: pre-wrap;
50 | }
51 |
52 | .fa {
53 | display: none !important;
54 | }
55 |
--------------------------------------------------------------------------------
/documentation/docs/theme/favicon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iotaledger-archive/streams/deb3f2dd4b52873c7721bd70c42dd69f15dd6999/documentation/docs/theme/favicon.png
--------------------------------------------------------------------------------
/documentation/docs/theme/highlight.css:
--------------------------------------------------------------------------------
1 | /* Base16 Atelier Dune Light - Theme */
2 | /* by Bram de Haan (http://atelierbram.github.io/syntax-highlighting/atelier-schemes/dune) */
3 | /* Original Base16 color scheme by Chris Kempson (https://github.com/chriskempson/base16) */
4 |
5 | /* Atelier-Dune Comment */
6 | .hljs-comment,
7 | .hljs-quote {
8 | color: #AAA;
9 | }
10 |
11 | /* Atelier-Dune Red */
12 | .hljs-variable,
13 | .hljs-template-variable,
14 | .hljs-attribute,
15 | .hljs-tag,
16 | .hljs-name,
17 | .hljs-regexp,
18 | .hljs-link,
19 | .hljs-name,
20 | .hljs-selector-id,
21 | .hljs-selector-class {
22 | color: #d73737;
23 | }
24 |
25 | /* Atelier-Dune Orange */
26 | .hljs-number,
27 | .hljs-meta,
28 | .hljs-built_in,
29 | .hljs-builtin-name,
30 | .hljs-literal,
31 | .hljs-type,
32 | .hljs-params {
33 | color: #b65611;
34 | }
35 |
36 | /* Atelier-Dune Green */
37 | .hljs-string,
38 | .hljs-symbol,
39 | .hljs-bullet {
40 | color: #60ac39;
41 | }
42 |
43 | /* Atelier-Dune Blue */
44 | .hljs-title,
45 | .hljs-section {
46 | color: #6684e1;
47 | }
48 |
49 | /* Atelier-Dune Purple */
50 | .hljs-keyword,
51 | .hljs-selector-tag {
52 | color: #b854d4;
53 | }
54 |
55 | .hljs {
56 | display: block;
57 | overflow-x: auto;
58 | background: #f1f1f1;
59 | color: #6e6b5e;
60 | padding: 0.5em;
61 | }
62 |
63 | .hljs-emphasis {
64 | font-style: italic;
65 | }
66 |
67 | .hljs-strong {
68 | font-weight: bold;
69 | }
70 |
71 | .hljs-addition {
72 | color: #22863a;
73 | background-color: #f0fff4;
74 | }
75 |
76 | .hljs-deletion {
77 | color: #b31d28;
78 | background-color: #ffeef0;
79 | }
80 |
--------------------------------------------------------------------------------
/documentation/docs/troubleshooting.md:
--------------------------------------------------------------------------------
1 | ---
2 | description: Troubleshooting the Streams Library.
3 | image: /img/logo/iota_mark_light.png
4 | keywords:
5 | - discussion
6 | - channel
7 | - problem
8 | - solution
9 | - discord
10 | - stackexchange
11 | ---
12 | # Troubleshooting
13 |
14 |
15 | ## StackExchange
16 |
17 | > [https://iota.stackexchange.com](https://iota.stackexchange.com/)
18 |
19 | The IOTA StackExchange is a handy tool for developers to find answers to a problem. Just search for your problem and find your answer! If there is none, submit your question and share it in the discussion channel below.
20 |
21 | ## Joining the discussion
22 |
23 | If you want to get involved in discussions about this library, or you're looking for support, go to the #streams channel on [Discord](https://discord.iota.org).
--------------------------------------------------------------------------------
/documentation/docs/welcome.md:
--------------------------------------------------------------------------------
1 | ---
2 | description: Official IOTA Streams software which can be used to structure and navigate secure data through the Tangle.
3 | image: /img/logo/wallet_light.png
4 | keywords:
5 | - requirements
6 | - streams
7 | - channels
8 | - software
9 | - library
10 | - rust
11 | - nodejs
12 | ---
13 | # Welcome
14 |
15 | This is the documentation for the official IOTA Streams software. You can read more about core principles behind IOTA Streams in the following blog [post](https://blog.iota.org/iota-streams-alpha-7e91ee326ac0/).
16 |
17 | Streams is an organizational tool for structuring and navigating secure data through the Tangle. Streams organizes data by ordering it in a uniform and interoperable structure. Needless to say, it is also based on our official *one source code of truth* [IOTA Rust library](https://github.com/iotaledger/iota.rs).
18 |
19 | :::caution
20 |
21 | This library is in active development. The library targets the Chrysalis network and does not work with the IOTA legacy network.
22 |
23 | :::
24 |
25 | More information about Chrysalis components is available at the [documentation portal](https://wiki.iota.org/chrysalis-docs/welcome).
26 |
27 | ## Joining the discussion
28 |
29 | If you want to get involved in discussions about this library, or you're looking for support, go to the #streams-discussion channel on [Discord](https://discord.iota.org).
30 |
31 | ## What you will find here
32 |
33 | This documentation has five paths:
34 |
35 | 1. The Overview: a detailed overview of the Streams library.
36 | 2. Libraries: all available programming languages and their resources.
37 | 3. The Specification: a detailed explanation of the requirements and functionality.
38 | 4. Contribute: how you can work on the Streams software.
39 | 5. Get in touch: join the community and become part of the X-Team!
40 |
--------------------------------------------------------------------------------
/documentation/docusaurus.config.js:
--------------------------------------------------------------------------------
1 | const lightCodeTheme = require('prism-react-renderer/themes/github');
2 | const darkCodeTheme = require('prism-react-renderer/themes/dracula');
3 |
4 | /** @type {import('@docusaurus/types').DocusaurusConfig} */
5 | module.exports = {
6 | title: 'streams',
7 | tagline: 'Official IOTA Streams library',
8 | url: 'https://wiki.iota.org/streams/welcome/',
9 | baseUrl: '/',
10 | onBrokenLinks: 'warn',
11 | onBrokenMarkdownLinks: 'throw',
12 | favicon: '/img/logo/favicon.ico',
13 | organizationName: 'iotaledger', // Usually your GitHub org/user name.
14 | projectName: 'streams', // Usually your repo name.
15 | stylesheets: [
16 | 'https://fonts.googleapis.com/css?family=Material+Icons',
17 | ],
18 | themeConfig: {
19 | colorMode: {
20 | defaultMode: "dark",
21 | },
22 | navbar: {
23 | title: 'streams',
24 | logo: {
25 | alt: 'IOTA',
26 | src: 'img/logo/Logo_Swirl_Dark.png',
27 | },
28 | items: [{
29 | type: 'doc',
30 | docId: 'welcome',
31 | position: 'left',
32 | label: 'Documentation',
33 | },
34 | // {to: '/blog', label: 'Blog', position: 'left'},
35 | {
36 | href: 'https://github.com/iotaledger/streams',
37 | label: 'GitHub',
38 | position: 'right',
39 | },
40 | ],
41 | },
42 | footer: {
43 | style: 'dark',
44 | links: [{
45 | title: 'Documentation',
46 | items: [{
47 | label: 'Welcome',
48 | to: '/docs/welcome',
49 | },
50 | {
51 | label: 'Overview',
52 | to: '/docs/overview/',
53 | },
54 | {
55 | label: 'Libraries',
56 | to: '/docs/libraries/overview',
57 | },
58 | {
59 | label: 'Specification',
60 | to: '/docs/specs',
61 | },
62 | {
63 | label: 'Contribute',
64 | to: '/docs/contribute',
65 | },
66 | ],
67 | },
68 | {
69 | title: 'Community',
70 | items: [
71 | {
72 | label: 'Discord',
73 | href: 'https://discord.iota.org/',
74 | },
75 | ],
76 | },
77 | {
78 | title: 'Contribute',
79 | items: [
80 | {
81 | label: 'GitHub',
82 | href: 'https://github.com/iotaledger/streams',
83 | },
84 | ],
85 | },
86 | ],
87 | copyright: `Copyright © ${new Date().getFullYear()} IOTA Foundation, Built with Docusaurus.`,
88 | },
89 | prism: {
90 | additionalLanguages: ['rust'],
91 | theme: lightCodeTheme,
92 | darkTheme: darkCodeTheme,
93 | },
94 | },
95 | presets: [
96 | [
97 | '@docusaurus/preset-classic',
98 | {
99 | docs: {
100 | remarkPlugins: [require('remark-code-import'), require('remark-import-partial')],
101 | sidebarPath: require.resolve('./sidebars.js'),
102 | editUrl: 'https://github.com/iotaledger/streams/tree/dev/documentation/',
103 | },
104 | theme: {
105 | customCss: require.resolve('./src/css/iota.css'),
106 | },
107 | },
108 | ],
109 | ],
110 | plugins: [
111 | ],
112 | };
113 |
--------------------------------------------------------------------------------
/documentation/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "documentation",
3 | "version": "0.0.0",
4 | "private": "true",
5 | "scripts": {
6 | "start": "iota-wiki-cli start",
7 | "clean": "iota-wiki-cli clean",
8 | "setup": "iota-wiki-cli setup",
9 | "test": "echo \"Error: no test specified\" && exit 1"
10 | },
11 | "license": "UNLICENSED",
12 | "engines": {
13 | "node": ">=14.14.0"
14 | },
15 | "dependencies": {
16 | "iota-wiki-cli": "git+https://github.com/iota-community/iota-wiki-cli#39b71bcfe5ae9c1b017196a674bf5a6d91b663ee"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/documentation/sidebars.js:
--------------------------------------------------------------------------------
1 | /**
2 | * * Creating a sidebar enables you to:
3 | - create an ordered group of docs
4 | - render a sidebar for each doc of that group
5 | - provide next/previous navigation
6 |
7 | The sidebars can be generated from the filesystem, or explicitly defined here.
8 |
9 | Create as many sidebars as you want.
10 | */
11 |
12 | module.exports = {
13 | docs: [{
14 | type: 'doc',
15 | id: 'welcome',
16 | },
17 | {
18 | type: 'doc',
19 | id: 'overview',
20 | },
21 | {
22 | type: 'doc',
23 | id: 'getting_started',
24 | },
25 | {
26 | type: 'category',
27 | label: 'Libraries',
28 | collapsed: false,
29 | items: [{
30 | type: 'doc',
31 | id: 'libraries/overview',
32 | label: 'Overview',
33 | },
34 | {
35 | type: 'category',
36 | label: 'Rust',
37 | items: [
38 | {
39 | type: 'doc',
40 | id: 'libraries/rust/getting_started',
41 | label: 'Getting Started',
42 | },
43 | {
44 | type: 'doc',
45 | id: 'libraries/rust/examples',
46 | label: 'Examples'
47 | },
48 | {
49 | type: 'doc',
50 | id: 'libraries/rust/api_reference',
51 | label: 'API Reference'
52 | },
53 | ]
54 | },
55 | {
56 | type: 'category',
57 | label: 'Wasm',
58 | items: [
59 | {
60 | type: 'doc',
61 | id: 'libraries/wasm/getting_started',
62 | label: 'Getting Started'
63 | },
64 | {
65 | type: 'doc',
66 | id: 'libraries/wasm/examples',
67 | label: 'Examples'
68 | },
69 | {
70 | type: 'doc',
71 | id: 'libraries/wasm/api_reference',
72 | label: 'API Reference'
73 | },
74 | ]
75 | },
76 | {
77 | type: 'category',
78 | label: 'C',
79 | items: [
80 | {
81 | type: 'doc',
82 | id: 'libraries/c/getting_started',
83 | label: 'Getting Started'
84 | },
85 | {
86 | type: 'doc',
87 | id: 'libraries/c/examples',
88 | label: 'Examples'
89 | },
90 | {
91 | type: 'doc',
92 | id: 'libraries/c/api_reference',
93 | label: 'API Reference'
94 | },
95 | ]
96 | }
97 | ]
98 | },
99 | {
100 | type: 'doc',
101 | id: 'specs',
102 | label: 'Specification',
103 | },
104 | {
105 | type: 'doc',
106 | id: 'troubleshooting',
107 | label: 'Troubleshooting'
108 | },
109 | {
110 | type: 'doc',
111 | id: 'contribute',
112 | label: 'Contribute',
113 | }
114 | ]
115 | };
116 |
--------------------------------------------------------------------------------
/documentation/static/.nojekyll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iotaledger-archive/streams/deb3f2dd4b52873c7721bd70c42dd69f15dd6999/documentation/static/.nojekyll
--------------------------------------------------------------------------------
/documentation/static/img/iota_logo.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/documentation/static/img/libraries.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iotaledger-archive/streams/deb3f2dd4b52873c7721bd70c42dd69f15dd6999/documentation/static/img/libraries.png
--------------------------------------------------------------------------------
/documentation/static/img/libraries/screenshot_faucet.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iotaledger-archive/streams/deb3f2dd4b52873c7721bd70c42dd69f15dd6999/documentation/static/img/libraries/screenshot_faucet.png
--------------------------------------------------------------------------------
/documentation/static/img/logo/Logo_Swirl_Dark.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iotaledger-archive/streams/deb3f2dd4b52873c7721bd70c42dd69f15dd6999/documentation/static/img/logo/Logo_Swirl_Dark.png
--------------------------------------------------------------------------------
/documentation/static/img/logo/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iotaledger-archive/streams/deb3f2dd4b52873c7721bd70c42dd69f15dd6999/documentation/static/img/logo/favicon.ico
--------------------------------------------------------------------------------
/lets/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | authors = [
3 | "Vlad Semenov ",
4 | "Dyrell Chapman ",
5 | "Brord van Wierst ",
6 | "Arnau Orriols ",
7 | ]
8 | description = "A Rust framework for developing cryptographic communication protocols"
9 | edition = "2018"
10 | keywords = ["iota", "LETS Framework", "LETS", "Streams", "Communication Protocol"]
11 | license = "Apache-2.0/MIT"
12 | name = "lets"
13 | readme = "README.md"
14 | version = "0.2.0"
15 |
16 | [features]
17 | default = ["utangle-client"]
18 | std = ["spongos/std"]
19 | # Enable the IOTA-Tangle transport client (implies `std` features)
20 | tangle-client = ["iota-client/async", "futures", "iota-crypto/blake2b"]
21 | # Enable the wasm-compatible IOTA-Tangle transport client (incompatible with the `tangle-client` feature due to `iota-client/async` using `tokio`. Implies `std` feature)
22 | tangle-client-wasm = ["iota-client/wasm", "futures"]
23 | # Enable the Streams-specific uTangle Client
24 | utangle-client = ["reqwest", "bee-ternary", "serde", "rayon", "iota-crypto/curl-p"]
25 | # Enable Iota Identity for use with Streams
26 | did = ["identity_iota", "serde"]
27 |
28 | [dependencies]
29 | # Local dependencies
30 | # TODO: remove osrng feature once x25519 is not performed here
31 | spongos = {path = "../spongos", default-features = false, features = ["osrng"]}
32 |
33 | # IOTA dependencies
34 | iota-crypto = {version = "0.9.1", default-features = false, features = ["x25519", "ed25519", "sha", "blake2b"]}
35 |
36 | # 3rd-party dependencies
37 | anyhow = {version = "1.0", default-features = false}
38 | async-trait = {version = "0.1", default-features = false}
39 | hex = {version = "0.4", default-features = false}
40 |
41 | # Optional dependencies
42 | bee-ternary = {version = "0.5.2", default-features = false, optional = true}
43 | futures = {version = "0.3.8", default-features = false, optional = true}
44 | identity_iota = {git = "https://github.com/iotaledger/identity.rs", rev = "d3920c2", default-features = false, optional = true}
45 | iota-client = {version = "1.1.1", default-features = false, optional = true}
46 | parking_lot = {version = "0.11.2", default-features = false, optional = true}
47 | reqwest = {version = "0.11.11", optional = true, default-features = false, features = ["json", "rustls-tls"]}
48 | serde = {version = "1.0", default-features = false, features = ["derive"], optional = true}
49 | serde-big-array = { version = "0.4", default-features = false}
50 | spin = {version = "0.9.2", default-features = false, features = ["mutex", "spin_mutex"], optional = true}
51 | rayon = {version = "1.5.3", default-features = false, optional = true}
52 |
53 | # Error
54 | thiserror-no-std = {version = "2.0.2", default-features = false}
55 |
56 | [dev-dependencies]
57 | chrono = {version = "0.4.19", default-features = false, features = ["clock"]}
58 | criterion = {version = "0.3.5", features = ["async_tokio", "html_reports"]}
59 | serde_json = {version = "1.0.81", default-features = false}
60 | tokio = {version = "1.19.2", default-features = false}
61 |
62 | [[bench]]
63 | harness = false
64 | name = "tangle_clients"
65 | required-features = ["tangle-client", "utangle-client"]
66 |
--------------------------------------------------------------------------------
/lets/README.md:
--------------------------------------------------------------------------------
1 | # IOTA Streams Application layer: core definitions and Channels Application.
2 |
3 | ## Streams Application
4 |
5 | Streams Application is a message-oriented cryptographic protocol. The Application defines the protocol parties, their
6 | roles, and the syntax and semantics of the protocol messages. Messages are declared in DDML syntax and are processed
7 | according to DDML rules. A Streams Message consists of a Header and Application-specific Content.
8 |
9 | ## Channels Application
10 |
11 | The Channels Application has evolved from previous versions of Streams. There are two roles: Author and Subscriber. The
12 | Author is the owner of a channel instance and is capable of proving their identity by signing messages. Subscribers are
13 | anonymous in this sense, as their public identity (ed25519 public key) is not revealed publicly. The Author can share
14 | session key information (Keyload) with a set of Subscribers; the Author and those Subscribers can then interact privately and securely.
15 |
16 | ## Customization
17 |
18 | There are a few known issues that arise in practice. Streams attempts to tackle them through tunable run-time and
19 | compile-time parameters. If the Channels Application is not suitable for your needs you can implement your own Application:
20 | the DDML implementation, as an EDSL, lets you easily wrap and unwrap the messages of your Application (see the sketch
21 | below). And when DDML is not powerful enough, it can be extended with custom commands.
22 |
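As a concrete taste of the customization above, here is a minimal, illustrative sketch of a DDML size/wrap/unwrap round trip for a single field. It assumes the `sizeof`/`wrap`/`unwrap` contexts, the `Mask` command and the `KeccakF1600` permutation exported by the companion `spongos` crate; the `roundtrip` helper itself is hypothetical and not part of the library.

```rust
use spongos::{
    ddml::{
        commands::{sizeof, unwrap, wrap, Mask},
        types::Bytes,
    },
    KeccakF1600,
};

fn roundtrip(payload: &[u8]) -> spongos::error::Result<Vec<u8>> {
    // 1. Compute how many bytes the wrapped field will occupy.
    let mut size_ctx = sizeof::Context::new();
    size_ctx.mask(Bytes::new(payload))?;
    let size = size_ctx.finalize();

    // 2. Wrap (encode) the field into a buffer of exactly that size.
    let mut buf = vec![0u8; size];
    let mut wrap_ctx = wrap::Context::<&mut [u8], KeccakF1600>::new(&mut buf[..]);
    wrap_ctx.mask(Bytes::new(payload))?;

    // 3. Unwrap (decode) it back on the receiving side.
    let mut recovered = Vec::new();
    let mut unwrap_ctx = unwrap::Context::<&[u8], KeccakF1600>::new(&buf[..]);
    unwrap_ctx.mask(Bytes::new(&mut recovered))?;
    Ok(recovered)
}
```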
--------------------------------------------------------------------------------
/lets/benches/tangle_clients.rs:
--------------------------------------------------------------------------------
1 | // Rust
2 | use std::convert::TryFrom;
3 |
4 | // 3rd-party
5 | use anyhow::Result;
6 | use chrono::Utc;
7 | use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput};
8 | use serde::Deserialize;
9 |
10 | // IOTA
11 | use iota_client::bee_message::Message;
12 |
13 | // Streams
14 | use lets::{
15 | address::{Address, AppAddr, MsgId},
16 | id::Identifier,
17 | message::{Topic, TransportMessage},
18 | transport::{tangle, utangle, Transport},
19 | };
20 |
21 | const DEFAULT_NODE: &str = "https://chrysalis-nodes.iota.org";
22 |
23 | async fn send_message<T>(client: &mut T, payload_size: usize) -> Result<()>
24 | where
25 | T: for<'a> Transport<'a, Msg = TransportMessage, SendResponse = Ignore>,
26 | {
27 | let msg = TransportMessage::new(vec![12u8; payload_size]);
28 | let address = Address::new(
29 | AppAddr::default(),
30 | MsgId::gen(
31 | AppAddr::default(),
32 | Identifier::default(),
33 | &Topic::default(),
34 | Utc::now().timestamp_millis() as usize,
35 | ),
36 | );
37 | client.send_message(address, msg).await?;
38 | Ok(())
39 | }
40 |
41 | fn bench_clients(c: &mut Criterion) {
42 | let url = std::env::var("NODE_URL").unwrap_or_else(|_| String::from(DEFAULT_NODE));
43 | let mut group = c.benchmark_group("Send Message by Size");
44 | let runtime = tokio::runtime::Runtime::new().unwrap();
45 | for i in [32, 64, 128, 256, 512, 1024] {
46 | group.throughput(Throughput::Bytes(i as u64));
47 | group.bench_with_input(BenchmarkId::new("iota.rs", i), &i, |b, payload_size| {
48 | b.iter_batched(
49 | || runtime.block_on(tangle::Client::for_node(&url)).unwrap(),
50 | |mut client| {
51 | runtime.block_on(async {
52 | send_message(&mut client, *payload_size).await.unwrap();
53 | })
54 | },
55 | criterion::BatchSize::SmallInput,
56 | )
57 | });
58 | group.bench_with_input(BenchmarkId::new("uTangle", i), &i, |b, payload_size| {
59 | b.iter_batched(
60 | || utangle::Client::new(&url),
61 | |mut client| {
62 | runtime.block_on(async {
63 | send_message(&mut client, *payload_size).await.unwrap();
64 | })
65 | },
66 | criterion::BatchSize::SmallInput,
67 | )
68 | });
69 | }
70 | group.finish();
71 | }
72 |
73 | #[derive(Deserialize)]
74 | struct Ignore {}
75 |
76 | impl TryFrom<Message> for Ignore {
77 | type Error = lets::error::Error;
78 | fn try_from(_: Message) -> Result<Self, Self::Error> {
79 | Ok(Ignore {})
80 | }
81 | }
82 |
83 | criterion_group!(benches, bench_clients);
84 | criterion_main!(benches);
85 |
--------------------------------------------------------------------------------
/lets/src/id/did/data_wrapper.rs:
--------------------------------------------------------------------------------
1 | // 3rd-party
2 | use serde::Serialize;
3 |
4 | use identity_iota::{
5 | crypto::{GetSignature, GetSignatureMut, Proof, SetSignature},
6 | did::{MethodUriType, TryMethod},
7 | };
8 |
9 | /// Wrapper for processing `DID` signatures
10 | #[derive(Serialize)]
11 | pub(crate) struct DataWrapper<'a> {
12 | /// Hash of message
13 | data: &'a [u8],
14 | /// `DID` signature
15 | signature: Option<Proof>,
16 | }
17 |
18 | impl<'a> DataWrapper<'a> {
19 | /// Create a new [`DataWrapper`] for a message hash with an empty `signature` field
20 | ///
21 | /// # Arguments
22 | /// * `data`: The raw hash of the message to be signed
23 | pub(crate) fn new(data: &'a [u8]) -> Self {
24 | Self { data, signature: None }
25 | }
26 |
27 | /// Inject a signature into the [`DataWrapper`]
28 | ///
29 | /// # Arguments
30 | /// * `signature`: The signature of the message hash
31 | pub(crate) fn with_signature(mut self, signature: Proof) -> Self {
32 | self.signature = Some(signature);
33 | self
34 | }
35 |
36 | /// Consumes the [`DataWrapper`], returning the signature field
37 | pub(crate) fn into_signature(self) -> Option<Proof> {
38 | self.signature
39 | }
40 | }
41 |
42 | impl<'a> GetSignature for DataWrapper<'a> {
43 | fn signature(&self) -> Option<&Proof> {
44 | self.signature.as_ref()
45 | }
46 | }
47 |
48 | impl<'a> GetSignatureMut for DataWrapper<'a> {
49 | fn signature_mut(&mut self) -> Option<&mut Proof> {
50 | self.signature.as_mut()
51 | }
52 | }
53 |
54 | impl<'a> SetSignature for DataWrapper<'a> {
55 | fn set_signature(&mut self, signature: Proof) {
56 | self.signature = Some(signature)
57 | }
58 | }
59 |
60 | impl<'a> TryMethod for DataWrapper<'a> {
61 | const TYPE: MethodUriType = MethodUriType::Absolute;
62 | }
63 |
--------------------------------------------------------------------------------
/lets/src/id/did/mod.rs:
--------------------------------------------------------------------------------
1 | /// Wrapper around data for signature validation via `DID`
2 | mod data_wrapper;
3 | /// Base `DID` functionality and types
4 | mod did;
5 | /// Details required for `DID` resolution
6 | mod url_info;
7 |
8 | pub use did::{DIDInfo, DID};
9 | pub use url_info::DIDUrlInfo;
10 |
11 | pub(crate) use data_wrapper::DataWrapper;
12 | pub(crate) use did::resolve_document;
13 |
--------------------------------------------------------------------------------
/lets/src/id/ed25519.rs:
--------------------------------------------------------------------------------
1 | // Rust
2 | use core::hash::Hash;
3 |
4 | // 3rd-party
5 |
6 | // IOTA
7 | use crypto::signatures::ed25519;
8 |
9 | // Streams
10 | use spongos::{KeccakF1600, SpongosRng};
11 |
12 | // Local
13 |
14 | /// Wrapper for [`ed25519::SecretKey`]
15 | pub struct Ed25519(ed25519::SecretKey);
16 |
17 | impl Ed25519 {
18 | /// Creates a new [`Ed25519`] wrapper around the provided secret key
19 | ///
20 | /// # Arguments
21 | /// * `secret`: The [`ed25519::SecretKey`] to be wrapped
22 | pub fn new(secret: ed25519::SecretKey) -> Self {
23 | Self(secret)
24 | }
25 |
26 | /// Generates a new [`ed25519::SecretKey`] from a unique seed. The seed is used as a foundation
27 | /// for a [`SpongosRng`] generated value that is then used as a seed for generating the key.
28 | ///
29 | /// # Arguments
30 | /// * `seed`: Unique seed to generate secret key from
31 | pub fn from_seed<T>(seed: T) -> Self
32 | where
33 | T: AsRef<[u8]>,
34 | {
35 | Self(ed25519::SecretKey::generate_with(&mut SpongosRng::<KeccakF1600>::new(
36 | seed,
37 | )))
38 | }
39 |
40 | /// Returns a reference to the inner [`ed25519::SecretKey`]
41 | pub(crate) fn inner(&self) -> &ed25519::SecretKey {
42 | &self.0
43 | }
44 | }
45 |
46 | impl PartialEq for Ed25519 {
47 | fn eq(&self, other: &Self) -> bool {
48 | self.0.as_slice() == other.0.as_slice()
49 | }
50 | }
51 |
52 | impl Eq for Ed25519 {}
53 |
54 | impl PartialOrd for Ed25519 {
55 | fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
56 | Some(self.cmp(other))
57 | }
58 | }
59 |
60 | impl Ord for Ed25519 {
61 | fn cmp(&self, other: &Self) -> core::cmp::Ordering {
62 | self.0.as_slice().cmp(other.0.as_slice())
63 | }
64 | }
65 |
66 | impl Hash for Ed25519 {
67 | fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
68 | self.0.as_slice().hash(state);
69 | }
70 | }
71 |
72 | impl AsRef<[u8]> for Ed25519 {
73 | fn as_ref(&self) -> &[u8] {
74 | self.0.as_slice()
75 | }
76 | }
77 |
78 | impl From<ed25519::SecretKey> for Ed25519 {
79 | fn from(secret_key: ed25519::SecretKey) -> Self {
80 | Self(secret_key)
81 | }
82 | }
83 |
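A brief usage sketch (the `example` function is hypothetical and not part of the file above): deriving a signing key deterministically from a seed via the `SpongosRng`-backed constructor.

```rust
use lets::id::Ed25519;

fn example() {
    // Deterministically derive an ed25519 secret key from a unique seed.
    let signing_key = Ed25519::from_seed("my-unique-author-seed");
    // The wrapper exposes the raw secret bytes through `AsRef<[u8]>` (32 bytes).
    assert_eq!(signing_key.as_ref().len(), 32);
}
```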
--------------------------------------------------------------------------------
/lets/src/id/mod.rs:
--------------------------------------------------------------------------------
1 | /// Ed25519 functions and types
2 | mod ed25519;
3 | /// User Identifier functions and types
4 | mod identifier;
5 | /// User Identity functions and types
6 | mod identity;
7 | mod permission;
8 | mod psk;
9 |
10 | pub use self::identity::Identity;
11 | pub use ed25519::Ed25519;
12 | pub use identifier::Identifier;
13 | pub use permission::{PermissionDuration, Permissioned};
14 | pub use psk::{Psk, PskId};
15 |
16 | /// Iota Identity functions and types
17 | #[cfg(feature = "did")]
18 | pub mod did;
19 |
--------------------------------------------------------------------------------
/lets/src/id/psk.rs:
--------------------------------------------------------------------------------
1 | use core::fmt::{Display, LowerHex, UpperHex};
2 |
3 | use spongos::{
4 | ddml::{
5 | commands::{sizeof, unwrap, wrap, Mask},
6 | io,
7 | types::NBytes,
8 | },
9 | error::Result as SpongosResult,
10 | KeccakF1600, Spongos, PRP,
11 | };
12 |
13 | /// A Pre-Shared Key for use in Read based permissioning
14 | #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
15 | pub struct Psk([u8; 32]);
16 |
17 | impl Psk {
18 | /// Creates a new [`Psk`] wrapper around the provided bytes
19 | ///
20 | /// # Arguments
21 | /// * `array`: Fixed-size 32 byte array
22 | pub fn new(array: [u8; 32]) -> Self {
23 | Self(array)
24 | }
25 |
26 | /// Generates a new [`Psk`] by using [`Spongos`] to sponge the provided seed bytes into a fixed
27 | /// 32 byte array, and wrapping it.
28 | ///
29 | /// # Arguments
30 | /// * `seed`: A unique variable sized seed slice
31 | pub fn from_seed<T>(seed: T) -> Self
32 | where
33 | T: AsRef<[u8]>,
34 | {
35 | let mut spongos = Spongos::<KeccakF1600>::init();
36 | spongos.absorb("PSK");
37 | spongos.sponge(seed)
38 | }
39 |
40 | /// Creates a [`PskId`] by using [`Spongos`] to sponge the [`Psk`] into a fixed 16 byte array
41 | pub fn to_pskid(self) -> PskId {
42 | let mut spongos = Spongos::<KeccakF1600>::init();
43 | spongos.absorb("PSKID");
44 | spongos.sponge(self)
45 | }
46 | }
47 |
48 | impl AsRef<[u8]> for Psk {
49 | fn as_ref(&self) -> &[u8] {
50 | &self.0
51 | }
52 | }
53 |
54 | impl AsMut<[u8]> for Psk {
55 | fn as_mut(&mut self) -> &mut [u8] {
56 | &mut self.0
57 | }
58 | }
59 |
60 | #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug)]
61 | pub struct PskId([u8; 16]);
62 |
63 | impl PskId {
64 | /// Creates a new [`PskId`] wrapper around the provided bytes
65 | ///
66 | /// # Arguments
67 | /// * `array`: Fixed-size 16 byte array
68 | pub fn new(array: [u8; 16]) -> Self {
69 | Self(array)
70 | }
71 |
72 | /// Generates a new [`PskId`] by using [`Spongos`] to sponge the provided seed bytes into a
73 | /// fixed 16 byte array, and wrapping it.
74 | ///
75 | /// # Arguments
76 | /// * `seed`: A unique variable sized seed slice
77 | pub fn from_seed<T>(seed: T) -> Self
78 | where
79 | T: AsRef<[u8]>,
80 | {
81 | Psk::from_seed::<T>(seed).to_pskid()
82 | }
83 | }
84 |
85 | impl AsRef<[u8]> for PskId {
86 | fn as_ref(&self) -> &[u8] {
87 | &self.0
88 | }
89 | }
90 |
91 | impl AsMut<[u8]> for PskId {
92 | fn as_mut(&mut self) -> &mut [u8] {
93 | &mut self.0
94 | }
95 | }
96 |
97 | impl Display for PskId {
98 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
99 | LowerHex::fmt(self, f)
100 | }
101 | }
102 |
103 | impl LowerHex for PskId {
104 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
105 | write!(f, "{}", hex::encode(self))
106 | }
107 | }
108 |
109 | impl UpperHex for PskId {
110 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
111 | write!(f, "{}", hex::encode_upper(self))
112 | }
113 | }
114 |
115 | impl Mask<&PskId> for sizeof::Context {
116 | fn mask(&mut self, pskid: &PskId) -> SpongosResult<&mut Self> {
117 | self.mask(NBytes::new(pskid))
118 | }
119 | }
120 |
121 | impl<F, OS> Mask<&PskId> for wrap::Context<OS, F>
122 | where
123 | F: PRP,
124 | OS: io::OStream,
125 | {
126 | fn mask(&mut self, pskid: &PskId) -> SpongosResult<&mut Self> {
127 | self.mask(NBytes::new(pskid))
128 | }
129 | }
130 |
131 | impl<F, IS> Mask<&mut PskId> for unwrap::Context<IS, F>
132 | where
133 | F: PRP,
134 | IS: io::IStream,
135 | {
136 | fn mask(&mut self, pskid: &mut PskId) -> SpongosResult<&mut Self> {
137 | self.mask(NBytes::new(pskid))
138 | }
139 | }
140 |
141 | impl Mask<&Psk> for sizeof::Context {
142 | fn mask(&mut self, psk: &Psk) -> SpongosResult<&mut Self> {
143 | self.mask(NBytes::new(psk))
144 | }
145 | }
146 |
147 | impl<F, OS> Mask<&Psk> for wrap::Context<OS, F>
148 | where
149 | F: PRP,
150 | OS: io::OStream,
151 | {
152 | fn mask(&mut self, psk: &Psk) -> SpongosResult<&mut Self> {
153 | self.mask(NBytes::new(psk))
154 | }
155 | }
156 |
157 | impl<F, IS> Mask<&mut Psk> for unwrap::Context<IS, F>
158 | where
159 | F: PRP,
160 | IS: io::IStream,
161 | {
162 | fn mask(&mut self, psk: &mut Psk) -> SpongosResult<&mut Self> {
163 | self.mask(NBytes::new(psk))
164 | }
165 | }
166 |
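A short usage sketch (the `example` function is hypothetical, not part of the file above): deriving a pre-shared key and its 16-byte identifier from a seed, then printing the identifier as hex via its `Display` impl.

```rust
use lets::id::{Psk, PskId};

fn example() {
    let psk = Psk::from_seed("shared-secret-seed");
    let pskid: PskId = psk.to_pskid();
    assert_eq!(pskid.as_ref().len(), 16);
    // `PskId` implements Display/LowerHex/UpperHex, so it can be logged as hex.
    println!("pre-shared key id: {}", pskid);
}
```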
--------------------------------------------------------------------------------
/lets/src/lib.rs:
--------------------------------------------------------------------------------
1 | //! # LETS
2 | //! The `lets` crate houses message-oriented cryptographic protocols. Identification, transportation
3 | //! and generic message handling protocols in these modules can be used to build streaming
4 | //! applications. Signature and encryption operations are handled via the `id` module, while
5 | //! `message` encoding operations are managed via the `message` module. Messages are indexed by an
6 | //! `Address` composed of an `Application Address` and `Message Identifier`, and the library
7 | //! provides a `Transport` trait to allow for agnostic transport client creation.
8 | //!
9 | //! A Streams Message must contain an `HDF` (Header) and `PCF` (Payload), and must be declared in
10 | //! `DDML` syntax in order to be processed correctly. Message internal processes follow `DDML`
11 | //! rules.
12 |
13 | #![allow(clippy::module_inception)]
14 | #![no_std]
15 |
16 | #[macro_use]
17 | extern crate alloc;
18 |
19 | // Uncomment to enable printing for development
20 | // #[macro_use]
21 | // extern crate std;
22 |
23 | /// Message definitions and utils for wrapping/unwrapping.
24 | pub mod message;
25 |
26 | /// Message addressing and linking
27 | pub mod address;
28 |
29 | /// Transport-related abstractions.
30 | pub mod transport;
31 |
32 | /// Identity based Signature/Verification utilities
33 | pub mod id;
34 |
35 | /// Errors specific for LETS
36 | pub mod error;
37 |
--------------------------------------------------------------------------------
/lets/src/message/content.rs:
--------------------------------------------------------------------------------
1 | // TODO: MOVE TO SPONGOS?
2 |
3 | // Rust
4 | use alloc::boxed::Box;
5 |
6 | // 3rd-party
7 | use async_trait::async_trait;
8 |
9 | // IOTA
10 |
11 | // Streams
12 |
13 | // Local
14 | use spongos::error::Result;
15 |
16 | /// Used to determine the encoding size of the object `T`
17 | #[async_trait(?Send)]
18 | pub trait ContentSizeof<T> {
19 | async fn sizeof(&mut self, content: &T) -> Result<&mut Self>;
20 | }
21 |
22 | /// Used for encoding the object `T` into a `Context` stream
23 | #[async_trait(?Send)]
24 | pub trait ContentWrap<T> {
25 | async fn wrap(&mut self, content: &mut T) -> Result<&mut Self>;
26 | }
27 |
28 | /// Used for decoding the object `T` from a `Context` stream
29 | #[async_trait(?Send)]
30 | pub trait ContentUnwrap<T> {
31 | async fn unwrap(&mut self, content: &mut T) -> Result<&mut Self>;
32 | }
33 |
34 | /// Used to determine the encoding size of the signature operation for object `T`
35 | #[async_trait(?Send)]
36 | pub trait ContentSignSizeof<T> {
37 | async fn sign_sizeof(&mut self, ctx: &T) -> Result<&mut Self>;
38 | }
39 |
40 | /// Used to sign the `Context` `Spongos` state hash and encode the signature into the `Context`
41 | /// stream
42 | #[async_trait(?Send)]
43 | pub trait ContentSign<T> {
44 | async fn sign(&mut self, signer: &T) -> Result<&mut Self>;
45 | }
46 |
47 | /// Used to authenticate the signature from the `Context` stream
48 | #[async_trait(?Send)]
49 | pub trait ContentVerify<T> {
50 | async fn verify(&mut self, verifier: &T) -> Result<&mut Self>;
51 | }
52 |
53 | /// Used to determine the encoding size of the encryption operation for a key slice for recipient
54 | /// `T`
55 | #[async_trait(?Send)]
56 | pub trait ContentEncryptSizeOf<T> {
57 | async fn encrypt_sizeof(&mut self, recipient: &T, key: &[u8]) -> Result<&mut Self>;
58 | }
59 |
60 | /// Used to encrypt a key slice for recipient `T`
61 | #[async_trait(?Send)]
62 | pub trait ContentEncrypt<T> {
63 | async fn encrypt(&mut self, recipient: &T, key: &[u8]) -> Result<&mut Self>;
64 | }
65 |
66 | /// Used to decrypt a key slice for recipient `T`
67 | #[async_trait(?Send)]
68 | pub trait ContentDecrypt<T> {
69 | async fn decrypt(&mut self, recipient: &T, key: &mut [u8]) -> Result<&mut Self>;
70 | }
71 |
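A hedged illustration of how these traits are typically implemented. The `Greeting` type is hypothetical and is written as if it lived inside this crate next to the traits (which is where such blanket impls over the `spongos` contexts are legal); sizeof accounts for the encoded length, wrap encodes the same fields.

```rust
use alloc::string::String;

use async_trait::async_trait;
use spongos::{
    ddml::{
        commands::{sizeof, wrap, Mask},
        io,
        types::Bytes,
    },
    error::Result,
    PRP,
};

use crate::message::{ContentSizeof, ContentWrap};

struct Greeting {
    text: String,
}

#[async_trait(?Send)]
impl ContentSizeof<Greeting> for sizeof::Context {
    async fn sizeof(&mut self, greeting: &Greeting) -> Result<&mut Self> {
        // Account for the masked, variable-length text field.
        self.mask(Bytes::new(&greeting.text))?;
        Ok(self)
    }
}

#[async_trait(?Send)]
impl<F, OS> ContentWrap<Greeting> for wrap::Context<OS, F>
where
    F: PRP,
    OS: io::OStream,
{
    async fn wrap(&mut self, greeting: &mut Greeting) -> Result<&mut Self> {
        // Encode exactly the same fields, in the same order, as the sizeof pass.
        self.mask(Bytes::new(&greeting.text))?;
        Ok(self)
    }
}
```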
--------------------------------------------------------------------------------
/lets/src/message/message.rs:
--------------------------------------------------------------------------------
1 | // Rust
2 |
3 | // IOTA
4 |
5 | // Streams
6 | use spongos::{
7 | ddml::commands::{sizeof, wrap, Commit},
8 | Spongos, PRP,
9 | };
10 |
11 | // Local
12 | use crate::{
13 | error::Result,
14 | message::{
15 | content::{ContentSizeof, ContentWrap},
16 | hdf::HDF,
17 | pcf::PCF,
18 | transport::TransportMessage,
19 | },
20 | };
21 |
22 | /// Streams Message comprised of a Header ([`HDF`]) and Payload ([`PCF`])
23 | #[derive(Clone, PartialEq, Eq, Hash, Default, Debug)]
24 | pub struct Message<Payload> {
25 | /// Header of the message
26 | header: HDF,
27 | /// Body of the message
28 | payload: PCF<Payload>,
29 | }
30 |
31 | impl<Payload> Message<Payload> {
32 | /// Creates a new [`Message`] wrapper around the provided message components.
33 | ///
34 | /// # Arguments
35 | /// * `header`: The header of the message
36 | /// * `payload`: The body of the message
37 | pub fn new(header: HDF, payload: PCF) -> Self {
38 | Self { header, payload }
39 | }
40 |
41 | /// Inject a header into the [`Message`] wrapper
42 | ///
43 | /// # Arguments
44 | /// * `header`: The header of the message
45 | pub fn with_header(&mut self, header: HDF) -> &mut Self {
46 | self.header = header;
47 | self
48 | }
49 |
50 | /// Inject a payload into the [`Message`] wrapper
51 | ///
52 | /// # Arguments
53 | /// * `payload`: The body of the message
54 | pub fn with_content(&mut self, content: Payload) -> &mut Self {
55 | self.payload.change_content(content);
56 | self
57 | }
58 |
59 | /// Returns a reference to the [`Message`] [header](`HDF`)
60 | pub fn header(&self) -> &HDF {
61 | &self.header
62 | }
63 |
64 | /// Returns a reference to the [`Message`] [payload](`PCF`)
65 | pub fn payload(&self) -> &PCF<Payload> {
66 | &self.payload
67 | }
68 |
69 | /// Consumes the [`Message`], returning the [payload](`PCF`)
70 | pub fn into_payload(self) -> PCF<Payload> {
71 | self.payload
72 | }
73 |
74 | /// Consumes the [`Message`], returning a tuple comprised of the [header](`HDF`) and
75 | /// [payload](`PCF`)
76 | pub fn into_parts(self) -> (HDF, PCF<Payload>) {
77 | (self.header, self.payload)
78 | }
79 |
80 | /// Encodes the message for transport, wrapping the [`HDF`] and [`PCF`] into one binary message,
81 | /// returning that [`TransportMessage`] and the context [`Spongos`] state.
82 | pub async fn wrap<F>(&mut self) -> Result<(TransportMessage, Spongos<F>)>
83 | where
84 | F: PRP + Default,
85 | for<'b> wrap::Context<&'b mut [u8], F>: ContentWrap<HDF> + ContentWrap<PCF<Payload>>,
86 | sizeof::Context: ContentSizeof<HDF> + ContentSizeof<PCF<Payload>>,
87 | {
88 | let mut ctx = sizeof::Context::new();
89 | ctx.sizeof(&self.header).await?.commit()?.sizeof(&self.payload).await?;
90 | let buf_size = ctx.finalize();
91 |
92 | let mut buf = vec![0; buf_size];
93 |
94 | let mut ctx = wrap::Context::new(&mut buf[..]);
95 | ctx.wrap(&mut self.header)
96 | .await?
97 | .commit()?
98 | .wrap(&mut self.payload)
99 | .await?;
100 | // If buffer is not empty, it's an implementation error, panic
101 | assert!(
102 | ctx.stream().is_empty(),
103 | "Missmatch between buffer size expected by SizeOf ({buf_size}) and actual size of Wrap ({})",
104 | ctx.stream().len()
105 | );
106 | let spongos = ctx.finalize();
107 |
108 | Ok((TransportMessage::new(buf), spongos))
109 | }
110 | }
111 |
--------------------------------------------------------------------------------
/lets/src/message/mod.rs:
--------------------------------------------------------------------------------
1 | /// Traits for implementing Spongos de/serialization
2 | mod content;
3 | /// Header Description Frame
4 | mod hdf;
5 | /// Payload Carrying Frame
6 | mod pcf;
7 | /// Abstract linked-message representation
8 | mod transport;
9 | /// Protocol versioning tools
10 | mod version;
11 |
12 | /// Linked Message with header already parsed
13 | mod preparsed;
14 | /// Branch Identifier
15 | pub mod topic;
16 |
17 | mod message;
18 |
19 | pub use content::{
20 | ContentDecrypt, ContentEncrypt, ContentEncryptSizeOf, ContentSign, ContentSignSizeof, ContentSizeof, ContentUnwrap,
21 | ContentVerify, ContentWrap,
22 | };
23 | pub use hdf::HDF;
24 | pub use message::Message;
25 | pub use pcf::PCF;
26 | pub use preparsed::PreparsedMessage;
27 | pub use topic::{Topic, TopicHash};
28 | pub use transport::TransportMessage;
29 |
--------------------------------------------------------------------------------
/lets/src/message/preparsed.rs:
--------------------------------------------------------------------------------
1 | // Rust
2 | use core::fmt;
3 |
4 | // IOTA
5 |
6 | // Streams
7 | use spongos::{ddml::commands::unwrap, KeccakF1600, Spongos, PRP};
8 |
9 | // Local
10 | use crate::{
11 | error::Result,
12 | message::{content::ContentUnwrap, hdf::HDF, message::Message, pcf::PCF, transport::TransportMessage},
13 | };
14 |
15 | /// Message context preparsed for unwrapping.
16 | #[derive(Clone, PartialEq, Eq, Hash, Default)]
17 | pub struct PreparsedMessage<F = KeccakF1600> {
18 | /// The message bytes wrapper
19 | transport_msg: TransportMessage,
20 | /// Parsed header of the message
21 | header: HDF,
22 | /// Spongos state for the `Context` of the pre-parsed message
23 | spongos: Spongos<F>,
24 | /// Streaming position within `Context`, marking the end of the `HDF` and beginning of the
25 | /// `PCF`. Used in partial processing.
26 | cursor: usize,
27 | }
28 |
29 | impl<F> PreparsedMessage<F> {
30 | /// Create a new [`PreparsedMessage`] wrapper around a [`TransportMessage`] after the header has
31 | /// been parsed from the message context.
32 | ///
33 | /// # Arguments
34 | /// * `transport_msg`: The message wrapper that has been preparsed
35 | /// * `header`: The `HDF` parsed from the transport message
36 | /// * `spongos`: The `Context` state following the `HDF` parsing
37 | /// * `cursor`: The read position of the `Context` stream following the `HDF` parsing
38 | pub(crate) fn new(transport_msg: TransportMessage, header: HDF, spongos: Spongos<F>, cursor: usize) -> Self {
39 | Self {
40 | transport_msg,
41 | header,
42 | spongos,
43 | cursor,
44 | }
45 | }
46 |
47 | /// Returns a reference to the message [`HDF`]
48 | pub fn header(&self) -> &HDF {
49 | &self.header
50 | }
51 |
52 | /// Returns a reference to the raw [`TransportMessage`]
53 | pub fn transport_msg(&self) -> &TransportMessage {
54 | &self.transport_msg
55 | }
56 |
57 | /// Consumes the [`PreparsedMessage`], returning a tuple containing the message `HDF`, raw
58 | /// `TransportMessage` and read position cursor
59 | pub fn into_parts(self) -> (HDF, TransportMessage, Spongos<F>, usize) {
60 | (self.header, self.transport_msg, self.spongos, self.cursor)
61 | }
62 |
63 | /// Returns a reference to the message read state cursor
64 | pub fn cursor(&self) -> usize {
65 | self.cursor
66 | }
67 |
68 | /// Returns the remainder of the message bytes starting from the read position cursor as a slice
69 | fn remaining_message(&self) -> &[u8] {
70 | &self.transport_msg.as_ref()[self.cursor..]
71 | }
72 |
73 | /// Decode the `PCF` from the remainder of the message bytes, starting from the cursor position.
74 | /// Returns a new [`Message`] wrapper around the [`HDF`] and [`PCF`], as well as the spongos
75 | /// state following the unwrapping operations.
76 | ///
77 | /// # Arguments
78 | /// * `content` - An implementation of a [`PCF`] [`unwrap::Context`]
79 | pub async fn unwrap<Content>(self, content: Content) -> Result<(Message<Content>, Spongos<F>)>
80 | where
81 | for<'a> unwrap::Context<&'a [u8], F>: ContentUnwrap<PCF<Content>>,
82 | F: PRP,
83 | {
84 | let mut pcf = PCF::<()>::default().with_content(content);
85 | let spongos = self.spongos;
86 | let transport_msg = self.transport_msg;
87 | // Cannot use Self::remaining_message() due to partial move of spongos
88 | let mut ctx = unwrap::Context::new_with_spongos(&transport_msg.body()[self.cursor..], spongos);
89 | ctx.unwrap(&mut pcf).await?;
90 | // discard `self.ctx.stream` that should be empty
91 | let (spongos, _) = ctx.finalize();
92 | Ok((Message::new(self.header, pcf), spongos))
93 | }
94 | }
95 |
96 | impl<F> fmt::Debug for PreparsedMessage<F> {
97 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
98 | write!(
99 | f,
100 | "{{header: {:?}, ctx: {:?}}}",
101 | self.header,
102 | &self.remaining_message()[..10]
103 | )
104 | }
105 | }
106 |
--------------------------------------------------------------------------------
/lets/src/message/topic.rs:
--------------------------------------------------------------------------------
1 | use alloc::{
2 | borrow::Cow,
3 | string::{String, ToString},
4 | vec::Vec,
5 | };
6 | use core::{
7 | convert::{TryFrom, TryInto},
8 | fmt::Formatter,
9 | };
10 | use spongos::{
11 | ddml::{
12 | commands::{sizeof, unwrap, wrap, Mask},
13 | io,
14 | types::{Bytes, NBytes},
15 | },
16 | error::Result as SpongosResult,
17 | KeccakF1600, Spongos, PRP,
18 | };
19 |
20 | use crate::error::Result;
21 |
22 | /// A wrapper around a `String` used for identifying a branch within a `Stream`
23 | #[derive(Clone, PartialEq, Eq, Debug, Default, Hash, serde::Serialize)]
24 | pub struct Topic(String);
25 |
26 | impl Topic {
27 | /// Create a new [`Topic`] wrapper for the provided `String`
28 | ///
29 | /// # Arguments
30 | /// * `t`: A unique branch identifier
31 | pub fn new(t: String) -> Self {
32 | Self(t)
33 | }
34 |
35 | /// Returns a reference to the inner branch identifier `String`
36 | pub fn str(&self) -> &str {
37 | &self.0
38 | }
39 | }
40 |
41 | impl From<&str> for Topic {
42 | fn from(t: &str) -> Self {
43 | Self(t.to_string())
44 | }
45 | }
46 |
47 | impl From<String> for Topic {
48 | fn from(t: String) -> Self {
49 | Self(t)
50 | }
51 | }
52 |
53 | impl TryFrom<&[u8]> for Topic {
54 | type Error = crate::error::Error;
55 | fn try_from(t: &[u8]) -> Result<Self> {
56 | let topic = String::from_utf8(t.to_vec())?;
57 | Ok(Topic(topic))
58 | }
59 | }
60 |
61 | impl TryFrom<Vec<u8>> for Topic {
62 | type Error = crate::error::Error;
63 | fn try_from(t: Vec<u8>) -> Result<Self> {
64 | let topic = String::from_utf8(t)?;
65 | Ok(Topic(topic))
66 | }
67 | }
68 |
69 | impl core::fmt::Display for Topic {
70 | fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
71 | write!(f, "{}", &self.0)
72 | }
73 | }
74 |
75 | impl AsRef<[u8]> for Topic {
76 | fn as_ref(&self) -> &[u8] {
77 | self.0.as_ref()
78 | }
79 | }
80 |
81 | impl From<Topic> for Cow<'_, Topic> {
82 | fn from(topic: Topic) -> Self {
83 | Self::Owned(topic)
84 | }
85 | }
86 |
87 | impl<'a> From<&'a Topic> for Cow<'a, Topic> {
88 | fn from(topic: &'a Topic) -> Self {
89 | Self::Borrowed(topic)
90 | }
91 | }
92 |
93 | impl Mask<&Topic> for sizeof::Context {
94 | fn mask(&mut self, topic: &Topic) -> SpongosResult<&mut Self> {
95 | self.mask(Bytes::new(topic))
96 | }
97 | }
98 |
99 | impl<F, OS> Mask<&Topic> for wrap::Context<OS, F>
100 | where
101 | F: PRP,
102 | OS: io::OStream,
103 | {
104 | fn mask(&mut self, topic: &Topic) -> SpongosResult<&mut Self> {
105 | self.mask(Bytes::new(topic))
106 | }
107 | }
108 |
109 | impl<F, IS> Mask<&mut Topic> for unwrap::Context<IS, F>
110 | where
111 | F: PRP,
112 | IS: io::IStream,
113 | {
114 | fn mask(&mut self, topic: &mut Topic) -> SpongosResult<&mut Self> {
115 | let mut topic_bytes = topic.as_ref().to_vec();
116 | self.mask(Bytes::new(&mut topic_bytes))?;
117 | *topic = topic_bytes
118 | .try_into()
119 | .map_err(|e: crate::error::Error| spongos::error::Error::Context("Mask", e.to_string()))?;
120 | Ok(self)
121 | }
122 | }
123 |
124 | /// A 16 byte fixed size hash representation of a [`Topic`]
125 | #[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Debug, Default, Hash, serde::Serialize)]
126 | pub struct TopicHash([u8; 16]);
127 |
128 | impl From<&Topic> for TopicHash {
129 | fn from(topic: &Topic) -> Self {
130 | let topic_hash: [u8; 16] = Spongos::<KeccakF1600>::init().sponge(topic.as_ref());
131 | Self(topic_hash)
132 | }
133 | }
134 |
135 | impl From<&str> for TopicHash {
136 | fn from(t: &str) -> Self {
137 | TopicHash::from(&Topic::from(t))
138 | }
139 | }
140 |
141 | impl core::fmt::Display for TopicHash {
142 | fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
143 | write!(f, "{:?}", &self.0)
144 | }
145 | }
146 |
147 | impl AsRef<[u8]> for TopicHash {
148 | fn as_ref(&self) -> &[u8] {
149 | self.0.as_ref()
150 | }
151 | }
152 |
153 | impl Mask<&TopicHash> for sizeof::Context {
154 | fn mask(&mut self, topic_hash: &TopicHash) -> SpongosResult<&mut Self> {
155 | self.mask(NBytes::<[u8; 16]>::new(topic_hash.0))
156 | }
157 | }
158 |
159 | impl<F, OS> Mask<&TopicHash> for wrap::Context<OS, F>
160 | where
161 | F: PRP,
162 | OS: io::OStream,
163 | {
164 | fn mask(&mut self, topic_hash: &TopicHash) -> SpongosResult<&mut Self> {
165 | self.mask(NBytes::<[u8; 16]>::new(topic_hash.0))
166 | }
167 | }
168 |
169 | impl<F, IS> Mask<&mut TopicHash> for unwrap::Context<IS, F>
170 | where
171 | F: PRP,
172 | IS: io::IStream,
173 | {
174 | fn mask(&mut self, topic_hash: &mut TopicHash) -> SpongosResult<&mut Self> {
175 | self.mask(NBytes::<&mut [u8; 16]>::new(&mut topic_hash.0))?;
176 | Ok(self)
177 | }
178 | }
179 |
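A brief usage sketch (the `example` function is hypothetical, not part of the file above): building a branch `Topic` and its fixed 16-byte `TopicHash`.

```rust
use lets::message::{Topic, TopicHash};

fn example() {
    let topic = Topic::from("sensor-readings/device-42");
    let hash = TopicHash::from(&topic);
    assert_eq!(hash.as_ref().len(), 16);
    println!("branch '{}' hashes to {}", topic, hash);
}
```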
--------------------------------------------------------------------------------
/lets/src/message/transport.rs:
--------------------------------------------------------------------------------
1 | // Rust
2 | use alloc::vec::Vec;
3 |
4 | // IOTA
5 |
6 | // Streams
7 | use spongos::{ddml::commands::unwrap, PRP};
8 |
9 | // Local
10 | use crate::{
11 | error::Result,
12 | message::{content::ContentUnwrap, hdf::HDF, preparsed::PreparsedMessage},
13 | };
14 |
15 | /// Binary network Message representation.
16 | #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
17 | pub struct TransportMessage(Vec<u8>);
18 |
19 | impl TransportMessage {
20 | /// Creates a new [`TransportMessage`] wrapper for the provided bytes
21 | ///
22 | /// # Arguments
23 | /// * `body`: The body of the message
24 | pub fn new(body: Vec<u8>) -> Self {
25 | Self(body)
26 | }
27 |
28 | /// Returns a reference to the body of the message
29 | pub(crate) fn body(&self) -> &Vec<u8> {
30 | &self.0
31 | }
32 |
33 | /// Consumes the [`TransportMessage`], returning the body of the message
34 | pub(crate) fn into_body(self) -> Vec<u8> {
35 | self.0
36 | }
37 | }
38 |
39 | impl TransportMessage {
40 | /// Creates a new [`unwrap::Context`] for the message body and decodes the [`HDF`].
41 | /// The remaining context [`spongos::Spongos`] and cursor position are then wrapped with the
42 | /// [`HDF`] into a [`PreparsedMessage`] for content processing and returned.
43 | pub async fn parse_header<F>(self) -> Result<PreparsedMessage<F>>
44 | where
45 | F: PRP + Default,
46 | {
47 | let mut ctx = unwrap::Context::new(self.body().as_ref());
48 | let mut header = HDF::default();
49 |
50 | ctx.unwrap(&mut header).await?;
51 |
52 | let (spongos, cursor) = ctx.finalize();
53 |
54 | Ok(PreparsedMessage::new(self, header, spongos, cursor))
55 | }
56 | }
57 |
58 | impl From<TransportMessage> for Vec<u8> {
59 | fn from(message: TransportMessage) -> Self {
60 | message.into_body()
61 | }
62 | }
63 |
64 | impl AsRef<[u8]> for TransportMessage {
65 | fn as_ref(&self) -> &[u8] {
66 | self.body().as_ref()
67 | }
68 | }
69 |
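A hedged usage sketch (the `inspect` function is hypothetical, not part of the file above): splitting the header off a raw message before deciding whether to unwrap the payload, assuming the `KeccakF1600` permutation exported by the `spongos` crate.

```rust
use lets::message::TransportMessage;
use spongos::KeccakF1600;

async fn inspect(raw: TransportMessage) -> lets::error::Result<()> {
    // Decode only the HDF; the payload stays untouched until `unwrap` is called later.
    let preparsed = raw.parse_header::<KeccakF1600>().await?;
    println!("parsed header: {:?}", preparsed.header());
    Ok(())
}
```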
--------------------------------------------------------------------------------
/lets/src/message/version.rs:
--------------------------------------------------------------------------------
1 | //! Streams message syntax version distinguishes between incompatible changes in `DDML`
2 | //! syntax and/or in the rules for processing `DDML` messages. A version bump usually means that a new
3 | //! command or type has been added, or that a command proceeds in a different manner than before.
4 | //! It can also signify changes in the `Header` message.
5 | //!
6 | //! Note, changes in syntax of Messages of a particular Application should be reflected
7 | //! in `Header.content_type` field or the Content Message should implicitly support
8 | //! versioning (ie. include `content_version` field for example).
9 | //!
10 | //! Streams message syntax version is indicated as the first byte in the binary encoded message.
11 | //!
12 | //! Backwards compatibility of the Streams implementations is welcome and not mandatory.
13 |
14 | /// Streams version number.
15 | pub(crate) const STREAMS_VER: u8 = 2;
16 |
17 | /// Encoding Constants
18 | pub(crate) const UTF8: u8 = 0;
19 |
20 | /// HDF Frame Identifier
21 | pub(crate) const HDF_ID: u8 = 4;
22 | /// Initial PCF Frame Identifier
23 | pub(crate) const INIT_PCF_ID: u8 = 5;
24 | /// Intermediate PCF Frame Identifier
25 | pub(crate) const INTER_PCF_ID: u8 = 12;
26 | /// Final PCF Frame Identifier
27 | pub(crate) const FINAL_PCF_ID: u8 = 14;
28 |
--------------------------------------------------------------------------------
/lets/src/transport/bucket.rs:
--------------------------------------------------------------------------------
1 | // Rust
2 | use alloc::{boxed::Box, collections::BTreeMap, vec::Vec};
3 |
4 | // 3rd-party
5 | use async_trait::async_trait;
6 |
7 | // IOTA
8 |
9 | // Streams
10 |
11 | // Local
12 | use crate::{
13 | address::Address,
14 | error::{Error, Result},
15 | message::TransportMessage,
16 | transport::Transport,
17 | };
18 |
19 | /// [`BTreeMap`] wrapper client for testing purposes
20 | #[derive(Clone, Debug, PartialEq, Eq, Hash)]
21 | pub struct Client<Msg = TransportMessage> {
22 | /// Mapping of stored [Addresses](`Address`) and `Messages`
23 | // Use BTreeMap instead of HashMap to make BucketTransport nostd without pulling hashbrown
24 | // (this transport is for hacking purposes only, performance is no concern)
25 | bucket: BTreeMap<Address, Vec<Msg>>,
26 | }
27 |
28 | impl<Msg> Client<Msg> {
29 | /// Creates a new [Bucket Client](`Client`)
30 | pub fn new() -> Self {
31 | Self::default()
32 | }
33 | }
34 |
35 | impl<Msg> Default for Client<Msg> {
36 | // Implement default manually because derive puts Default bounds in type parameters
37 | fn default() -> Self {
38 | Self {
39 | bucket: BTreeMap::default(),
40 | }
41 | }
42 | }
43 |
44 | #[async_trait(?Send)]
45 | impl<Msg> Transport<'_> for Client<Msg>
46 | where
47 | Msg: Clone,
48 | {
49 | type Msg = Msg;
50 | type SendResponse = Msg;
51 |
52 | /// If the address is not in the bucket, add it and return the message.
53 | ///
54 | /// # Arguments
55 | /// * `addr`: Address - The address of the message to store.
56 | /// * `msg`: The message to store.
57 | ///
58 | /// Returns:
59 | /// The message that was sent.
60 | async fn send_message(&mut self, addr: Address, msg: Msg) -> Result<Msg>
61 | where
62 | Self::Msg: 'async_trait,
63 | {
64 | self.bucket.entry(addr).or_default().push(msg.clone());
65 | Ok(msg)
66 | }
67 |
68 | /// Returns a vector of messages from the bucket, or an error if the bucket doesn't contain the
69 | /// address
70 | ///
71 | /// # Arguments
72 | /// * `address`: The address to retrieve messages from.
73 | ///
74 | /// Returns:
75 | /// A vector of messages.
76 | async fn recv_messages(&mut self, address: Address) -> Result<Vec<Msg>> {
77 | self.bucket
78 | .get(&address)
79 | .cloned()
80 | .ok_or(Error::AddressError("No message found", address))
81 | }
82 | }
83 |
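A small usage sketch (hypothetical `example` function, not part of the file above): the bucket client is an in-memory transport handy for tests; address construction mirrors the benchmark in `lets/benches/tangle_clients.rs`.

```rust
use lets::{
    address::{Address, AppAddr, MsgId},
    id::Identifier,
    message::{Topic, TransportMessage},
    transport::{bucket, Transport},
};

async fn example() -> lets::error::Result<()> {
    let mut client: bucket::Client<TransportMessage> = bucket::Client::new();
    let address = Address::new(
        AppAddr::default(),
        MsgId::gen(AppAddr::default(), Identifier::default(), &Topic::default(), 1),
    );
    client.send_message(address, TransportMessage::new(vec![42u8; 16])).await?;
    // The same address now yields the stored message back.
    let received = client.recv_message(address).await?;
    assert_eq!(received.as_ref().len(), 16);
    Ok(())
}
```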
--------------------------------------------------------------------------------
/lets/src/transport/mod.rs:
--------------------------------------------------------------------------------
1 | // Rust
2 | use alloc::{boxed::Box, rc::Rc, vec::Vec};
3 | use core::cell::RefCell;
4 |
5 | // 3rd-party
6 | use async_trait::async_trait;
7 |
8 | // IOTA
9 |
10 | // Streams
11 |
12 | // Local
13 | use crate::{
14 | address::Address,
15 | error::{Error, Result},
16 | };
17 |
18 | /// Network transport abstraction.
19 | /// Parametrized by the type of message address.
20 | /// Message address is used to identify/locate a message (eg. like URL for HTTP).
21 | #[async_trait(?Send)]
22 | pub trait Transport<'a> {
23 | type Msg;
24 | type SendResponse;
25 | /// Send a message
26 | async fn send_message(&mut self, address: Address, msg: Self::Msg) -> Result<Self::SendResponse>
27 | where
28 | 'a: 'async_trait;
29 |
30 | /// Receive messages
31 | async fn recv_messages(&mut self, address: Address) -> Result<Vec<Self::Msg>>
32 | where
33 | 'a: 'async_trait;
34 |
35 | /// Receive a single message
36 | async fn recv_message(&mut self, address: Address) -> Result<Self::Msg> {
37 | let mut msgs = self.recv_messages(address).await?;
38 | if let Some(msg) = msgs.pop() {
39 | match msgs.is_empty() {
40 | true => Ok(msg),
41 | false => Err(Error::AddressError("More than one found", address)),
42 | }
43 | } else {
44 | Err(Error::AddressError("not found in transport", address))
45 | }
46 | }
47 | }
48 |
49 | #[async_trait(?Send)]
50 | impl<'a, Tsp: Transport<'a>> Transport<'a> for Rc<RefCell<Tsp>> {
51 | type Msg = Tsp::Msg;
52 | type SendResponse = Tsp::SendResponse;
53 |
54 | /// Send a message.
55 | async fn send_message(&mut self, address: Address, msg: Tsp::Msg) -> Result<Tsp::SendResponse>
56 | where
57 | Self::Msg: 'async_trait,
58 | {
59 | self.borrow_mut().send_message(address, msg).await
60 | }
61 |
62 | /// Receive messages with default options.
63 | async fn recv_messages(&mut self, address: Address) -> Result<Vec<Tsp::Msg>> {
64 | self.borrow_mut().recv_messages(address).await
65 | }
66 | }
67 |
68 | /// Localised mapping for tests and simulations
69 | pub mod bucket;
70 | /// `iota.rs` based tangle client
71 | #[cfg(any(feature = "tangle-client", feature = "tangle-client-wasm"))]
72 | pub mod tangle;
73 | /// Localised micro tangle client
74 | #[cfg(feature = "utangle-client")]
75 | pub mod utangle;
76 |
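A hedged sketch (hypothetical `shared_clients` function, not part of the file above) of why the blanket impl for `Rc<RefCell<Tsp>>` exists: several logical users can share one underlying client, and each handle still implements `Transport` by borrowing the inner client on use.

```rust
use std::{cell::RefCell, rc::Rc};

use lets::{message::TransportMessage, transport::bucket};

type SharedTransport = Rc<RefCell<bucket::Client<TransportMessage>>>;

fn shared_clients() -> (SharedTransport, SharedTransport) {
    let transport: SharedTransport = Rc::new(RefCell::new(bucket::Client::new()));
    // Both handles can be handed to independent users of the Transport trait.
    (transport.clone(), transport)
}
```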
--------------------------------------------------------------------------------
/specification/Streams_Specification_1_0A.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iotaledger-archive/streams/deb3f2dd4b52873c7721bd70c42dd69f15dd6999/specification/Streams_Specification_1_0A.pdf
--------------------------------------------------------------------------------
/spongos/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | authors = [
3 | "Vlad Semenov ",
4 | "Dyrell Chapman ",
5 | "Brord van Wierst ",
6 | "Arnau Orriols ",
7 | ]
8 | description = "A Rust implementation of the cryptographic de/serialization library Spongos"
9 | edition = "2018"
10 | keywords = ["iota", "spongos", "streams", "encoding", "binary", "serialization"]
11 | license = "Apache-2.0/MIT"
12 | name = "spongos"
13 | readme = "README.md"
14 | version = "0.2.0"
15 |
16 | [features]
17 | default = ["osrng"]
18 | # std is needed within spongos to enable the Dump DDML command
19 | std = []
20 | # rand and particularly its getrandom feature are necessary for the X25519 wrapping command.
21 | # This means that X25519 wrapping can only be used on architectures supported by `getrandom` (see https://docs.rs/getrandom/latest/getrandom/).
22 | # When compiling for other architectures, the binary can still implement a custom random backing function
23 | # if X25519 wrapping is needed (see https://docs.rs/getrandom/latest/getrandom/#custom-implementations).
24 | # X25519 unwrapping command does *not* need the "rand" feature nor `getrandom` support
25 | osrng = ["rand/std_rng", "rand/getrandom", "iota-crypto/rand"]
26 |
27 | [dependencies]
28 | # IOTA Dependencies
29 | iota-crypto = {version = "0.9.1", default-features = false, features = ["ed25519", "x25519"]}
30 |
31 | # 3rd-party dependencies
32 | digest = {version = "0.9", default-features = false}
33 | generic-array = {version = "0.14", default-features = false}
34 | hex = {version = "0.4", default-features = false}
35 | keccak = {version = "0.1", default-features = false}
36 | rand = {version = "0.8", default-features = false}
37 |
38 | # Error
39 | thiserror-no-std = {version = "2.0.2", default-features = false}
40 |
41 | # quarantine
42 | # anyhow is forcing spongos (and anything depending on it) to pull in libstd
43 | # anyhow is going to be replaced by typeful checking soon anyway, no point in making it work with no_std
44 | anyhow = {version = "1.0", default-features = false, features = ["std"], optional = false}
45 |
--------------------------------------------------------------------------------
/spongos/README.md:
--------------------------------------------------------------------------------
1 | # IOTA Streams core layers
2 |
3 | ## Binary operations
4 | ## Spongos basic operations
5 | ## Spongos-based pseudo-random generator
6 | ## Pre-shared keys
7 | ## Hash interface and spongos-based instance
8 | ## Troika sponge transform
9 |
--------------------------------------------------------------------------------
/spongos/src/core/mod.rs:
--------------------------------------------------------------------------------
1 | pub(crate) mod prng;
2 | pub(crate) mod prp;
3 | pub(crate) mod spongos;
4 |
5 | #[cfg(test)]
6 | mod tests;
7 |
--------------------------------------------------------------------------------
/spongos/src/core/prng.rs:
--------------------------------------------------------------------------------
1 | //! Spongos-based pseudo-random number generator.
2 | use rand::{CryptoRng, RngCore, SeedableRng};
3 |
4 | use super::{
5 | prp::{keccak::KeccakF1600, PRP},
6 | spongos::Spongos,
7 | };
8 |
9 | type Nonce = [u8; 16];
10 | type Key = [u8; 32];
11 |
12 | /// Spongos-based pseudo-random number generator.
13 | pub struct SpongosRng<F> {
14 | /// Inner [`Spongos`] state
15 | spongos: Spongos<F>,
16 | nonce: Nonce,
17 | }
18 |
19 | impl<F> SpongosRng<F> {
20 | /// Creates a new [`SpongosRng`] from an explicit byte array. A new [`Spongos`] object is
21 | /// created, and is used to sponge the seed into a new `Key`. This `Key` is then used as a
22 | /// seed to generate a new [`SpongosRng`].
23 | ///
24 | /// # Arguments
25 | /// * `seed`: A unique byte array
26 | pub fn new<T>(seed: T) -> Self
27 | where
28 | T: AsRef<[u8]>,
29 | F: PRP + Default,
30 | {
31 | let mut spongos = Spongos::<F>::init();
32 | let key = spongos.sponge(seed);
33 | Self::from_seed(key)
34 | }
35 |
36 | /// Creates a new [`SpongosRng`] from an explicit [`Spongos`] state and [`Nonce`].
37 | fn from_spongos(prng: Spongos<F>, nonce: Nonce) -> Self {
38 | Self { spongos: prng, nonce }
39 | }
40 |
41 | /// Increments the inner nonce
42 | fn inc(&mut self) {
43 | for i in self.nonce.iter_mut() {
44 | let (r, has_wrapped) = i.overflowing_add(1);
45 | *i = r;
46 | if !has_wrapped {
47 | return;
48 | }
49 | }
50 | }
51 | }
52 |
53 | impl<F> RngCore for SpongosRng<F>
54 | where
55 | F: PRP,
56 | {
57 | fn next_u32(&mut self) -> u32 {
58 | self.inc();
59 | u32::from_le_bytes(self.spongos.sponge(&self.nonce))
60 | }
61 | fn next_u64(&mut self) -> u64 {
62 | self.inc();
63 | u64::from_le_bytes(self.spongos.sponge(&self.nonce))
64 | }
65 | fn fill_bytes(&mut self, dest: &mut [u8]) {
66 | self.inc();
67 | self.spongos.sponge_mut(&self.nonce, dest);
68 | }
69 | fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rand::Error> {
70 | self.fill_bytes(dest);
71 | Ok(())
72 | }
73 | }
74 |
75 | impl<F> CryptoRng for SpongosRng<F> where F: PRP {}
76 |
77 | impl<F> SeedableRng for SpongosRng<F>
78 | where
79 | F: PRP + Default,
80 | {
81 | type Seed = Key;
82 |
83 | fn from_seed(seed: Self::Seed) -> Self {
84 | let mut spongos = Spongos::init();
85 | let nonce = spongos.sponge(seed);
86 | Self::from_spongos(spongos, nonce)
87 | }
88 | }
89 |
90 | #[cfg(test)]
91 | mod tests {
92 | use rand::Rng;
93 |
94 | use crate::core::{prp::keccak::KeccakF1600, spongos::Spongos};
95 |
96 | use super::SpongosRng;
97 |
98 | #[test]
99 | fn nonce_incremental_does_not_overflow() {
100 | let mut rng = SpongosRng::<KeccakF1600>::from_spongos(Spongos::init(), [255; 16]);
101 | let _random_number: usize = rng.gen();
102 | }
103 | }
104 |
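A short usage sketch (the `example` function is hypothetical, not part of the file above): `SpongosRng` plugs into the `rand` traits, so anything accepting an `RngCore`/`CryptoRng` can be seeded deterministically from a byte seed.

```rust
use rand::Rng;
use spongos::{KeccakF1600, SpongosRng};

fn example() {
    let mut rng = SpongosRng::<KeccakF1600>::new("unique-seed-bytes");
    let x: u64 = rng.gen();
    let mut buf = [0u8; 24];
    rng.fill(&mut buf[..]);
    println!("{} {:?}", x, buf);
}
```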
--------------------------------------------------------------------------------
/spongos/src/core/prp/keccak.rs:
--------------------------------------------------------------------------------
1 | use generic_array::{
2 | typenum::{U168, U32},
3 | GenericArray,
4 | };
5 |
6 | use super::PRP;
7 |
8 | /// A pseudo-random permutation implementing `Keccak-F[1600]`
9 | #[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
10 | pub struct KeccakF1600 {
11 | /// Inner state for transformation
12 | state: [u64; 25],
13 | }
14 |
15 | impl KeccakF1600 {
16 | /// Use `Keccak-F[1600]` sponge function on inner state
17 | fn permutation(&mut self) {
18 | keccak::f1600(&mut self.state);
19 | }
20 | }
21 |
22 | impl PRP for KeccakF1600 {
23 | type RateSize = U168; // (1600 - 256) / 8
24 |
25 | type CapacitySize = U32; // 256
26 |
27 | fn transform(&mut self) {
28 | self.permutation();
29 | }
30 |
31 | fn outer(&self) -> &GenericArray<u8, Self::RateSize> {
32 | unsafe { &*(self.state.as_ptr() as *const GenericArray<u8, Self::RateSize>) }
33 | }
34 |
35 | fn outer_mut(&mut self) -> &mut GenericArray<u8, Self::RateSize> {
36 | unsafe { &mut *(self.state.as_mut_ptr() as *mut GenericArray<u8, Self::RateSize>) }
37 | }
38 |
39 | fn inner(&self) -> &GenericArray<u8, Self::CapacitySize> {
40 | unsafe { &*(self.state.as_ptr().add(21) as *const GenericArray<u8, Self::CapacitySize>) }
41 | }
42 |
43 | fn inner_mut(&mut self) -> &mut GenericArray<u8, Self::CapacitySize> {
44 | unsafe { &mut *(self.state.as_mut_ptr().add(21) as *mut GenericArray<u8, Self::CapacitySize>) }
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/spongos/src/core/prp/mod.rs:
--------------------------------------------------------------------------------
1 | use generic_array::{ArrayLength, GenericArray};
2 |
3 | pub(crate) mod keccak;
4 |
5 | /// Pseudo-random permutation.
6 | ///
7 | /// Actually, it may be non-bijective as the inverse transform is not used in sponge construction.
8 | #[allow(clippy::upper_case_acronyms)]
9 | pub trait PRP {
10 | /// Size of the outer state in bytes.
11 | /// In other words, size of data chunk that PRP can process in one transform.
12 | type RateSize: ArrayLength<u8>;
13 |
14 | /// Size of the inner state (capacity) in bytes; it determines the security of sponge constructions.
15 | /// Other sizes such as sizes of hash/key/nonce/etc. are derived from the capacity.
16 | type CapacitySize: ArrayLength<u8>;
17 |
18 | /// Transform full state.
19 | fn transform(&mut self);
20 |
21 | /// Ref for ejecting outer state.
22 | fn outer(&self) -> &GenericArray<u8, Self::RateSize>;
23 |
24 | /// Mut ref for injecting outer state.
25 | fn outer_mut(&mut self) -> &mut GenericArray<u8, Self::RateSize>;
26 |
27 | /// Ref to inner state.
28 | fn inner(&self) -> &GenericArray<u8, Self::CapacitySize>;
29 |
30 | /// Mut ref to inner state
31 | fn inner_mut(&mut self) -> &mut GenericArray<u8, Self::CapacitySize>;
32 | }
33 |
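A tiny sketch (hypothetical helper, not part of the file above): code built on this trait stays generic over the permutation and only relies on the rate/capacity sizes at the type level. For `KeccakF1600` as configured in this crate it yields (168, 32) bytes.

```rust
use generic_array::typenum::Unsigned;
use spongos::PRP;

fn rate_and_capacity<F: PRP>() -> (usize, usize) {
    // RateSize/CapacitySize are type-level integers; USIZE turns them into runtime values.
    (F::RateSize::USIZE, F::CapacitySize::USIZE)
}
```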
--------------------------------------------------------------------------------
/spongos/src/core/tests.rs:
--------------------------------------------------------------------------------
1 | use generic_array::{typenum::Unsigned, GenericArray};
2 |
3 | use super::{
4 | prp::{keccak::KeccakF1600, PRP},
5 | spongos::Spongos,
6 | };
7 |
8 | fn bytes_spongosn<F: PRP + Default>(n: usize) {
9 | let mut rng = Spongos::<F>::init();
10 | rng.absorb(&vec![0; 10]);
11 | rng.commit();
12 | let mut k = vec![0; n];
13 | let mut p = vec![0; n];
14 | let mut x = vec![0; n];
15 | rng.squeeze_mut(&mut k);
16 | rng.squeeze_mut(&mut p);
17 | rng.squeeze_mut(&mut x);
18 |
19 | let mut s = Spongos::<F>::init();
20 | s.absorb(&k);
21 | s.absorb(&p);
22 | s.commit();
23 | let mut y = x.clone();
24 | s.encrypt_mut(&x, &mut y).unwrap();
25 | s.commit();
26 | let mut t = vec![0; n];
27 | let mut t2 = vec![0; n];
28 | let mut t3 = vec![0; n];
29 | s.squeeze_mut(&mut t);
30 | s.squeeze_mut(&mut t2);
31 | s.squeeze_mut(&mut t3);
32 |
33 | let mut s = Spongos::<F>::init();
34 | s.absorb(&k);
35 | s.absorb(&p);
36 | s.commit();
37 | let mut z = y.clone();
38 | s.decrypt_mut(&y, &mut z).unwrap();
39 | s.commit();
40 | let mut u = vec![0; n];
41 | s.squeeze_mut(&mut u);
42 | assert!(s.squeeze_eq(&t2));
43 | assert!(s.squeeze_eq(&t3));
44 |
45 | assert!(x == z, "{}: x != D(E(x))", n);
46 | assert!(t == u, "{}: MAC(x) != MAC(D(E(x)))", n);
47 | }
48 |
49 | fn slice_spongosn<F: PRP + Default>(n: usize) {
50 | let mut k = vec![0u8; n];
51 | let mut p = vec![0u8; n];
52 | let mut x = vec![0u8; n];
53 | let mut y = vec![0u8; n];
54 | let mut z = vec![0u8; n];
55 | let mut t = vec![0u8; n];
56 | let mut u = vec![0u8; n];
57 | let mut t23 = vec![0u8; n + n];
58 |
59 | let mut s = Spongos::<F>::init();
60 | s.absorb(&k[..]);
61 | s.commit();
62 | s.squeeze_mut(&mut k[..]);
63 | s.squeeze_mut(&mut p[..]);
64 | s.squeeze_mut(&mut x[..]);
65 |
66 | s = Spongos::init();
67 | s.absorb(&k[..]);
68 | s.absorb(&p[..]);
69 | s.commit();
70 | s.encrypt_mut(&x[..], &mut y[..]).unwrap();
71 | s.commit();
72 | s.squeeze_mut(&mut t[..]);
73 | s.squeeze_mut(&mut t23[..n]);
74 | s.squeeze_mut(&mut t23[n..]);
75 |
76 | s = Spongos::init();
77 | s.absorb(&k[..]);
78 | s.absorb(&p[..]);
79 | s.commit();
80 | s.decrypt_mut(&y[..], &mut z[..]).unwrap();
81 | s.commit();
82 | s.squeeze_mut(&mut u[..]);
83 | assert!(s.squeeze_eq(&t23[..n]));
84 | assert!(s.squeeze_eq(&t23[n..]));
85 |
86 | assert!(x == z, "{}: x != D(E(x))", n);
87 | assert!(t == u, "{}: MAC(x) != MAC(D(E(x)))", n);
88 | }
89 |
90 | #[test]
91 | fn bytes_with_size_boundary_cases() {
92 | let rate = <KeccakF1600 as PRP>::RateSize::USIZE;
93 | for i in 1..100 {
94 | bytes_spongosn::<KeccakF1600>(i);
95 | encrypt_decrypt_n::<KeccakF1600>(i);
96 | }
97 | bytes_spongosn::<KeccakF1600>(rate / 2 - 1);
98 | bytes_spongosn::<KeccakF1600>(rate / 2);
99 | bytes_spongosn::<KeccakF1600>(rate / 2 + 1);
100 | bytes_spongosn::<KeccakF1600>(rate - 1);
101 | bytes_spongosn::<KeccakF1600>(rate);
102 | bytes_spongosn::<KeccakF1600>(rate + 1);
103 | bytes_spongosn::<KeccakF1600>(rate * 2 - 1);
104 | bytes_spongosn::<KeccakF1600>(rate * 2);
105 | bytes_spongosn::<KeccakF1600>(rate * 2 + 1);
106 | bytes_spongosn::<KeccakF1600>(rate * 5);
107 | }
108 |
109 | #[test]
110 | fn slices_with_size_boundary_cases() {
111 | let rate = <KeccakF1600 as PRP>::RateSize::USIZE;
112 | for i in 1..100 {
113 | slice_spongosn::<KeccakF1600>(i);
114 | encrypt_decrypt_n::<KeccakF1600>(i);
115 | }
116 | slice_spongosn::<KeccakF1600>(rate / 2 - 1);
117 | slice_spongosn::<KeccakF1600>(rate / 2);
118 | slice_spongosn::<KeccakF1600>(rate / 2 + 1);
119 | slice_spongosn::<KeccakF1600>(rate - 1);
120 | slice_spongosn::<KeccakF1600>(rate);
121 | slice_spongosn::<KeccakF1600>(rate + 1);
122 | slice_spongosn::<KeccakF1600>(rate * 2 - 1);
123 | slice_spongosn::<KeccakF1600>(rate * 2);
124 | slice_spongosn::<KeccakF1600>(rate * 2 + 1);
125 | slice_spongosn::<KeccakF1600>(rate * 5);
126 | }
127 |
128 | fn encrypt_decrypt_n<F: PRP + Default>(n: usize) {
129 | let mut s = Spongos::<F>::init();
130 | s.absorb(&vec![1; 32]);
131 | s.commit();
132 |
133 | let mut x = vec![0; n];
134 | s.clone().squeeze_mut(&mut x);
135 | let mut s2 = s.clone();
136 |
137 | let mut ex = x.clone();
138 | s.encrypt_mut(&x, &mut ex).unwrap();
139 | s.commit();
140 | let tag: GenericArray<u8, F::CapacitySize> = s.squeeze();
141 |
142 | let mut dex = ex.clone();
143 | s2.decrypt_mut(&ex, &mut dex).unwrap();
144 | assert_eq!(x, dex);
145 | s2.commit();
146 | assert_eq!(tag, s2.squeeze());
147 | }
148 |
--------------------------------------------------------------------------------
/spongos/src/ddml/commands/sizeof/absorb.rs:
--------------------------------------------------------------------------------
1 | use crypto::{keys::x25519, signatures::ed25519};
2 |
3 | use crate::{
4 | ddml::{
5 | commands::{sizeof::Context, Absorb},
6 | types::{Bytes, Maybe, NBytes, Size, Uint16, Uint32, Uint64, Uint8},
7 | },
8 | error::Result,
9 | };
10 |
11 | /// Increases [`Context`] size by 1 byte, representing the number of encoded bytes for all Uint8
12 | /// values.
13 | impl Absorb<Uint8> for Context {
14 | fn absorb(&mut self, _u: Uint8) -> Result<&mut Self> {
15 | self.size += 1;
16 | Ok(self)
17 | }
18 | }
19 |
20 | /// Increases [`Context`] size by 2 bytes, representing the number of encoded bytes for all Uint16
21 | /// values.
22 | impl Absorb<Uint16> for Context {
23 | fn absorb(&mut self, _u: Uint16) -> Result<&mut Self> {
24 | self.size += 2;
25 | Ok(self)
26 | }
27 | }
28 |
29 | /// Increases [`Context`] size by 4 bytes, representing the number of encoded bytes for all Uint32
30 | /// values.
31 | impl Absorb<Uint32> for Context {
32 | fn absorb(&mut self, _u: Uint32) -> Result<&mut Self> {
33 | self.size += 4;
34 | Ok(self)
35 | }
36 | }
37 |
38 | /// Increases [`Context`] size by 8 bytes, representing the number of encoded bytes for all Uint64
39 | /// values.
40 | impl Absorb<Uint64> for Context {
41 | fn absorb(&mut self, _u: Uint64) -> Result<&mut Self> {
42 | self.size += 8;
43 | Ok(self)
44 | }
45 | }
46 |
47 | /// Increases [`Context`] size by the number of bytes present in the provided [`Size`] wrapper.
48 | /// `Size` has var-size encoding.
49 | impl Absorb<Size> for Context {
50 | fn absorb(&mut self, size: Size) -> Result<&mut Self> {
51 | self.size += size.num_bytes() as usize + 1;
52 | Ok(self)
53 | }
54 | }
55 |
56 | /// Increases [`Context`] size by the number of bytes present in the provided [`Bytes`] wrapper.
57 | /// `Bytes` has variable size thus the size `n` is encoded before the content bytes.
58 | impl<T: AsRef<[u8]>> Absorb<Bytes<T>> for Context {
59 | fn absorb(&mut self, bytes: Bytes<T>) -> Result<&mut Self> {
60 | let bytes_size = Size::new(bytes.len());
61 | self.absorb(bytes_size)?;
62 | self.size += bytes.len();
63 | Ok(self)
64 | }
65 | }
66 |
67 | /// Increases [`Context`] size by the number of bytes present in the provided [`NBytes`] wrapper.
68 | /// `NByte` is fixed-size and is encoded with `n` bytes.
69 | impl<T: AsRef<[u8]>> Absorb<NBytes<T>> for Context {
70 | fn absorb(&mut self, nbytes: NBytes<T>) -> Result<&mut Self> {
71 | self.size += nbytes.inner().as_ref().len();
72 | Ok(self)
73 | }
74 | }
75 |
76 | /// Increases [`Context`] size by the fixed size of an ed25519 public key (32 bytes).
77 | impl Absorb<&ed25519::PublicKey> for Context {
78 | fn absorb(&mut self, _pk: &ed25519::PublicKey) -> Result<&mut Self> {
79 | self.size += ed25519::PUBLIC_KEY_LENGTH;
80 | Ok(self)
81 | }
82 | }
83 |
84 | /// Increases [`Context`] size by the fixed size of an x25519 public key (32 bytes).
85 | impl Absorb<&x25519::PublicKey> for Context {
86 | fn absorb(&mut self, _pk: &x25519::PublicKey) -> Result<&mut Self> {
87 | self.size += x25519::PUBLIC_KEY_LENGTH;
88 | Ok(self)
89 | }
90 | }
91 |
92 | /// Absorbs a [`Maybe`] wrapper for an `Option` into the [`Context`] size. If the `Option` is
93 | /// `Some`, a `Uint8(1)` value is absorbed first, followed by the content. If the `Option` is
94 | /// `None`, only a `Uint8(0)` is absorbed.
95 | impl<T> Absorb<Maybe<Option<T>>> for Context
96 | where
97 | Self: Absorb<T>,
98 | {
99 | fn absorb(&mut self, maybe: Maybe<Option<T>>) -> Result<&mut Self> {
100 | match maybe.into_inner() {
101 | // for some reason fully qualified syntax is necessary, and cannot use the trait bound like in wrap::Context
102 | Some(t) => <Self as Absorb<Uint8>>::absorb(self, Uint8::new(1))?.absorb(t)?,
103 | None => <Self as Absorb<Uint8>>::absorb(self, Uint8::new(0))?,
104 | };
105 | Ok(self)
106 | }
107 | }
108 |
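// --- Editor's illustrative sketch, not part of the source file ---
// Shows how the `Absorb` impls above accumulate the byte count for a small,
// hypothetical message layout. Assumes the `Uint8::new`, `Size::new` and
// `Bytes::new` constructors used elsewhere in this crate, and that
// `Size::num_bytes()` reports 1 for values below 256.
fn sizeof_absorb_sketch() -> Result<usize> {
    let mut ctx = Context::new();
    ctx.absorb(Uint8::new(1))?           // +1 byte
        .absorb(Size::new(10))?          // +1 length byte +1 value byte
        .absorb(Bytes::new([0u8; 10]))?; // +2-byte size prefix +10 content bytes
    Ok(ctx.finalize())                   // 1 + 2 + 12 = 15 bytes
}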
--------------------------------------------------------------------------------
/spongos/src/ddml/commands/sizeof/absorb_external.rs:
--------------------------------------------------------------------------------
1 | use crate::{
2 | ddml::{
3 | commands::{sizeof::Context, Absorb},
4 | modifiers::External,
5 | types::{NBytes, Uint16, Uint32, Uint64, Uint8},
6 | },
7 | error::Result,
8 | };
9 |
10 | /// External values are not encoded in the stream.
11 | impl Absorb<External<Uint8>> for Context {
12 | fn absorb(&mut self, _external: External<Uint8>) -> Result<&mut Self> {
13 | Ok(self)
14 | }
15 | }
16 |
17 | /// External values are not encoded in the stream.
18 | impl Absorb<External<Uint16>> for Context {
19 | fn absorb(&mut self, _external: External<Uint16>) -> Result<&mut Self> {
20 | Ok(self)
21 | }
22 | }
23 |
24 | /// External values are not encoded in the stream.
25 | impl Absorb<External<Uint32>> for Context {
26 | fn absorb(&mut self, _external: External<Uint32>) -> Result<&mut Self> {
27 | Ok(self)
28 | }
29 | }
30 |
31 | /// External values are not encoded in the stream.
32 | impl Absorb<External<Uint64>> for Context {
33 | fn absorb(&mut self, _external: External<Uint64>) -> Result<&mut Self> {
34 | Ok(self)
35 | }
36 | }
37 |
38 | /// External values are not encoded in the binary stream.
39 | impl<T: AsRef<[u8]>> Absorb<External<&NBytes<T>>> for Context {
40 | fn absorb(&mut self, _external: External<&NBytes<T>>) -> Result<&mut Self> {
41 | Ok(self)
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/spongos/src/ddml/commands/sizeof/commit.rs:
--------------------------------------------------------------------------------
1 | use crate::{
2 | ddml::commands::{sizeof::Context, Commit},
3 | error::Result,
4 | };
5 |
6 | /// Commit has no effect on [sizeof context](`Context`)
7 | impl Commit for Context {
8 | fn commit(&mut self) -> Result<&mut Self> {
9 | Ok(self)
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/spongos/src/ddml/commands/sizeof/dump.rs:
--------------------------------------------------------------------------------
1 | use crate::{
2 | ddml::commands::{sizeof::Context, Dump},
3 | error::Result,
4 | };
5 |
6 | /// Displays context size
7 | impl Dump for Context {
8 | fn dump<'a>(&mut self, args: core::fmt::Arguments<'a>) -> Result<&mut Self> {
9 | println!("{}: size=[{}]", args, self.size);
10 | Ok(self)
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/spongos/src/ddml/commands/sizeof/ed25519.rs:
--------------------------------------------------------------------------------
1 | use crypto::signatures::ed25519;
2 |
3 | use crate::{
4 | ddml::{
5 | commands::{sizeof::Context, Ed25519},
6 | modifiers::External,
7 | types::NBytes,
8 | },
9 | error::Result,
10 | };
11 |
12 | /// Increases [`Context`] size by the Ed25519 signature length (64 bytes).
13 | impl Ed25519<&ed25519::SecretKey, External<&NBytes<[u8; 64]>>> for Context {
14 | fn ed25519(&mut self, _sk: &ed25519::SecretKey, _hash: External<&NBytes<[u8; 64]>>) -> Result<&mut Self> {
15 | self.size += ed25519::SIGNATURE_LENGTH;
16 | Ok(self)
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/spongos/src/ddml/commands/sizeof/fork.rs:
--------------------------------------------------------------------------------
1 | use crate::ddml::commands::{sizeof::Context, Fork};
2 |
3 | /// Forks the [`Context`] for looped operations. The sizeof context holds no spongos state, so this simply returns the same context.
4 | impl<'a> Fork<'a> for Context {
5 | type Forked = &'a mut Context;
6 | fn fork(&'a mut self) -> Self::Forked {
7 | self
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/spongos/src/ddml/commands/sizeof/join.rs:
--------------------------------------------------------------------------------
1 | use crate::{
2 | core::spongos::Spongos,
3 | ddml::commands::{sizeof::Context, Join},
4 | error::Result,
5 | };
6 |
7 | /// Join does not take any space in the binary stream.
8 | impl<F> Join<F> for Context {
9 | fn join(&mut self, _joinee: &mut Spongos<F>) -> Result<&mut Self> {
10 | Ok(self)
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/spongos/src/ddml/commands/sizeof/mask.rs:
--------------------------------------------------------------------------------
1 | use crypto::{keys::x25519, signatures::ed25519};
2 | use generic_array::typenum::Unsigned;
3 |
4 | use crate::{
5 | core::{prp::PRP, spongos::Spongos},
6 | ddml::{
7 | commands::{sizeof::Context, Mask},
8 | types::{Bytes, Maybe, NBytes, Size, Uint16, Uint32, Uint64, Uint8},
9 | },
10 | error::Result,
11 | };
12 |
13 | /// Increases [`Context`] size by 1 byte, representing the number of masking bytes for all Uint8
14 | /// values.
15 | impl Mask<Uint8> for Context {
16 | fn mask(&mut self, _val: Uint8) -> Result<&mut Self> {
17 | self.size += 1;
18 | Ok(self)
19 | }
20 | }
21 |
22 | /// Increases [`Context`] size by 2 bytes, representing the number of masking bytes for all Uint16
23 | /// values.
24 | impl Mask<Uint16> for Context {
25 | fn mask(&mut self, _val: Uint16) -> Result<&mut Self> {
26 | self.size += 2;
27 | Ok(self)
28 | }
29 | }
30 |
31 | /// Increases [`Context`] size by 4 bytes, representing the number of masking bytes for all Uint32
32 | /// values.
33 | impl Mask<Uint32> for Context {
34 | fn mask(&mut self, _val: Uint32) -> Result<&mut Self> {
35 | self.size += 4;
36 | Ok(self)
37 | }
38 | }
39 |
40 | /// Increases [`Context`] size by 8 bytes, representing the number of masking bytes for all Uint64
41 | /// values.
42 | impl Mask<Uint64> for Context {
43 | fn mask(&mut self, _val: Uint64) -> Result<&mut Self> {
44 | self.size += 8;
45 | Ok(self)
46 | }
47 | }
48 |
49 | /// Increases [`Context`] size by the encoded length of the provided [`Size`] wrapper
50 | /// (`num_bytes() + 1`, as `Size` has var-size encoding).
51 | impl Mask<Size> for Context {
52 | fn mask(&mut self, size: Size) -> Result<&mut Self> {
53 | self.size += size.num_bytes() as usize + 1;
54 | Ok(self)
55 | }
56 | }
57 |
58 | /// Increases [`Context`] size by the number of bytes present in the provided [`NBytes`] wrapper.
59 | /// `NBytes` is fixed-size and is masked with `n` bytes.
60 | impl<T: AsRef<[u8]>> Mask<NBytes<T>> for Context {
61 | fn mask(&mut self, nbytes: NBytes<T>) -> Result<&mut Self> {
62 | self.size += nbytes.inner().as_ref().len();
63 | Ok(self)
64 | }
65 | }
66 |
67 | /// Increases [`Context`] size by the length of the provided [`Bytes`] wrapper plus its length prefix.
68 | /// `Bytes` has a variable size, so the length `n` is masked as a `Size` before the content bytes.
69 | impl<T: AsRef<[u8]>> Mask<Bytes<T>> for Context {
70 | fn mask(&mut self, bytes: Bytes<T>) -> Result<&mut Self> {
71 | let size = Size::new(bytes.len());
72 | self.mask(size)?;
73 | self.size += bytes.len();
74 | Ok(self)
75 | }
76 | }
77 |
78 | /// Increases [`Context`] size by the fixed size of an x25519 public key (32 bytes).
79 | impl Mask<&x25519::PublicKey> for Context {
80 | fn mask(&mut self, _pk: &x25519::PublicKey) -> Result<&mut Self> {
81 | self.size += x25519::PUBLIC_KEY_LENGTH;
82 | Ok(self)
83 | }
84 | }
85 |
86 | /// Increases [`Context`] size by the fixed size of an ed25519 public key (32 bytes).
87 | impl Mask<&ed25519::PublicKey> for Context {
88 | fn mask(&mut self, _pk: &ed25519::PublicKey) -> Result<&mut Self> {
89 | self.size += ed25519::PUBLIC_KEY_LENGTH;
90 | Ok(self)
91 | }
92 | }
93 |
94 | /// Increases [`Context`] size by the fixed size of a [`Spongos`] (CapacitySize + RateSize bytes).
95 | impl<F: PRP> Mask<&Spongos<F>> for Context {
96 | fn mask(&mut self, _spongos: &Spongos<F>) -> Result<&mut Self> {
97 | self.size += F::CapacitySize::USIZE + F::RateSize::USIZE;
98 | Ok(self)
99 | }
100 | }
101 |
102 | /// Masks a [`Maybe`] wrapper for an `Option` into the [`Context`] size. If the `Option` is `Some`,
103 | /// a `Uint8(1)` value is masked first, followed by the content. If the `Option` is `None`, only a
104 | /// `Uint8(0)` is masked.
105 | impl<T> Mask<Maybe<Option<T>>> for Context
106 | where
107 | for<'a> Self: Mask<T> + Mask<&'a ()>,
108 | {
109 | fn mask(&mut self, maybe: Maybe<Option<T>>) -> Result<&mut Self> {
110 | match maybe.into_inner() {
111 | Some(t) => self.mask(Uint8::new(1))?.mask(t)?,
112 | None => self.mask(Uint8::new(0))?,
113 | };
114 | Ok(self)
115 | }
116 | }
117 |
118 | impl<'a> Mask<&'a ()> for Context {
119 | fn mask(&mut self, _: &'a ()) -> Result<&mut Self> {
120 | Ok(self)
121 | }
122 | }
123 |
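// --- Editor's illustrative sketch, not part of the source file ---
// Shows how the `Maybe<Option<T>>` impl above sizes an optional field: `Some`
// costs one flag byte plus the inner value, `None` costs only the flag byte.
// Assumes a `Maybe::new` constructor matching the `into_inner` used above.
fn sizeof_optional_key_sketch(key: Option<&x25519::PublicKey>) -> Result<usize> {
    let mut ctx = Context::new();
    // Some(_) => 1 (Uint8 flag) + 32 (x25519 public key) = 33 bytes
    // None    => 1 (Uint8 flag) byte
    ctx.mask(Maybe::new(key))?;
    Ok(ctx.finalize())
}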
--------------------------------------------------------------------------------
/spongos/src/ddml/commands/sizeof/mod.rs:
--------------------------------------------------------------------------------
1 | //! Implementation of command traits for calculating the size of the output buffer in a Wrap operation.
2 |
3 | /// Message size counting context.
4 | #[derive(Clone, Copy, PartialEq, Eq, Hash, Default, Debug)]
5 | pub struct Context {
6 | size: usize,
7 | }
8 |
9 | /// Context for determining required stream size for wrapping.
10 | impl Context {
11 | /// Creates a new [`Context`].
12 | pub fn new() -> Self {
13 | Self { size: 0 }
14 | }
15 |
16 | /// Returns calculated message size.
17 | pub fn finalize(self) -> usize {
18 | self.size
19 | }
20 | }
21 |
22 | mod absorb;
23 | mod absorb_external;
24 | mod commit;
25 | #[cfg(feature = "std")]
26 | mod dump;
27 | mod fork;
28 | mod join;
29 | mod mask;
30 | mod repeated;
31 | mod skip;
32 | mod squeeze;
33 |
34 | mod ed25519;
35 | mod x25519;
36 |
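// --- Editor's illustrative sketch, not part of the source file ---
// Typical use of this context: run the same DDML command sequence that the
// wrap context will run, then allocate the output buffer from the result.
// The single `absorb` of a `Uint8` stands in for a real message layout.
fn required_buffer_size_sketch() -> crate::error::Result<usize> {
    use crate::ddml::{commands::Absorb, types::Uint8};

    let mut ctx = Context::new();
    ctx.absorb(Uint8::new(42))?; // same commands as the eventual wrap
    Ok(ctx.finalize())           // number of bytes the wrap will produce
}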
--------------------------------------------------------------------------------
/spongos/src/ddml/commands/sizeof/repeated.rs:
--------------------------------------------------------------------------------
1 | use core::iter;
2 |
3 | use crate::{
4 | ddml::commands::{sizeof::Context, Repeated},
5 | error::Result,
6 | };
7 |
8 | /// Repeated modifier. The actual number of repetitions must be wrapped
9 | /// (absorbed/masked/skipped) explicitly.
10 | impl<I, C> Repeated<I, C> for Context
11 | where
12 | I: iter::Iterator,
13 | C: for<'a> FnMut(&'a mut Self, I::Item) -> Result<&'a mut Self>,
14 | {
15 | fn repeated(&mut self, values_iter: I, mut value_handle: C) -> Result<&mut Self> {
16 | values_iter.fold(Ok(self), |rctx, item| -> Result<&mut Self> {
17 | match rctx {
18 | Ok(ctx) => value_handle(ctx, item),
19 | Err(e) => Err(e),
20 | }
21 | })
22 | }
23 | }
24 |
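// --- Editor's illustrative sketch, not part of the source file ---
// As the doc comment above notes, `repeated` does not size the repetition
// count itself. A caller absorbs the count explicitly (here as a `Size`) and
// then folds each item through the closure; the placeholder `Uint8` items are
// example data only.
fn sizeof_repeated_sketch(n: usize) -> Result<usize> {
    use crate::ddml::{
        commands::Absorb,
        types::{Size, Uint8},
    };

    let mut ctx = Context::new();
    ctx.absorb(Size::new(n))? // the count is sized explicitly
        .repeated((0..n).map(|_| Uint8::new(0)), |ctx, item| {
            ctx.absorb(item) // one byte per repeated item
        })?;
    Ok(ctx.finalize())
}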
--------------------------------------------------------------------------------
/spongos/src/ddml/commands/sizeof/skip.rs:
--------------------------------------------------------------------------------
1 | use crate::{
2 | ddml::{
3 | commands::{sizeof::Context, Skip},
4 | types::{Bytes, NBytes, Size, Uint16, Uint32, Uint64, Uint8},
5 | },
6 | error::Result,
7 | };
8 |
9 | /// Skipped values are just encoded and not wrapped.
10 | /// All Uint8 values are encoded with 1 byte
11 | impl Skip<Uint8> for Context {
12 | fn skip(&mut self, _u: Uint8) -> Result<&mut Self> {
13 | self.size += 1;
14 | Ok(self)
15 | }
16 | }
17 |
18 | /// Skipped values are just encoded and not wrapped.
19 | /// All Uint16 values are encoded with 2 bytes
20 | impl Skip<Uint16> for Context {
21 | fn skip(&mut self, _u: Uint16) -> Result<&mut Self> {
22 | self.size += 2;
23 | Ok(self)
24 | }
25 | }
26 |
27 | /// Skipped values are just encoded and not wrapped.
28 | /// All Uint32 values are encoded with 4 bytes
29 | impl Skip<Uint32> for Context {
30 | fn skip(&mut self, _u: Uint32) -> Result<&mut Self> {
31 | self.size += 4;
32 | Ok(self)
33 | }
34 | }
35 |
36 | /// Skipped values are just encoded and not wrapped.
37 | /// All Uint64 values are encoded with 8 bytes
38 | impl Skip<Uint64> for Context {
39 | fn skip(&mut self, _u: Uint64) -> Result<&mut Self> {
40 | self.size += 8;
41 | Ok(self)
42 | }
43 | }
44 |
45 | /// Increases [`Context`] size by the encoded length of the provided [`Size`] wrapper
46 | /// (`num_bytes() + 1`, as `Size` has var-size encoding).
47 | impl Skip<Size> for Context {
48 | fn skip(&mut self, size: Size) -> Result<&mut Self> {
49 | self.size += size.num_bytes() as usize + 1;
50 | Ok(self)
51 | }
52 | }
53 |
54 | /// Increases [`Context`] size by the length of the provided [`Bytes`] wrapper plus its length prefix.
55 | /// `Bytes` has a variable size, so the length `n` is encoded as a `Size` before the content bytes.
56 | impl<T: AsRef<[u8]>> Skip<Bytes<T>> for Context {
57 | fn skip(&mut self, bytes: Bytes<T>) -> Result<&mut Self> {
58 | let bytes_size = Size::new(bytes.len());
59 | self.skip(bytes_size)?;
60 | self.size += bytes.len();
61 | Ok(self)
62 | }
63 | }
64 |
65 | /// Increases [`Context`] size by the number of bytes present in the provided [`NBytes`] wrapper.
66 | /// `NBytes` is fixed-size and is encoded with `n` bytes.
67 | impl<T: AsRef<[u8]>> Skip<NBytes<T>> for Context {
68 | fn skip(&mut self, nbytes: NBytes<T>) -> Result<&mut Self> {
69 | self.size += nbytes.inner().as_ref().len();
70 | Ok(self)
71 | }
72 | }
73 |
--------------------------------------------------------------------------------
/spongos/src/ddml/commands/sizeof/squeeze.rs:
--------------------------------------------------------------------------------
1 | use crate::{
2 | ddml::{
3 | commands::{sizeof::Context, Squeeze},
4 | modifiers::External,
5 | types::{Mac, NBytes},
6 | },
7 | error::Result,
8 | };
9 |
10 | /// Increases [`Context`] size by the number of bytes present in a [`Mac`] wrapper.
11 | /// Mac is just like NBytes.
12 | impl Squeeze<&Mac> for Context {
13 | fn squeeze(&mut self, mac: &Mac) -> Result<&mut Self> {
14 | self.size += mac.length();
15 | Ok(self)
16 | }
17 | }
18 |
19 | /// Increases [`Context`] size by the number of bytes present in a [`Mac`] wrapper.
20 | /// Mac is just like NBytes.
21 | impl Squeeze<Mac> for Context {
22 | fn squeeze(&mut self, val: Mac) -> Result<&mut Self> {
23 | self.squeeze(&val)
24 | }
25 | }
26 |
27 | /// External values are not encoded.
28 | impl<T: AsRef<[u8]>> Squeeze<External<&NBytes<T>>> for Context {
29 | fn squeeze(&mut self, _external_nbytes: External<&NBytes<T>>) -> Result<&mut Self> {
30 | Ok(self)
31 | }
32 | }
33 |
34 | /// External values are not encoded.
35 | impl Squeeze<External<Mac>> for Context {
36 | fn squeeze(&mut self, _mac: External<Mac>) -> Result<&mut Self> {
37 | Ok(self)
38 | }
39 | }
40 |
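// --- Editor's illustrative sketch, not part of the source file ---
// Sizing a MAC field: squeezing a `Mac` contributes its configured length.
// `Mac::new(16)` is assumed here as the constructor for a 16-byte MAC.
fn sizeof_mac_sketch() -> Result<usize> {
    let mut ctx = Context::new();
    ctx.squeeze(&Mac::new(16))?; // +16 bytes
    Ok(ctx.finalize())
}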
--------------------------------------------------------------------------------
/spongos/src/ddml/commands/sizeof/x25519.rs:
--------------------------------------------------------------------------------
1 | use crypto::keys::x25519;
2 |
3 | use crate::{
4 | ddml::{
5 | commands::{sizeof::Context, X25519},
6 | types::NBytes,
7 | },
8 | error::Result,
9 | };
10 |
11 | /// Increases [`Context`] size by the x25519 public key length (32 bytes) plus the number of
12 | /// bytes present in the [`NBytes`] encryption key wrapper.
13 | impl<'a, T: AsRef<[u8]>> X25519<&'a x25519::PublicKey, NBytes<T>> for Context {
14 | fn x25519(&mut self, _pk: &x25519::PublicKey, encryption_key: NBytes<T>) -> Result<&mut Self> {
15 | self.size += x25519::PUBLIC_KEY_LENGTH + encryption_key.inner().as_ref().len();
16 | Ok(self)
17 | }
18 | }
19 |
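// --- Editor's illustrative sketch, not part of the source file ---
// Sizes a hypothetical key-exchange field: 32 bytes for the recipient's
// x25519 public key plus the 32-byte wrapped session key. Assumes an
// `NBytes::new` constructor over any `AsRef<[u8]>` payload.
fn sizeof_x25519_sketch(recipient: &x25519::PublicKey) -> Result<usize> {
    let mut ctx = Context::new();
    ctx.x25519(recipient, NBytes::new([0u8; 32]))?; // 32 + 32 = 64 bytes
    Ok(ctx.finalize())
}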
--------------------------------------------------------------------------------
/spongos/src/ddml/commands/unwrap/absorb_external.rs:
--------------------------------------------------------------------------------
1 | use crypto::{keys::x25519, signatures::ed25519};
2 |
3 | use crate::{
4 | core::prp::PRP,
5 | ddml::{
6 | commands::{unwrap::Context, Absorb},
7 | modifiers::External,
8 | types::{NBytes, Size, Uint16, Uint32, Uint64, Uint8},
9 | },
10 | error::Result,
11 | };
12 |
13 | /// Absorbs a single-byte encoded `Uint8` into the [Spongos](`crate::core::spongos::Spongos`) state
14 | /// without reading from or advancing the internal stream.
15 | impl<IS, F: PRP> Absorb<External<Uint8>> for Context<IS, F> {
16 | fn absorb(&mut self, u: External<Uint8>) -> Result<&mut Self> {
17 | self.spongos.absorb(u.into_inner().to_bytes());
18 | Ok(self)
19 | }
20 | }
21 |
22 | /// Absorbs a two-byte encoded `Uint16` into the [Spongos](`crate::core::spongos::Spongos`) state
23 | /// without reading from or advancing the internal stream.
24 | impl<IS, F: PRP> Absorb<External<Uint16>> for Context<IS, F>