├── version ├── docs ├── .gitignore ├── src │ ├── references │ │ ├── openapi.md │ │ └── cli-options.md │ ├── README.md │ ├── configuration │ │ ├── README.md │ │ ├── submitter.md │ │ ├── logging.md │ │ ├── database.md │ │ └── indexer.md │ ├── installation │ │ ├── README.md │ │ ├── docker.md │ │ └── nix.md │ ├── SUMMARY.md │ ├── useful-links.md │ └── prism-test │ │ └── README.md ├── book.toml └── notes │ └── sqlite.md ├── bin └── neoprism-node │ ├── .gitignore │ ├── src │ ├── app │ │ ├── mod.rs │ │ ├── service │ │ │ ├── mod.rs │ │ │ └── error.rs │ │ └── worker.rs │ ├── http │ │ ├── features │ │ │ ├── mod.rs │ │ │ ├── ui_explorer │ │ │ │ ├── models.rs │ │ │ │ └── mod.rs │ │ │ ├── ui_resolver │ │ │ │ ├── models.rs │ │ │ │ └── mod.rs │ │ │ └── api │ │ │ │ ├── error.rs │ │ │ │ ├── system.rs │ │ │ │ └── submitter.rs │ │ ├── urls.rs │ │ └── mod.rs │ └── main.rs │ ├── tailwind.css │ └── Cargo.toml ├── tests └── prism-test │ ├── project │ ├── build.properties │ └── plugins.sbt │ ├── .gitignore │ ├── src │ ├── test │ │ └── scala │ │ │ └── org │ │ │ └── hyperledger │ │ │ └── identus │ │ │ └── prismtest │ │ │ ├── suite │ │ │ └── StorageTestUtils.scala │ │ │ ├── NodeName.scala │ │ │ └── MainSpec.scala │ └── main │ │ └── scala │ │ └── org │ │ └── hyperledger │ │ └── identus │ │ └── prismtest │ │ └── utils │ │ └── Sha256.scala │ ├── .scalafmt.conf │ └── build.sbt ├── lib ├── did-prism-ledger │ ├── src │ │ ├── lib.rs │ │ └── in_memory │ │ │ ├── sink.rs │ │ │ ├── mod.rs │ │ │ └── source.rs │ └── Cargo.toml ├── did-prism-submitter │ ├── src │ │ ├── dlt │ │ │ └── mod.rs │ │ └── lib.rs │ └── Cargo.toml ├── node-storage │ ├── src │ │ ├── backend │ │ │ ├── mod.rs │ │ │ └── shared.rs │ │ ├── entity │ │ │ ├── mod.rs │ │ │ └── indexer.rs │ │ └── lib.rs │ ├── migrations │ │ ├── sqlite │ │ │ ├── 20250612062110_vdr_index.sql │ │ │ └── 20250424101829_migrate_from_seaorm.sql │ │ └── postgres │ │ │ ├── 20250424101829_migrate_from_seaorm.sql │ │ │ └── 20250612062110_vdr_index.sql │ └── Cargo.toml 
├── did-prism │ ├── src │ │ ├── macros.rs │ │ ├── prelude.rs │ │ ├── utils │ │ │ ├── paging.rs │ │ │ └── mod.rs │ │ ├── error.rs │ │ ├── lib.rs │ │ ├── protocol │ │ │ └── resolver.rs │ │ └── did │ │ │ └── operation │ │ │ └── mod.rs │ ├── build.rs │ ├── Cargo.toml │ ├── proto │ │ ├── prism-version.proto │ │ ├── prism.proto │ │ ├── node-api.proto │ │ └── prism-storage.proto │ └── tests │ │ └── ssi_operation.rs ├── did-core │ ├── src │ │ ├── lib.rs │ │ ├── error.rs │ │ ├── uri.rs │ │ └── did.rs │ ├── Cargo.toml │ ├── README.md │ └── tests │ │ └── did.rs ├── did-prism-indexer │ ├── src │ │ ├── dlt │ │ │ ├── mod.rs │ │ │ ├── common.rs │ │ │ └── error.rs │ │ └── lib.rs │ └── Cargo.toml ├── apollo │ ├── src │ │ ├── lib.rs │ │ ├── jwk.rs │ │ ├── crypto │ │ │ ├── mod.rs │ │ │ ├── x25519.rs │ │ │ └── ed25519.rs │ │ ├── hash.rs │ │ └── hex.rs │ └── Cargo.toml └── did-resolver-http │ └── Cargo.toml ├── package.json ├── .helix └── languages.toml ├── .dockerignore ├── .gitignore ├── tools ├── compose_gen │ ├── __init__.py │ ├── stacks │ │ ├── __init__.py │ │ ├── universal_resolver.py │ │ └── blockfrost_neoprism_demo.py │ ├── metadata.py │ ├── services │ │ ├── __init__.py │ │ ├── uni_resolver_web.py │ │ ├── caddy.py │ │ ├── db.py │ │ ├── cardano_submit_api.py │ │ ├── cardano_wallet.py │ │ ├── cardano_dbsync.py │ │ ├── cardano_node.py │ │ ├── ryo.py │ │ └── prism_node.py │ └── models.py ├── just-recipes │ ├── tools.just │ ├── e2e.just │ └── release.just └── pyproject.toml ├── rustfmt.toml ├── taplo.toml ├── docker ├── prism-test │ ├── Caddyfile-blockfrost │ ├── ryo.yaml │ ├── compose-sqlite-dev.yml │ ├── init-wallet.hurl │ ├── init-did.hurl │ └── cardano-submit-api.yaml ├── blockfrost-neoprism-demo │ ├── .env.template │ ├── Caddyfile-blockfrost │ ├── ryo.yaml │ └── compose.yml ├── preprod-relay │ └── compose.yml ├── mainnet-relay │ └── compose.yml ├── mainnet-dbsync │ └── compose.yml └── mainnet-universal-resolver │ └── compose.yml ├── nix ├── pythonTools.nix ├── devShells │ ├── 
default.nix │ ├── docs.nix │ ├── cardano.nix │ ├── prism-test.nix │ └── development.nix ├── checks │ ├── default.nix │ ├── tools-checks.nix │ └── neoprism-checks.nix ├── packages │ ├── docs-site.nix │ ├── neoprism-ui-assets.nix │ ├── neoprism-docker.nix │ ├── neoprism-bin.nix │ ├── cardano-testnet-docker.nix │ └── default.nix └── rustTools.nix ├── .sqlfluff ├── .github └── workflows │ ├── pr-lint.yml │ ├── deploy-docs.yml │ ├── checks.yml │ ├── conformance-test.yml │ ├── scala-steward.yml │ ├── release.yml │ └── scorecard.yml ├── .scala-steward.conf ├── bindings └── ts-types │ └── did_core_types.ts ├── flake.nix ├── Cargo.toml ├── changes └── archive │ └── 2025-11-26-sqlite-backend-support.md └── cliff.toml /version: -------------------------------------------------------------------------------- 1 | 0.8.0 2 | -------------------------------------------------------------------------------- /docs/.gitignore: -------------------------------------------------------------------------------- 1 | book/ 2 | -------------------------------------------------------------------------------- /bin/neoprism-node/.gitignore: -------------------------------------------------------------------------------- 1 | assets/ 2 | -------------------------------------------------------------------------------- /tests/prism-test/project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=1.11.7 2 | -------------------------------------------------------------------------------- /bin/neoprism-node/src/app/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod service; 2 | pub mod worker; 3 | -------------------------------------------------------------------------------- /lib/did-prism-ledger/src/lib.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "in-memory")] 2 | pub mod in_memory; 3 | 
-------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "devDependencies": { 3 | "daisyui": "^5.0.28" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /.helix/languages.toml: -------------------------------------------------------------------------------- 1 | [language-server.rust-analyzer.config] 2 | cargo.features = "all" 3 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | target/ 2 | .envrc 3 | *.db 4 | *.dump 5 | node_modules/ 6 | result 7 | docker/ 8 | -------------------------------------------------------------------------------- /lib/did-prism-submitter/src/dlt/mod.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "cardano-wallet")] 2 | pub mod cardano_wallet; 3 | -------------------------------------------------------------------------------- /bin/neoprism-node/src/http/features/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod api; 2 | pub mod ui_explorer; 3 | pub mod ui_resolver; 4 | -------------------------------------------------------------------------------- /bin/neoprism-node/src/app/service/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod error; 2 | mod prism; 3 | 4 | pub use prism::PrismDidService; 5 | -------------------------------------------------------------------------------- /bin/neoprism-node/tailwind.css: -------------------------------------------------------------------------------- 1 | @import "tailwindcss"; 2 | @plugin "daisyui" { 3 | themes: dark --default; 4 | } 5 | -------------------------------------------------------------------------------- /.gitignore: 
-------------------------------------------------------------------------------- 1 | target/ 2 | .envrc 3 | .env 4 | *.db 5 | *.dump 6 | data/ 7 | node_modules/ 8 | result 9 | __pycache__ 10 | -------------------------------------------------------------------------------- /lib/node-storage/src/backend/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod postgres; 2 | mod shared; 3 | 4 | #[cfg(feature = "sqlite-storage")] 5 | pub mod sqlite; 6 | -------------------------------------------------------------------------------- /docs/src/references/openapi.md: -------------------------------------------------------------------------------- 1 | # OpenAPI specification 2 | 3 | ```yaml 4 | 5 | ``` 6 | -------------------------------------------------------------------------------- /tools/compose_gen/__init__.py: -------------------------------------------------------------------------------- 1 | from . import services, stacks 2 | from .metadata import VERSION 3 | 4 | __all__ = ["services", "stacks", "VERSION"] 5 | -------------------------------------------------------------------------------- /lib/node-storage/migrations/sqlite/20250612062110_vdr_index.sql: -------------------------------------------------------------------------------- 1 | -- no-op for sqlite: schema introduced in the initial migration already matches this version. 2 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | max_width = 120 2 | 3 | format_code_in_doc_comments = true 4 | 5 | imports_granularity = "Module" 6 | group_imports = "StdExternalCrate" 7 | -------------------------------------------------------------------------------- /tools/compose_gen/stacks/__init__.py: -------------------------------------------------------------------------------- 1 | from . 
import blockfrost_neoprism_demo, prism_test, universal_resolver 2 | 3 | __all__ = ["blockfrost_neoprism_demo", "prism_test", "universal_resolver"] 4 | -------------------------------------------------------------------------------- /bin/neoprism-node/src/http/features/ui_explorer/models.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | #[derive(Debug, Clone, Serialize, Deserialize)] 4 | pub struct PageQuery { 5 | pub page: Option, 6 | } 7 | -------------------------------------------------------------------------------- /bin/neoprism-node/src/http/features/ui_resolver/models.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | #[derive(Debug, Clone, Serialize, Deserialize)] 4 | pub struct DidQuery { 5 | pub did: Option, 6 | } 7 | -------------------------------------------------------------------------------- /bin/neoprism-node/src/main.rs: -------------------------------------------------------------------------------- 1 | use neoprism_node::run_command; 2 | 3 | #[tokio::main] 4 | async fn main() -> anyhow::Result<()> { 5 | tracing_subscriber::fmt::init(); 6 | run_command().await?; 7 | Ok(()) 8 | } 9 | -------------------------------------------------------------------------------- /lib/did-prism/src/macros.rs: -------------------------------------------------------------------------------- 1 | #[macro_export] 2 | macro_rules! 
location { 3 | () => { 4 | $crate::utils::Location { 5 | file: file!(), 6 | line: line!(), 7 | } 8 | }; 9 | } 10 | -------------------------------------------------------------------------------- /lib/did-core/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod did; 2 | mod did_doc; 3 | mod error; 4 | mod resolution; 5 | pub mod uri; 6 | 7 | pub use did::*; 8 | pub use did_doc::*; 9 | pub use error::*; 10 | pub use resolution::*; 11 | pub use uri::*; 12 | -------------------------------------------------------------------------------- /taplo.toml: -------------------------------------------------------------------------------- 1 | [formatting] 2 | align_entries = true 3 | column_width = 100 4 | allowed_blank_lines = 1 5 | indent_string = " " 6 | array_auto_collapse = false 7 | array_auto_expand = false 8 | compact_arrays = false 9 | -------------------------------------------------------------------------------- /tests/prism-test/project/plugins.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.6") 2 | addSbtPlugin("com.thesamet" % "sbt-protoc" % "1.0.8") 3 | 4 | libraryDependencies += "com.thesamet.scalapb" %% "compilerplugin" % "0.11.20" 5 | -------------------------------------------------------------------------------- /docs/src/README.md: -------------------------------------------------------------------------------- 1 | # NeoPRISM Documentation 2 | 3 | Welcome to the NeoPRISM documentation site! 4 | 5 | Use the navigation panel on the left to browse different sections of the documentation, including guides, references, and community resources. 
6 | -------------------------------------------------------------------------------- /lib/did-prism-indexer/src/dlt/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod error; 2 | 3 | #[cfg(any(feature = "oura", feature = "dbsync"))] 4 | mod common; 5 | 6 | #[cfg(feature = "oura")] 7 | pub mod oura; 8 | 9 | #[cfg(feature = "dbsync")] 10 | pub mod dbsync; 11 | -------------------------------------------------------------------------------- /docker/prism-test/Caddyfile-blockfrost: -------------------------------------------------------------------------------- 1 | :3000 { 2 | handle /tx/submit { 3 | rewrite * /api/submit/tx 4 | reverse_proxy cardano-submit-api:8090 5 | } 6 | 7 | handle { 8 | reverse_proxy bf-ryo:3000 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /nix/pythonTools.nix: -------------------------------------------------------------------------------- 1 | { python313 }: 2 | 3 | let 4 | commonPackages = 5 | ps: with ps; [ 6 | pydantic 7 | pyyaml 8 | ]; 9 | in 10 | rec { 11 | python = python313; 12 | pythonEnv = python.withPackages commonPackages; 13 | } 14 | -------------------------------------------------------------------------------- /docker/blockfrost-neoprism-demo/.env.template: -------------------------------------------------------------------------------- 1 | NETWORK=mainnet 2 | 3 | DBSYNC_HOST= 4 | DBSYNC_DB= 5 | DBSYNC_PORT= 6 | DBSYNC_USER= 7 | DBSYNC_PASSWORD= 8 | 9 | DBSYNC_URL="postgresql://$DBSYNC_USER:$DBSYNC_PASSWORD@$DBSYNC_HOST:$DBSYNC_PORT/$DBSYNC_DB" 10 | -------------------------------------------------------------------------------- /lib/did-prism/src/prelude.rs: -------------------------------------------------------------------------------- 1 | pub use protobuf::Message; 2 | 3 | pub use crate::did::{CanonicalPrismDid, DidState, LongFormPrismDid, PrismDid, PrismDidOps}; 4 | pub use crate::proto::MessageExt; 5 | pub use 
crate::proto::prism::{PrismOperation, SignedPrismOperation}; 6 | -------------------------------------------------------------------------------- /lib/apollo/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![feature(split_array)] 2 | 3 | pub mod crypto; 4 | 5 | #[cfg(feature = "hash")] 6 | pub mod hash; 7 | 8 | #[cfg(feature = "hex")] 9 | pub mod hex; 10 | 11 | #[cfg(feature = "base64")] 12 | pub mod base64; 13 | 14 | #[cfg(feature = "jwk")] 15 | pub mod jwk; 16 | -------------------------------------------------------------------------------- /.sqlfluff: -------------------------------------------------------------------------------- 1 | [sqlfluff] 2 | dialect = postgres 3 | exclude_rules = LT08 4 | max_line_length = 120 5 | large_file_skip_byte_limit = 0 6 | 7 | [sqlfluff:rules:capitalisation.keywords] 8 | capitalisation_policy = upper 9 | 10 | [sqlfluff:rules:references.quoting] 11 | prefer_quoted_keywords = True 12 | -------------------------------------------------------------------------------- /tools/compose_gen/metadata.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | 4 | def _read_version() -> str: 5 | version_file = Path(__file__).parent.parent.parent / "version" 6 | return version_file.read_text().strip() 7 | 8 | 9 | VERSION: str = _read_version() 10 | 11 | __all__ = ["VERSION"] 12 | -------------------------------------------------------------------------------- /nix/devShells/default.nix: -------------------------------------------------------------------------------- 1 | { self, pkgs }: 2 | 3 | { 4 | default = import ./development.nix { inherit pkgs; }; 5 | cardano = import ./cardano.nix { inherit pkgs; }; 6 | prism-test = import ./prism-test.nix { inherit pkgs; }; 7 | 8 | docs = import ./docs.nix { inherit pkgs self; }; 9 | } 10 | -------------------------------------------------------------------------------- 
/lib/did-prism-submitter/src/lib.rs: -------------------------------------------------------------------------------- 1 | use identus_did_prism::dlt::TxId; 2 | use identus_did_prism::prelude::SignedPrismOperation; 3 | 4 | pub mod dlt; 5 | 6 | #[async_trait::async_trait] 7 | pub trait DltSink { 8 | async fn publish_operations(&self, operations: Vec) -> Result; 9 | } 10 | -------------------------------------------------------------------------------- /docker/blockfrost-neoprism-demo/Caddyfile-blockfrost: -------------------------------------------------------------------------------- 1 | :3000 { 2 | handle /tx/submit { 3 | reverse_proxy cardano-submit-api:8090 4 | } 5 | 6 | handle /dids/* { 7 | rewrite * /api{uri} 8 | reverse_proxy neoprism:8080 9 | } 10 | 11 | handle { 12 | reverse_proxy bf-ryo:3000 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /nix/checks/default.nix: -------------------------------------------------------------------------------- 1 | { pkgs, self, ... 
}: 2 | 3 | { 4 | default = pkgs.callPackage ./neoprism-checks.nix { }; 5 | tools = pkgs.callPackage ./tools-checks.nix { }; 6 | } 7 | // { 8 | inherit (self.packages.${pkgs.stdenv.hostPlatform.system}) 9 | docs-site 10 | neoprism-bin 11 | neoprism-docker 12 | neoprism-docker-latest 13 | ; 14 | } 15 | -------------------------------------------------------------------------------- /lib/did-prism/src/utils/paging.rs: -------------------------------------------------------------------------------- 1 | #[derive(Debug, Clone, PartialEq, Eq)] 2 | pub struct Paginated { 3 | pub items: Vec, 4 | pub current_page: u32, 5 | pub page_size: u32, 6 | pub total_items: u32, 7 | } 8 | 9 | impl Paginated { 10 | pub fn total_pages(&self) -> u32 { 11 | self.total_items.div_ceil(self.page_size) 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /docker/blockfrost-neoprism-demo/ryo.yaml: -------------------------------------------------------------------------------- 1 | server: 2 | listenAddress: "0.0.0.0" 3 | port: 3000 4 | debug: true 5 | dbSync: 6 | host: db-sync 7 | port: 5432 8 | user: postgres 9 | database: postgres 10 | maxConnections: 4 11 | network: mainnet 12 | tokenRegistryUrl: https://tokens.cardano.org 13 | mithril: 14 | enabled: false 15 | aggregator: https://aggregator.release-mainnet.api.mithril.network/aggregator 16 | -------------------------------------------------------------------------------- /docker/prism-test/ryo.yaml: -------------------------------------------------------------------------------- 1 | server: 2 | listenAddress: "0.0.0.0" 3 | port: 3000 4 | debug: true 5 | dbSync: 6 | host: db-sync 7 | port: 5432 8 | user: postgres 9 | database: postgres 10 | maxConnections: 4 11 | network: custom 12 | genesisDataFolder: /node/testnet 13 | tokenRegistryUrl: https://tokens.cardano.org 14 | mithril: 15 | enabled: false 16 | aggregator: https://aggregator.release-mainnet.api.mithril.network/aggregator 17 | 
-------------------------------------------------------------------------------- /lib/did-prism/build.rs: -------------------------------------------------------------------------------- 1 | use protobuf_codegen::Codegen; 2 | 3 | fn main() { 4 | Codegen::new() 5 | .include("proto") 6 | .inputs([ 7 | "proto/prism.proto", 8 | "proto/prism-ssi.proto", 9 | "proto/prism-storage.proto", 10 | "proto/prism-version.proto", 11 | "proto/node-api.proto", 12 | ]) 13 | .cargo_out_dir("generated") 14 | .run_from_script(); 15 | } 16 | -------------------------------------------------------------------------------- /.github/workflows/pr-lint.yml: -------------------------------------------------------------------------------- 1 | name: "Validate PR Title" 2 | 3 | on: 4 | pull_request_target: 5 | types: 6 | - opened 7 | - edited 8 | - synchronize 9 | 10 | permissions: 11 | pull-requests: read 12 | 13 | jobs: 14 | main: 15 | name: Validate PR Title 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: amannn/action-semantic-pull-request@v5 19 | env: 20 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 21 | -------------------------------------------------------------------------------- /tests/prism-test/.gitignore: -------------------------------------------------------------------------------- 1 | # macOS 2 | .DS_Store 3 | 4 | # sbt specific 5 | dist/* 6 | target/ 7 | lib_managed/ 8 | src_managed/ 9 | project/boot/ 10 | project/plugins/project/ 11 | project/local-plugins.sbt 12 | .history 13 | .ensime 14 | .ensime_cache/ 15 | .sbt-scripted/ 16 | local.sbt 17 | 18 | # Bloop 19 | .bsp 20 | 21 | # VS Code 22 | .vscode/ 23 | 24 | # Metals 25 | .bloop/ 26 | .metals/ 27 | metals.sbt 28 | 29 | # IDEA 30 | .idea 31 | .idea_modules 32 | /.worksheet/ 33 | -------------------------------------------------------------------------------- /lib/did-prism-indexer/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![feature(error_reporter)] 2 | 3 | use 
identus_did_prism::dlt::{DltCursor, PublishedPrismObject}; 4 | use tokio::sync::{mpsc, watch}; 5 | 6 | pub mod dlt; 7 | mod indexing; 8 | pub mod repo; 9 | 10 | pub use indexing::{run_indexer_loop, run_sync_loop}; 11 | 12 | pub trait DltSource { 13 | fn sync_cursor(&self) -> watch::Receiver>; 14 | fn into_stream(self) -> Result, String>; 15 | } 16 | -------------------------------------------------------------------------------- /docs/book.toml: -------------------------------------------------------------------------------- 1 | [book] 2 | title = "NeoPRISM Documentation" 3 | authors = [ ] 4 | 5 | [build] 6 | build-dir = "book" 7 | 8 | [preprocessor.cmdrun] 9 | renderers = [ "html", "markdown" ] 10 | 11 | [preprocessor.d2] 12 | command = "mdbook-d2" 13 | renderer = [ "html", "markdown" ] 14 | 15 | [output.html] 16 | 17 | [output.markdown] 18 | 19 | [output.linkcheck] 20 | follow-web-links = false # enable this when checking broken links 21 | exclude = [ 'localhost' ] 22 | -------------------------------------------------------------------------------- /tests/prism-test/src/test/scala/org/hyperledger/identus/prismtest/suite/StorageTestUtils.scala: -------------------------------------------------------------------------------- 1 | package org.hyperledger.identus.prismtest.suite 2 | 3 | import io.iohk.atala.prism.protos.node_api.DIDData 4 | import org.hyperledger.identus.prismtest.utils.TestUtils 5 | 6 | trait StorageTestUtils extends TestUtils: 7 | protected def extractStorageHex(didData: DIDData): Seq[String] = 8 | didData.storageData 9 | .flatMap(_.data.bytes) 10 | .map(_.toByteArray().toHexString) 11 | -------------------------------------------------------------------------------- /lib/apollo/src/jwk.rs: -------------------------------------------------------------------------------- 1 | use crate::base64::Base64UrlStrNoPad; 2 | 3 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] 4 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 5 | 
#[cfg_attr(feature = "openapi", derive(utoipa::ToSchema))] 6 | pub struct Jwk { 7 | pub kty: String, 8 | pub crv: String, 9 | pub x: Option, 10 | pub y: Option, 11 | } 12 | 13 | pub trait EncodeJwk { 14 | fn encode_jwk(&self) -> Jwk; 15 | } 16 | -------------------------------------------------------------------------------- /lib/did-resolver-http/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "identus-did-resolver-http" 3 | version.workspace = true 4 | edition.workspace = true 5 | 6 | [dependencies] 7 | identus-did-core = { workspace = true } 8 | axum = { workspace = true } 9 | derive_more = { workspace = true, features = [ "from" ] } 10 | utoipa = { workspace = true, optional = true, features = [ "chrono" ] } 11 | 12 | [features] 13 | default = [ ] 14 | openapi = [ "dep:utoipa", "identus-did-core/openapi" ] 15 | -------------------------------------------------------------------------------- /tests/prism-test/.scalafmt.conf: -------------------------------------------------------------------------------- 1 | version = 3.10.2 2 | runner.dialect = scala3 3 | 4 | maxColumn = 120 5 | trailingCommas = preserve 6 | 7 | rewrite.rules = [Imports] 8 | rewrite.imports.expand = true 9 | rewrite.imports.sort = original 10 | rewrite.imports.groups = [ 11 | [".*"], 12 | ["java\\..*", "scala\\..*"] 13 | ] 14 | 15 | rewrite.scala3.convertToNewSyntax = true 16 | rewrite.scala3.removeOptionalBraces.enabled = true 17 | rewrite.sortModifiers.preset = styleGuide 18 | rewrite.trailingCommas.style = never 19 | -------------------------------------------------------------------------------- /tools/compose_gen/services/__init__.py: -------------------------------------------------------------------------------- 1 | from . 
import ( 2 | caddy, 3 | cardano_dbsync, 4 | cardano_node, 5 | cardano_submit_api, 6 | cardano_wallet, 7 | db, 8 | neoprism, 9 | prism_node, 10 | ryo, 11 | uni_resolver_web, 12 | ) 13 | 14 | __all__ = [ 15 | "caddy", 16 | "cardano_dbsync", 17 | "cardano_node", 18 | "cardano_submit_api", 19 | "cardano_wallet", 20 | "db", 21 | "neoprism", 22 | "prism_node", 23 | "ryo", 24 | "uni_resolver_web", 25 | ] 26 | -------------------------------------------------------------------------------- /docs/src/references/cli-options.md: -------------------------------------------------------------------------------- 1 | # CLI Options 2 | 3 | ## Commands 4 | 5 | ``` 6 | 7 | ``` 8 | 9 | ## Indexer options 10 | 11 | ``` 12 | 13 | ``` 14 | 15 | ## Submitter options 16 | 17 | ``` 18 | 19 | ``` 20 | 21 | ## Standalone options 22 | 23 | ``` 24 | 25 | ``` 26 | 27 | ## Generate OpenAPI options 28 | 29 | ``` 30 | 31 | ``` 32 | -------------------------------------------------------------------------------- /nix/devShells/docs.nix: -------------------------------------------------------------------------------- 1 | { self, pkgs }: 2 | 3 | let 4 | rootDir = "$ROOT_DIR"; 5 | in 6 | pkgs.mkShell { 7 | name = "docs-shell"; 8 | buildInputs = with pkgs; [ 9 | d2 10 | mdbook 11 | mdbook-cmdrun 12 | mdbook-d2 13 | mdbook-linkcheck 14 | yq-go 15 | self.packages.${stdenv.hostPlatform.system}.neoprism-bin 16 | ]; 17 | shellHook = '' 18 | export ROOT_DIR=$(${pkgs.git}/bin/git rev-parse --show-toplevel) 19 | ${pkgs.cowsay}/bin/cowsay "Working on project root directory: ${rootDir}" 20 | cd "${rootDir}/docs" 21 | ''; 22 | } 23 | -------------------------------------------------------------------------------- /tools/compose_gen/services/uni_resolver_web.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | 3 | from ..models import Service 4 | 5 | 6 | class Options(BaseModel): 7 | image: str = 
"universalresolver/uni-resolver-web:0.44.0-4922fcc" 8 | host_port: int 9 | prism_driver_url: str = "http://neoprism-indexer:8080/api" 10 | 11 | 12 | def mk_service(options: Options) -> Service: 13 | return Service( 14 | image=options.image, 15 | ports=[f"{options.host_port}:8080"], 16 | environment={"uniresolver_web_driver_url_did_prism": options.prism_driver_url}, 17 | ) 18 | -------------------------------------------------------------------------------- /lib/did-core/src/error.rs: -------------------------------------------------------------------------------- 1 | #[derive(Debug, derive_more::From, derive_more::Display, derive_more::Error)] 2 | #[display("{source}")] 3 | pub struct InvalidDid { 4 | source: identity_did::Error, 5 | } 6 | 7 | #[derive(Debug, derive_more::Display, derive_more::Error)] 8 | #[display("invalid uri: {msg}")] 9 | pub struct InvalidUri { 10 | pub msg: &'static str, 11 | } 12 | 13 | #[derive(Debug, derive_more::From, derive_more::Display, derive_more::Error)] 14 | pub enum Error { 15 | #[display("{_0}")] 16 | InvalidDid(InvalidDid), 17 | #[display("{_0}")] 18 | InvalidUri(InvalidUri), 19 | } 20 | -------------------------------------------------------------------------------- /docs/src/configuration/README.md: -------------------------------------------------------------------------------- 1 | # Configuration 2 | 3 | NeoPRISM nodes offer flexible configuration to suit various deployment scenarios. 4 | 5 | This section explains the key configuration options. 6 | For a complete list of available options, see [CLI Options](../references/cli-options.md). 7 | 8 | To start NeoPRISM node in each mode, use one of the following subcommands: 9 | 10 | ``` 11 | 12 | ``` 13 | 14 | The standalone mode combines both indexer and submitter functionalities. 15 | You can refer to the configuration options for each mode and apply them together when using standalone. 
16 | 17 | -------------------------------------------------------------------------------- /nix/packages/docs-site.nix: -------------------------------------------------------------------------------- 1 | { 2 | stdenv, 3 | lib, 4 | version, 5 | d2, 6 | mdbook, 7 | mdbook-d2, 8 | mdbook-cmdrun, 9 | mdbook-linkcheck, 10 | yq-go, 11 | neoprism-bin, 12 | }: 13 | 14 | stdenv.mkDerivation { 15 | inherit version; 16 | pname = "docs-site"; 17 | 18 | src = lib.cleanSource ../../docs; 19 | 20 | buildInputs = [ 21 | d2 22 | mdbook 23 | mdbook-cmdrun 24 | mdbook-d2 25 | mdbook-linkcheck 26 | neoprism-bin 27 | yq-go 28 | ]; 29 | 30 | buildPhase = '' 31 | mdbook build 32 | ''; 33 | 34 | installPhase = '' 35 | mkdir -p $out 36 | cp -r book/* $out/ 37 | ''; 38 | } 39 | -------------------------------------------------------------------------------- /tools/just-recipes/tools.just: -------------------------------------------------------------------------------- 1 | set working-directory := '../..' 2 | 3 | # Format and lint-fix tools sources and justfile 4 | format: 5 | echo "Formatting Python files..." 6 | ruff check --select I --fix tools/compose_gen 7 | ruff format tools/compose_gen 8 | 9 | echo "Formatting justfiles..." 10 | just --fmt --unstable 11 | find . 
-name '*.just' -type f -exec sh -c 'echo " → {}" && just --fmt --unstable --justfile {}' \; 12 | 13 | # Type check and validate Python tools code 14 | check: 15 | #!/usr/bin/env bash 16 | SYSTEM=$(nix eval --impure --raw --expr 'builtins.currentSystem') 17 | nix build ".#checks.$SYSTEM.tools" 18 | -------------------------------------------------------------------------------- /lib/did-prism-ledger/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "identus-did-prism-ledger" 3 | version.workspace = true 4 | edition.workspace = true 5 | 6 | [dependencies] 7 | async-trait = { workspace = true } 8 | chrono = { workspace = true } 9 | tokio = { workspace = true, features = [ "sync", "time", "rt" ] } 10 | tracing = { workspace = true } 11 | 12 | identus-apollo = { workspace = true, features = [ "hash" ] } 13 | identus-did-prism = { workspace = true } 14 | identus-did-prism-indexer = { workspace = true } 15 | identus-did-prism-submitter = { workspace = true } 16 | 17 | [features] 18 | default = [ ] 19 | in-memory = [ ] 20 | -------------------------------------------------------------------------------- /lib/did-prism/src/error.rs: -------------------------------------------------------------------------------- 1 | use crate::utils::Location; 2 | 3 | #[derive(Debug, derive_more::Display, derive_more::Error)] 4 | pub enum InvalidInputSizeError { 5 | #[display("expected input size of {expected} but got {actual} for type {type_name} {location}")] 6 | NotExact { 7 | expected: usize, 8 | actual: usize, 9 | type_name: &'static str, 10 | location: Location, 11 | }, 12 | #[display("input size has a limit of {limit} but got {actual} for type {type_name} {location}")] 13 | TooBig { 14 | limit: usize, 15 | actual: usize, 16 | type_name: &'static str, 17 | location: Location, 18 | }, 19 | } 20 | -------------------------------------------------------------------------------- /.github/workflows/deploy-docs.yml: 
-------------------------------------------------------------------------------- 1 | name: Deploy Docs Site 2 | 3 | on: 4 | workflow_dispatch: 5 | 6 | jobs: 7 | build-and-deploy: 8 | runs-on: ubuntu-latest 9 | permissions: 10 | contents: write 11 | 12 | steps: 13 | - name: Checkout repository 14 | uses: actions/checkout@v4 15 | 16 | - name: Install Nix 17 | uses: cachix/install-nix-action@v31 18 | 19 | - name: Build docs site 20 | run: nix build .#docs-site -L 21 | 22 | - name: Deploy to GitHub Pages 23 | uses: peaceiris/actions-gh-pages@v4 24 | with: 25 | github_token: ${{ secrets.GITHUB_TOKEN }} 26 | publish_dir: ./result/html 27 | -------------------------------------------------------------------------------- /docs/src/installation/README.md: -------------------------------------------------------------------------------- 1 | # NeoPRISM Installation 2 | 3 | NeoPRISM can be installed and run using either Docker or Nix. Choose the method that best fits your environment. 4 | 5 | - [Docker installation](./docker.md): Run NeoPRISM using Docker containers. 6 | - [Nix installation](./nix.md): Build and run NeoPRISM using Nix flakes. 7 | 8 | > **Note:** During development and testing, NeoPRISM typically uses between 10–100 MB of memory. No special hardware requirements are expected. 9 | 10 | NeoPRISM also supports advanced deployment modes and Cardano data sources (such as DB Sync and testnet environments). For details on advanced configurations, see the relevant documentation pages. 
11 | 12 | -------------------------------------------------------------------------------- /nix/devShells/cardano.nix: -------------------------------------------------------------------------------- 1 | { pkgs }: 2 | 3 | let 4 | rootDir = "$ROOT_DIR"; 5 | in 6 | pkgs.mkShell { 7 | packages = with pkgs; [ 8 | nix 9 | jq 10 | hurl 11 | cardano-node 12 | cardano-cli 13 | cardano-wallet 14 | cardano-testnet 15 | cardano-db-sync 16 | cardano-submit-api 17 | ]; 18 | 19 | shellHook = '' 20 | export ROOT_DIR=$(${pkgs.git}/bin/git rev-parse --show-toplevel) 21 | ${pkgs.cowsay}/bin/cowsay "Working on project root directory: ${rootDir}" 22 | cd "${rootDir}" 23 | ''; 24 | 25 | CARDANO_CLI = "${pkgs.cardano-cli}/bin/cardano-cli"; 26 | CARDANO_NODE = "${pkgs.cardano-node}/bin/cardano-node"; 27 | PRISM_HOME = "."; 28 | } 29 | -------------------------------------------------------------------------------- /nix/devShells/prism-test.nix: -------------------------------------------------------------------------------- 1 | { pkgs }: 2 | 3 | let 4 | rootDir = "$ROOT_DIR"; 5 | in 6 | pkgs.mkShell { 7 | packages = with pkgs; [ 8 | git 9 | jdk 10 | metals 11 | ncurses 12 | sbt 13 | ]; 14 | 15 | shellHook = '' 16 | export ROOT_DIR=$(${pkgs.git}/bin/git rev-parse --show-toplevel) 17 | ${pkgs.cowsay}/bin/cowsay "Working on project root directory: ${rootDir}" 18 | cd "${rootDir}/tests/prism-test" 19 | ''; 20 | 21 | JAVA_HOME = "${pkgs.jdk}/lib/openjdk"; 22 | SBT_OPTS = "-Xmx4G"; 23 | SSL_CERT_FILE = "${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt"; 24 | 25 | LANG = "C.utf8"; 26 | LD_LIBRARY_PATH = "${pkgs.stdenv.cc.cc.lib}/lib/"; # required by scalapb 27 | } 28 | -------------------------------------------------------------------------------- /tools/compose_gen/services/caddy.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | 3 | from ..models import Service 4 | 5 | IMAGE = "caddy:2.10.2" 6 | 7 | 8 | class 
def mk_service(options: Options) -> Service:
    """Assemble the compose ``Service`` entry for the Caddy reverse proxy.

    The pinned ``IMAGE`` is used unless a truthy ``image_override`` is given.
    A host→target port mapping is published only when ``host_port`` is truthy;
    otherwise no ports are exposed. The configured Caddyfile is mounted at the
    container's default config path.
    """
    # `or`-style fallback preserved: an empty-string override falls back to IMAGE.
    chosen_image = options.image_override if options.image_override else IMAGE

    if options.host_port:
        port_mappings = [f"{options.host_port}:{options.target_port}"]
    else:
        port_mappings = None

    return Service(
        image=chosen_image,
        ports=port_mappings,
        volumes=[f"{options.caddyfile}:/etc/caddy/Caddyfile"],
    )
[ "hash", "hex", "secp256k1", "ed25519", "x25519" ] } 14 | identus-did-prism = { workspace = true } 15 | 16 | [features] 17 | default = [ ] 18 | cardano-wallet = [ "dep:reqwest" ] 19 | -------------------------------------------------------------------------------- /tests/prism-test/src/test/scala/org/hyperledger/identus/prismtest/NodeName.scala: -------------------------------------------------------------------------------- 1 | package org.hyperledger.identus.prismtest 2 | 3 | import zio.* 4 | import zio.test.* 5 | 6 | case class NodeName(name: String) 7 | 8 | object NodeName: 9 | def layer(name: String): ULayer[NodeName] = ZLayer.succeed(NodeName(name)) 10 | 11 | /** Skip the test if node name matches */ 12 | def skipIf(name: String, names: String*): TestAspect[Nothing, NodeName, Nothing, Any] = 13 | new TestAspect[Nothing, NodeName, Nothing, Any]: 14 | def some[R <: NodeName, E](spec: Spec[R, E])(implicit trace: Trace): Spec[R, E] = 15 | spec.whenZIO[R, E]( 16 | ZIO.serviceWith[NodeName](nodeName => (name +: names).forall(_ != nodeName.name)) 17 | ) 18 | -------------------------------------------------------------------------------- /docs/src/SUMMARY.md: -------------------------------------------------------------------------------- 1 | # Summary 2 | 3 | # Usage Guide 4 | 5 | - [Welcome](./README.md) 6 | - [Useful links](./useful-links.md) 7 | - [Installation](./installation/README.md) 8 | - [Docker installation](./installation/docker.md) 9 | - [Nix installation](./installation/nix.md) 10 | - [Architecture](./architecture/README.md) 11 | - [Configuration](./configuration/README.md) 12 | - [Indexer](./configuration/indexer.md) 13 | - [Submitter](./configuration/submitter.md) 14 | - [Logging](./configuration/logging.md) 15 | - [Database](./configuration/database.md) 16 | - [PRISM specification tests](./prism-test/README.md) 17 | 18 | # Reference Guide 19 | 20 | - [CLI options](./references/cli-options.md) 21 | - [OpenAPI specification](./references/openapi.md) 
22 | -------------------------------------------------------------------------------- /nix/packages/neoprism-ui-assets.nix: -------------------------------------------------------------------------------- 1 | { 2 | buildNpmPackage, 3 | stdenv, 4 | tailwindcss_4, 5 | }: 6 | 7 | let 8 | npmDeps = buildNpmPackage { 9 | name = "assets-nodemodules"; 10 | src = ./../..; 11 | npmDepsHash = "sha256-snC2EOnV3200x4fziwcj/1o9KoqSJkTFgJgAh9TWNpE="; 12 | dontNpmBuild = true; 13 | installPhase = '' 14 | cp -r ./node_modules $out 15 | ''; 16 | }; 17 | in 18 | stdenv.mkDerivation { 19 | name = "neoprism-ui-assets"; 20 | src = ./../..; 21 | buildInputs = [ tailwindcss_4 ]; 22 | installPhase = '' 23 | mkdir -p ./node_modules 24 | cp -r ${npmDeps}/* ./node_modules 25 | cd ./bin/neoprism-node 26 | mkdir -p $out/assets 27 | tailwindcss -i ./tailwind.css -o $out/assets/styles.css 28 | ''; 29 | } 30 | -------------------------------------------------------------------------------- /docker/prism-test/compose-sqlite-dev.yml: -------------------------------------------------------------------------------- 1 | # Code generated by Python script. DO NOT EDIT. 
2 | services: 3 | neoprism-standalone: 4 | command: 5 | - dev 6 | environment: 7 | NPRISM_CARDANO_NETWORK: custom 8 | NPRISM_CONFIRMATION_BLOCKS: '0' 9 | NPRISM_DB_URL: 'sqlite::memory:' 10 | NPRISM_EXTERNAL_URL: http://localhost:18080 11 | NPRISM_INDEX_INTERVAL: '1' 12 | RUST_LOG: oura=warn,tracing::span=warn,info 13 | healthcheck: 14 | interval: 2s 15 | retries: 30 16 | test: 17 | - CMD 18 | - curl 19 | - -f 20 | - http://localhost:8080/api/_system/health 21 | timeout: 5s 22 | image: identus-neoprism:latest 23 | ports: 24 | - 18080:8080 25 | restart: always 26 | volumes: {} 27 | -------------------------------------------------------------------------------- /.scala-steward.conf: -------------------------------------------------------------------------------- 1 | buildRoots = [ "tests/prism-test" ] 2 | 3 | # If true, Scala Steward will sign off all commits (e.g. `git --signoff`). 4 | # Default: false 5 | signoffCommits = true 6 | 7 | # If set, Scala Steward will only create or update `n` PRs each time it runs (see `pullRequests.frequency` above). 8 | # Useful if running frequently and/or CI build are costly 9 | # Default: null 10 | updates.limit = 5 11 | 12 | # If set, Scala Steward will use this message template for the commit messages and PR titles. 13 | # Supported variables: ${artifactName}, ${currentVersion}, ${nextVersion} and ${default} 14 | # Default: "${default}" which is equivalent to "Update ${artifactName} to ${nextVersion}" 15 | commits.message = "chore: update ${artifactName} from ${currentVersion} to ${nextVersion}" 16 | -------------------------------------------------------------------------------- /docs/notes/sqlite.md: -------------------------------------------------------------------------------- 1 | # SQLite backend notes 2 | 3 | - **When to pick SQLite**: ideal for local development, CI smoke tests, and air-gapped demos. For production workloads or multi-writer setups stick with PostgreSQL. 
4 | - **Limitations**: single-writer, WAL must remain enabled (we already enforce this in the pool setup), and long-running readers can block checkpointing. Monitor file size and consider VACUUM if needed. 5 | - **Default location**: `~/Library/Application Support/NeoPRISM//neoprism.db` on macOS, `$XDG_DATA_HOME/NeoPRISM//neoprism.db` on Linux, `%APPDATA%\NeoPRISM\\neoprism.db` on Windows. Override via `--db-url sqlite:///custom/path.db`. 6 | - **Backups**: use filesystem snapshots or `sqlite3 neoprism.db ".backup backup.db"`; copying the database file while the node is stopped is usually sufficient for local setups. 7 | -------------------------------------------------------------------------------- /nix/packages/neoprism-docker.nix: -------------------------------------------------------------------------------- 1 | { 2 | bash, 3 | curl, 4 | dockerTools, 5 | neoprism-bin, 6 | tagSuffix ? "", 7 | neoprism-ui-assets, 8 | version, 9 | extraPackages ? [ ], 10 | }: 11 | 12 | dockerTools.buildLayeredImage { 13 | name = "identus-neoprism"; 14 | tag = "${version}${tagSuffix}"; 15 | extraCommands = '' 16 | install -d -m 700 var/lib/neoprism/sqlite 17 | touch var/lib/neoprism/sqlite/.keep 18 | ''; 19 | contents = [ 20 | bash 21 | curl 22 | neoprism-bin 23 | neoprism-ui-assets 24 | ] 25 | ++ extraPackages; 26 | config = { 27 | Env = [ 28 | "RUST_LOG=info,oura=warn" 29 | "NPRISM_ASSETS_PATH=/assets" 30 | ]; 31 | Entrypoint = [ "/bin/neoprism-node" ]; 32 | Cmd = [ ]; 33 | WorkingDir = "/"; 34 | Volumes = { 35 | "/var/lib/neoprism/sqlite" = { }; 36 | }; 37 | }; 38 | } 39 | -------------------------------------------------------------------------------- /bin/neoprism-node/src/http/features/ui_resolver/mod.rs: -------------------------------------------------------------------------------- 1 | use axum::Router; 2 | use axum::extract::{Query, State}; 3 | use axum::routing::get; 4 | use maud::Markup; 5 | use models::DidQuery; 6 | 7 | use crate::IndexerUiState; 8 | use 
crate::http::urls; 9 | 10 | pub(in crate::http) mod models; 11 | mod views; 12 | 13 | pub fn router() -> Router { 14 | Router::new().route(urls::Resolver::AXUM_PATH, get(index)) 15 | } 16 | 17 | async fn index(Query(query): Query, State(state): State) -> Markup { 18 | let network = state.dlt_source.map(|i| i.network); 19 | match query.did.as_ref() { 20 | None => views::index(network), 21 | Some(did_str) => { 22 | let (state, debug) = state.prism_did_service.resolve_did(did_str).await; 23 | views::resolve(network, did_str, state, debug) 24 | } 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /tools/compose_gen/stacks/universal_resolver.py: -------------------------------------------------------------------------------- 1 | from ..models import ComposeConfig 2 | from ..services import db, neoprism, uni_resolver_web 3 | 4 | 5 | def mk_stack() -> ComposeConfig: 6 | services = { 7 | "db": db.mk_service(db.Options(host_port=5432)), 8 | "neoprism-indexer": neoprism.mk_service( 9 | neoprism.Options( 10 | host_port=8081, 11 | network="mainnet", 12 | command=neoprism.IndexerCommand( 13 | dlt_source=neoprism.OuraDltSource( 14 | address="backbone.mainnet.cardanofoundation.org:3001", 15 | ), 16 | ), 17 | ), 18 | ), 19 | "uni-resolver-web": uni_resolver_web.mk_service( 20 | uni_resolver_web.Options(host_port=8080) 21 | ), 22 | } 23 | 24 | return ComposeConfig(services=services) 25 | -------------------------------------------------------------------------------- /lib/did-prism/src/utils/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod paging; 2 | 3 | /// Check if the given slice contains unique items. 
/// Check if the given slice contains unique items.
///
/// # Examples
/// ```
/// use identus_did_prism::utils::is_slice_unique;
/// assert_eq!(is_slice_unique(&[1, 2, 3]), true);
/// assert_eq!(is_slice_unique(&[1, 2, 2]), false);
/// assert_eq!(is_slice_unique(&[1, 1, 1]), false);
/// assert_eq!(is_slice_unique::<i32>(&[]), true);
/// ```
pub fn is_slice_unique<T>(items: &[T]) -> bool
where
    T: Eq + Ord,
{
    // BTreeSet::insert returns false when the value is already present,
    // so the first duplicate ends the scan early.
    let mut seen = std::collections::BTreeSet::new();
    for item in items {
        if !seen.insert(item) {
            return false;
        }
    }
    true
}
-------------------------------------------------------------------------------- 1 | # Wait until wallet is up 2 | GET {{ WALLET_BASE_URL }}/wallets 3 | [Options] 4 | delay: 5s 5 | retry: 10 6 | HTTP 200 7 | 8 | POST {{ WALLET_BASE_URL }}/wallets 9 | { 10 | "name": "Test wallet", 11 | "mnemonic_sentence": [ 12 | "mimic", 13 | "candy", 14 | "diamond", 15 | "virus", 16 | "hospital", 17 | "dragon", 18 | "culture", 19 | "price", 20 | "emotion", 21 | "tell", 22 | "update", 23 | "give", 24 | "faint", 25 | "resist", 26 | "faculty", 27 | "soup", 28 | "demand", 29 | "window", 30 | "dignity", 31 | "capital", 32 | "bullet", 33 | "purity", 34 | "practice", 35 | "fossil" 36 | ], 37 | "passphrase": "{{ WALLET_PASSPHRASE }}" 38 | } 39 | HTTP 201 40 | [Captures] 41 | wallet_id: jsonpath "$.id" 42 | 43 | GET {{ WALLET_BASE_URL }}/wallets/{{ wallet_id }}/addresses 44 | HTTP 200 45 | [Captures] 46 | address: jsonpath "$[0].id" 47 | -------------------------------------------------------------------------------- /lib/did-core/README.md: -------------------------------------------------------------------------------- 1 | # identus-did-core 2 | 3 | This crate provides canonical Rust definitions for DID Documents and related types. 4 | 5 | ## TypeScript Type Export 6 | 7 | To generate TypeScript type definitions for the canonical DID Document types, enable the `ts-types` feature and run tests: 8 | 9 | ```sh 10 | cargo test --features ts-types 11 | ``` 12 | 13 | This will generate TypeScript files in the `bindings/` directory for all exported types. 14 | 15 | ### How it works 16 | - The `ts-types` feature enables the optional `ts-rs` dependency and derives the necessary macros for TypeScript export. 17 | - The test module at the end of `src/did_doc.rs` triggers export for all canonical types. 18 | - Downstream crates do not pull in `ts-rs` unless they opt-in to the feature. 19 | 20 | ### Example 21 | See `src/did_doc.rs` for annotated type definitions and the export test. 
22 | 23 | --- 24 | 25 | For more details, see the [ts-rs documentation](https://docs.rs/ts-rs/latest/ts_rs/). 26 | -------------------------------------------------------------------------------- /lib/node-storage/src/entity/mod.rs: -------------------------------------------------------------------------------- 1 | use identus_apollo::hex::HexStr; 2 | use identus_did_prism::did::CanonicalPrismDid; 3 | use lazybe::macros::Newtype; 4 | use serde::{Deserialize, Serialize}; 5 | 6 | mod indexer; 7 | 8 | pub use indexer::*; 9 | 10 | #[derive(Debug, Clone, Serialize, Deserialize, Newtype, derive_more::From)] 11 | pub struct DidSuffix(Vec); 12 | 13 | impl From for DidSuffix { 14 | fn from(value: CanonicalPrismDid) -> Self { 15 | value.suffix.to_vec().into() 16 | } 17 | } 18 | 19 | impl TryFrom for CanonicalPrismDid { 20 | type Error = crate::Error; 21 | 22 | fn try_from(value: DidSuffix) -> Result { 23 | let suffix = HexStr::from(value.0); 24 | let did = CanonicalPrismDid::from_suffix(suffix)?; 25 | Ok(did) 26 | } 27 | } 28 | 29 | impl DidSuffix { 30 | pub fn into_bytes(self) -> Vec { 31 | self.0 32 | } 33 | 34 | pub fn as_bytes(&self) -> &[u8] { 35 | &self.0 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /tools/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.ruff] 2 | target-version = "py313" 3 | 4 | [tool.ruff.lint] 5 | # Enable common, opinionated rules 6 | select = [ 7 | "E", # pycodestyle errors 8 | "W", # pycodestyle warnings 9 | "F", # pyflakes 10 | "I", # isort (import sorting) 11 | "UP", # pyupgrade (modernize Python syntax) 12 | "B", # flake8-bugbear (common bugs) 13 | "C4", # flake8-comprehensions 14 | "SIM", # flake8-simplify 15 | ] 16 | 17 | [tool.pyright] 18 | # Strict type checking to catch type errors early and maintain code quality. 19 | # All compose_gen code must pass strict mode checks. 
20 | typeCheckingMode = "strict" 21 | pythonVersion = "3.13" 22 | include = [ "compose_gen" ] 23 | 24 | # Core strict mode rules 25 | reportUnnecessaryIsInstance = true 26 | reportUnknownVariableType = true 27 | reportUnknownArgumentType = true 28 | reportMissingTypeArgument = true 29 | 30 | # Additional quality rules 31 | reportUnusedImport = "warning" 32 | reportUnusedVariable = "warning" 33 | reportIncompatibleMethodOverride = true 34 | reportPrivateUsage = "warning" 35 | -------------------------------------------------------------------------------- /tools/compose_gen/models.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Literal 2 | 3 | from pydantic import BaseModel 4 | 5 | 6 | class Healthcheck(BaseModel): 7 | test: list[str] 8 | interval: str = "2s" 9 | timeout: str = "5s" 10 | retries: int = 30 11 | 12 | 13 | ServiceCondition = Literal[ 14 | "service_started", "service_healthy", "service_completed_successfully" 15 | ] 16 | 17 | 18 | class ServiceDependency(BaseModel): 19 | condition: ServiceCondition 20 | 21 | 22 | class Service(BaseModel): 23 | image: str 24 | restart: str | None = "always" 25 | ports: list[str] | None = None 26 | command: list[str] | None = None 27 | entrypoint: list[str] | None = None 28 | environment: dict[str, str] | None = None 29 | volumes: list[str] | None = None 30 | depends_on: dict[str, ServiceDependency] | None = None 31 | healthcheck: Healthcheck | None = None 32 | 33 | model_config = {"extra": "forbid"} 34 | 35 | 36 | class ComposeConfig(BaseModel): 37 | services: dict[str, Service] 38 | volumes: dict[str, dict[str, Any]] | None = None 39 | 40 | model_config = {"extra": "forbid"} 41 | -------------------------------------------------------------------------------- /nix/rustTools.nix: -------------------------------------------------------------------------------- 1 | { rust-bin, rust-overlay }: 2 | 3 | let 4 | nightlyVersion = "2025-11-10"; 5 | rustOverrideArgs 
= { 6 | extensions = [ 7 | "rust-src" 8 | "rust-analyzer" 9 | ]; 10 | targets = [ ]; 11 | }; 12 | in 13 | rec { 14 | rust = mkRust { }; 15 | 16 | rustMinimal = mkRust { minimal = true; }; 17 | 18 | mkRust = 19 | { 20 | minimal ? false, 21 | }: 22 | if minimal then 23 | rust-bin.nightly.${nightlyVersion}.minimal 24 | else 25 | rust-bin.nightly.${nightlyVersion}.default.override rustOverrideArgs; 26 | 27 | mkRustCross = 28 | { 29 | pkgsCross, 30 | minimal ? false, 31 | }: 32 | let 33 | rust-bin = rust-overlay.lib.mkRustBin { } pkgsCross.buildPackages; 34 | in 35 | if minimal then 36 | rust-bin.nightly.${nightlyVersion}.minimal 37 | else 38 | rust-bin.nightly.${nightlyVersion}.default.override rustOverrideArgs; 39 | 40 | cargoLock = { 41 | lockFile = ../Cargo.lock; 42 | outputHashes = { 43 | "oura-1.9.4" = "sha256-SaSJOlxnM2+BDg9uE4GUxKync37DJQD+P4VVZA2NO3g="; 44 | }; 45 | }; 46 | } 47 | -------------------------------------------------------------------------------- /lib/apollo/src/crypto/mod.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "ed25519")] 2 | pub mod ed25519; 3 | #[cfg(feature = "secp256k1")] 4 | pub mod secp256k1; 5 | #[cfg(feature = "x25519")] 6 | pub mod x25519; 7 | 8 | #[derive(Debug, derive_more::From, derive_more::Display, derive_more::Error)] 9 | pub enum Error { 10 | #[display("expected {key_type} key size to be {expected}, got size {actual}")] 11 | InvalidKeySize { 12 | expected: usize, 13 | actual: usize, 14 | key_type: &'static str, 15 | }, 16 | #[cfg(feature = "ed25519")] 17 | #[from] 18 | #[display("unable to parse Ed25519 key")] 19 | Ed25519KeyParsing { source: ed25519_dalek::SignatureError }, 20 | #[cfg(feature = "secp256k1")] 21 | #[from] 22 | #[display("unable to parse secp256k1 key")] 23 | Secp256k1KeyParsing { source: ::k256::elliptic_curve::Error }, 24 | } 25 | 26 | pub trait EncodeVec { 27 | fn encode_vec(&self) -> Vec; 28 | } 29 | 30 | pub trait EncodeArray { 31 | fn 
encode_array(&self) -> [u8; N]; 32 | } 33 | 34 | pub trait Verifiable { 35 | fn verify(&self, message: &[u8], signature: &[u8]) -> bool; 36 | } 37 | -------------------------------------------------------------------------------- /lib/did-prism/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "identus-did-prism" 3 | version.workspace = true 4 | edition.workspace = true 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | chrono = { workspace = true } 10 | derive_more = { workspace = true, features = [ "as_ref", "from", "into", "debug", "display", "error" ] } 11 | enum_dispatch = { workspace = true } 12 | im-rc = { workspace = true } 13 | protobuf = { workspace = true } 14 | regex = { workspace = true } 15 | serde = { workspace = true, features = [ "derive" ] } 16 | serde_json = { workspace = true } 17 | strum = { workspace = true, features = [ "derive" ] } 18 | tracing = { workspace = true } 19 | utoipa = { workspace = true, optional = true } 20 | 21 | identus-did-core = { workspace = true } 22 | identus-apollo = { workspace = true, features = [ "hash", "hex", "secp256k1", "ed25519", "x25519" ] } 23 | 24 | [build-dependencies] 25 | protobuf-codegen = { workspace = true } 26 | 27 | [features] 28 | default = [ ] 29 | openapi = [ "dep:utoipa" ] 30 | -------------------------------------------------------------------------------- /nix/packages/neoprism-bin.nix: -------------------------------------------------------------------------------- 1 | { 2 | lib, 3 | makeRustPlatform, 4 | rust, 5 | cargoLock, 6 | stdenv, 7 | buildPackages, 8 | }: 9 | 10 | let 11 | rustPlatform = makeRustPlatform { 12 | cargo = rust; 13 | rustc = rust; 14 | }; 15 | in 16 | rustPlatform.buildRustPackage { 17 | inherit cargoLock; 18 | name = "neoprism"; 19 | src = lib.cleanSourceWith { 20 | filter = 21 | path: _: 22 | let 23 | baseName = 
builtins.baseNameOf path; 24 | in 25 | !( 26 | baseName == "AGENTS.md" 27 | || baseName == "docker" 28 | || baseName == "docs" 29 | || baseName == ".github" 30 | || baseName == "nix" 31 | || baseName == "README.md" 32 | || baseName == "tests" 33 | || baseName == "tools" 34 | ); 35 | src = ./../..; 36 | }; 37 | nativeBuildInputs = 38 | with buildPackages; 39 | [ 40 | protobuf 41 | ] 42 | ++ lib.optionals stdenv.buildPlatform.isDarwin [ 43 | buildPackages.libiconv 44 | buildPackages.apple-sdk 45 | ]; 46 | doCheck = false; 47 | PROTOC = "${buildPackages.protobuf}/bin/protoc"; 48 | } 49 | -------------------------------------------------------------------------------- /lib/apollo/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "identus-apollo" 3 | version.workspace = true 4 | edition.workspace = true 5 | 6 | [dependencies] 7 | derive_more = { workspace = true, features = [ "as_ref", "from", "into", "debug", "display", "error" ] } 8 | serde = { workspace = true, optional = true, features = [ "derive" ] } 9 | utoipa = { workspace = true, optional = true } 10 | 11 | # crypto 12 | ring = { version = "0.17", optional = true } 13 | base64 = { version = "0.22", optional = true } 14 | hex = { version = "0.4", optional = true } 15 | ed25519-dalek = { version = "2", optional = true } 16 | k256 = { version = "0.13", optional = true, features = [ "arithmetic", "ecdsa" ] } 17 | x25519-dalek = { version = "2", optional = true } 18 | 19 | [features] 20 | default = [ ] 21 | base64 = [ "dep:base64" ] 22 | ed25519 = [ "jwk", "dep:ed25519-dalek" ] 23 | hash = [ "hex", "dep:ring" ] 24 | hex = [ "dep:hex" ] 25 | jwk = [ "base64" ] 26 | openapi = [ "serde", "dep:utoipa" ] 27 | secp256k1 = [ "jwk", "dep:k256" ] 28 | serde = [ "dep:serde" ] 29 | x25519 = [ "jwk", "dep:x25519-dalek" ] 30 | -------------------------------------------------------------------------------- /.github/workflows/checks.yml: 
-------------------------------------------------------------------------------- 1 | name: Checks 2 | 3 | concurrency: 4 | group: ${{ github.head_ref }}${{ github.ref }}-checks 5 | cancel-in-progress: true 6 | 7 | on: 8 | pull_request: 9 | branches: 10 | - main 11 | push: 12 | branches: 13 | - main 14 | 15 | jobs: 16 | neoprism-check: 17 | runs-on: ubuntu-latest 18 | 19 | steps: 20 | - name: Checkout repository 21 | uses: actions/checkout@v4 22 | 23 | - name: Install Nix 24 | uses: cachix/install-nix-action@v31 25 | with: 26 | extra_nix_config: | 27 | experimental-features = nix-command flakes 28 | 29 | - name: Run neoprism check 30 | run: nix build .#checks.x86_64-linux.default 31 | 32 | tools-check: 33 | runs-on: ubuntu-latest 34 | 35 | steps: 36 | - name: Checkout repository 37 | uses: actions/checkout@v4 38 | 39 | - name: Install Nix 40 | uses: cachix/install-nix-action@v31 41 | with: 42 | extra_nix_config: | 43 | experimental-features = nix-command flakes 44 | 45 | - name: Run tools check 46 | run: nix build .#checks.x86_64-linux.tools 47 | -------------------------------------------------------------------------------- /bindings/ts-types/did_core_types.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
2 | 3 | export type Did = string; 4 | 5 | export type DidDocument = { "@context": Array, id: Did, alsoKnownAs: Array | null, verificationMethod: Array, authentication: Array | null, assertionMethod: Array | null, keyAgreement: Array | null, capabilityInvocation: Array | null, capabilityDelegation: Array | null, service: Array | null, }; 6 | 7 | export type Service = { id: string, type: ServiceType, serviceEndpoint: ServiceEndpoint, }; 8 | 9 | export type ServiceEndpoint = StringOrMap | Array; 10 | 11 | export type ServiceType = string | Array; 12 | 13 | export type StringOrMap = string | Record; 14 | 15 | export type Uri = string; 16 | 17 | export type VerificationMethod = { id: string, type: string, controller: string, publicKeyJwk: Record | null, }; 18 | 19 | export type VerificationMethodOrRef = VerificationMethod | string; 20 | -------------------------------------------------------------------------------- /bin/neoprism-node/src/http/urls.rs: -------------------------------------------------------------------------------- 1 | use lazybe::macros::typed_uri; 2 | 3 | use crate::http::ui_explorer::models::PageQuery; 4 | use crate::http::ui_resolver::models::DidQuery; 5 | 6 | // assets 7 | typed_uri!(AssetBase, "assets"); 8 | typed_uri!(AssetStyleSheet, "assets" / "styles.css"); 9 | 10 | // misc 11 | typed_uri!(Home, ""); 12 | typed_uri!(Swagger, "swagger-ui"); 13 | 14 | // UI resolver 15 | typed_uri!(Resolver, "resolver" ? Option); 16 | 17 | // UI explorer 18 | typed_uri!(Explorer, "explorer" ? Option); 19 | typed_uri!(ExplorerDltCursor, "explorer" / "dlt-cursor"); 20 | typed_uri!(ExplorerDidList, "explorer" / "did-list" ? 
Option); 21 | 22 | // API system 23 | typed_uri!(ApiHealth, "api" / "_system" / "health"); 24 | typed_uri!(ApiAppMeta, "api" / "_system" / "metadata"); 25 | 26 | // API submitter 27 | typed_uri!(ApiSignedOpSubmissions, "api" / "signed-operation-submissions"); 28 | 29 | // API indexer 30 | typed_uri!(ApiDid, "api" / "dids" / (did: String)); 31 | typed_uri!(ApiDidData, "api" / "did-data" / (did: String)); 32 | typed_uri!(ApiIndexerStats, "api" / "indexer-stats"); 33 | typed_uri!(ApiVdrBlob, "api" / "vdr-data" / (entry_hash: String)); 34 | -------------------------------------------------------------------------------- /tools/compose_gen/services/cardano_submit_api.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | 3 | from ..models import Service, ServiceDependency 4 | 5 | IMAGE = "ghcr.io/intersectmbo/cardano-submit-api:10.5.1" 6 | 7 | 8 | class Options(BaseModel): 9 | host_port: int | None = None 10 | testnet_volume: str 11 | cardano_node_host: str 12 | network_magic: int 13 | 14 | 15 | def mk_service(options: Options) -> Service: 16 | ports = [f"{options.host_port}:8090"] if options.host_port else None 17 | 18 | return Service( 19 | image=IMAGE, 20 | ports=ports, 21 | command=[ 22 | "--config", 23 | "/config.yaml", 24 | "--socket-path", 25 | "/node/testnet/socket/node1/sock", 26 | "--testnet-magic", 27 | str(options.network_magic), 28 | "--listen-address", 29 | "0.0.0.0", 30 | ], 31 | volumes=[ 32 | f"{options.testnet_volume}:/node/testnet", 33 | "./cardano-submit-api.yaml:/config.yaml", 34 | ], 35 | depends_on={ 36 | options.cardano_node_host: ServiceDependency(condition="service_healthy") 37 | }, 38 | ) 39 | -------------------------------------------------------------------------------- /nix/checks/tools-checks.nix: -------------------------------------------------------------------------------- 1 | { 2 | lib, 3 | stdenv, 4 | pythonTools, 5 | ruff, 6 | pyright, 7 | just, 8 | }: 9 | 10 | 
let 11 | inherit (pythonTools) pythonEnv; 12 | in 13 | stdenv.mkDerivation { 14 | name = "tools-checks"; 15 | src = lib.cleanSourceWith { 16 | filter = 17 | path: _: 18 | let 19 | baseName = builtins.baseNameOf path; 20 | relativePath = lib.removePrefix (toString ./../..) (toString path); 21 | in 22 | baseName == "justfile" || lib.hasPrefix "/tools" relativePath; 23 | src = ./../..; 24 | }; 25 | 26 | nativeBuildInputs = [ 27 | pythonEnv 28 | ruff 29 | pyright 30 | just 31 | ]; 32 | 33 | buildPhase = "true"; 34 | 35 | doCheck = true; 36 | 37 | checkPhase = '' 38 | echo "Checking justfile formatting..." 39 | just --unstable --check --fmt 40 | find . -name '*.just' -type f -print0 | xargs -0 -I {} sh -c 'echo " → {}" && just --unstable --check --fmt --justfile {}' 41 | 42 | cd tools 43 | 44 | echo "Linting Python files..." 45 | ruff check compose_gen 46 | 47 | echo "Type checking Python files..." 48 | pyright compose_gen 49 | ''; 50 | 51 | installPhase = "touch $out"; 52 | } 53 | -------------------------------------------------------------------------------- /docker/preprod-relay/compose.yml: -------------------------------------------------------------------------------- 1 | # Code generated by Python script. DO NOT EDIT. 
2 | services: 3 | db: 4 | environment: 5 | POSTGRES_DB: postgres 6 | POSTGRES_PASSWORD: postgres 7 | POSTGRES_USER: postgres 8 | healthcheck: 9 | interval: 2s 10 | retries: 30 11 | test: 12 | - CMD 13 | - pg_isready 14 | - -U 15 | - postgres 16 | timeout: 5s 17 | image: postgres:16 18 | ports: 19 | - 5432:5432 20 | restart: always 21 | neoprism-indexer: 22 | command: 23 | - indexer 24 | depends_on: 25 | db: 26 | condition: service_healthy 27 | environment: 28 | NPRISM_CARDANO_NETWORK: preprod 29 | NPRISM_CARDANO_RELAY_ADDR: preprod-node.play.dev.cardano.org:3001 30 | NPRISM_DB_URL: postgres://postgres:postgres@db:5432/postgres 31 | RUST_LOG: oura=warn,tracing::span=warn,info 32 | healthcheck: 33 | interval: 2s 34 | retries: 30 35 | test: 36 | - CMD 37 | - curl 38 | - -f 39 | - http://localhost:8080/api/_system/health 40 | timeout: 5s 41 | image: hyperledgeridentus/identus-neoprism:0.8.0 42 | ports: 43 | - 8080:8080 44 | restart: always 45 | -------------------------------------------------------------------------------- /tools/compose_gen/services/cardano_wallet.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | 3 | from ..models import Healthcheck, Service, ServiceDependency 4 | 5 | IMAGE = "cardanofoundation/cardano-wallet:2025.3.31" 6 | 7 | 8 | class Options(BaseModel): 9 | host_port: int | None = None 10 | testnet_volume: str 11 | cardano_node_host: str 12 | 13 | 14 | def mk_service(options: Options) -> Service: 15 | ports = [f"{options.host_port}:8090"] if options.host_port else None 16 | 17 | return Service( 18 | image=IMAGE, 19 | entrypoint=[], 20 | command=[ 21 | "bash", 22 | "-c", 23 | """cardano-wallet serve \\ 24 | --database /wallet/db \\ 25 | --node-socket /node/testnet/socket/node1/sock \\ 26 | --testnet /node/testnet/byron-genesis.json \\ 27 | --listen-address 0.0.0.0 28 | """, 29 | ], 30 | ports=ports, 31 | volumes=[f"{options.testnet_volume}:/node/testnet"], 32 | 
healthcheck=Healthcheck( 33 | test=["CMD-SHELL", "cardano-wallet network information"] 34 | ), 35 | depends_on={ 36 | options.cardano_node_host: ServiceDependency(condition="service_healthy") 37 | }, 38 | ) 39 | -------------------------------------------------------------------------------- /docker/mainnet-relay/compose.yml: -------------------------------------------------------------------------------- 1 | # Code generated by Python script. DO NOT EDIT. 2 | services: 3 | db: 4 | environment: 5 | POSTGRES_DB: postgres 6 | POSTGRES_PASSWORD: postgres 7 | POSTGRES_USER: postgres 8 | healthcheck: 9 | interval: 2s 10 | retries: 30 11 | test: 12 | - CMD 13 | - pg_isready 14 | - -U 15 | - postgres 16 | timeout: 5s 17 | image: postgres:16 18 | ports: 19 | - 5432:5432 20 | restart: always 21 | neoprism-indexer: 22 | command: 23 | - indexer 24 | depends_on: 25 | db: 26 | condition: service_healthy 27 | environment: 28 | NPRISM_CARDANO_NETWORK: mainnet 29 | NPRISM_CARDANO_RELAY_ADDR: backbone.mainnet.cardanofoundation.org:3001 30 | NPRISM_DB_URL: postgres://postgres:postgres@db:5432/postgres 31 | RUST_LOG: oura=warn,tracing::span=warn,info 32 | healthcheck: 33 | interval: 2s 34 | retries: 30 35 | test: 36 | - CMD 37 | - curl 38 | - -f 39 | - http://localhost:8080/api/_system/health 40 | timeout: 5s 41 | image: hyperledgeridentus/identus-neoprism:0.8.0 42 | ports: 43 | - 8080:8080 44 | restart: always 45 | -------------------------------------------------------------------------------- /docs/src/useful-links.md: -------------------------------------------------------------------------------- 1 | # Useful links 2 | 3 | ## NeoPRISM & Support 4 | 5 | - [NeoPRISM GitHub](https://github.com/hyperledger-identus/neoprism) 6 | - [Releases](https://github.com/hyperledger-identus/neoprism/releases) 7 | - [Issue Tracker](https://github.com/hyperledger-identus/neoprism/issues) 8 | - [DockerHub](https://hub.docker.com/r/hyperledgeridentus/identus-neoprism) 9 | - [Discord 
Community](https://discord.com/channels/905194001349627914/1230596020790886490) 10 | 11 | ## Publicly Available Instances 12 | 13 | - [Mainnet Instance](https://neoprism.patlo.dev) 14 | - [Preprod Instance](https://neoprism-preprod.patlo.dev) 15 | 16 | > **Note:** The public NeoPRISM instances above are personal deployments by the project author, intended for quick evaluation and testing. Please do not use them for production purposes. 17 | 18 | ## External Resources 19 | 20 | - [PRISM DID Method Specification](https://github.com/input-output-hk/prism-did-method-spec) 21 | - [Oura](https://github.com/txpipe/oura) 22 | - [Universal Resolver GitHub](https://github.com/decentralized-identity/universal-resolver) 23 | - [Cardano DB Sync](https://github.com/input-output-hk/cardano-db-sync) 24 | - [Cardano Wallet](https://github.com/input-output-hk/cardano-wallet) 25 | -------------------------------------------------------------------------------- /docker/mainnet-dbsync/compose.yml: -------------------------------------------------------------------------------- 1 | # Code generated by Python script. DO NOT EDIT. 
2 | services: 3 | db: 4 | environment: 5 | POSTGRES_DB: postgres 6 | POSTGRES_PASSWORD: postgres 7 | POSTGRES_USER: postgres 8 | healthcheck: 9 | interval: 2s 10 | retries: 30 11 | test: 12 | - CMD 13 | - pg_isready 14 | - -U 15 | - postgres 16 | timeout: 5s 17 | image: postgres:16 18 | ports: 19 | - 5432:5432 20 | restart: always 21 | neoprism-indexer: 22 | command: 23 | - indexer 24 | depends_on: 25 | db: 26 | condition: service_healthy 27 | environment: 28 | NPRISM_CARDANO_DBSYNC_POLL_INTERVAL: '10' 29 | NPRISM_CARDANO_DBSYNC_URL: ${DBSYNC_URL} 30 | NPRISM_CARDANO_NETWORK: mainnet 31 | NPRISM_DB_URL: postgres://postgres:postgres@db:5432/postgres 32 | RUST_LOG: oura=warn,tracing::span=warn,info 33 | healthcheck: 34 | interval: 2s 35 | retries: 30 36 | test: 37 | - CMD 38 | - curl 39 | - -f 40 | - http://localhost:8080/api/_system/health 41 | timeout: 5s 42 | image: hyperledgeridentus/identus-neoprism:0.8.0 43 | ports: 44 | - 8080:8080 45 | restart: always 46 | -------------------------------------------------------------------------------- /lib/did-prism/proto/prism-version.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package proto; 4 | 5 | // Specifies the protocol version update 6 | message ProtoProtocolVersionUpdate { 7 | string proposer_did = 1; // The DID suffix that proposes the protocol update. 
8 | ProtocolVersionInfo version = 2; // Information of the new version 9 | } 10 | 11 | message ProtocolVersion { 12 | // Represent the major version 13 | int32 major_version = 1; 14 | // Represent the minor version 15 | int32 minor_version = 2; 16 | } 17 | 18 | message ProtocolVersionInfo { 19 | reserved 2, 3; 20 | string version_name = 1; // (optional) name of the version 21 | int32 effective_since = 4; // Cardano block number that tells since which block the update is enforced 22 | 23 | // New major and minor version to be announced, 24 | // If major value changes, the node MUST stop issuing and reading events/operations, and upgrade to the new protocol version before `effective_since`. 25 | // If minor value changes, the node can opt to not update. All events _published_ by this node would be also 26 | // understood by other nodes with the same major version. However, there may be new events that this node won't _read_ 27 | ProtocolVersion protocol_version = 5; 28 | } 29 | -------------------------------------------------------------------------------- /docs/src/installation/docker.md: -------------------------------------------------------------------------------- 1 | # Docker Installation 2 | 3 | This guide will help you install and run a NeoPRISM node using Docker. 4 | 5 | ## Prerequisites 6 | 7 | - **Docker**: [Install Docker](https://docs.docker.com/get-docker/) 8 | - **Git**: [Install Git](https://git-scm.com/downloads) 9 | 10 | ## Steps (Mainnet Relay Example) 11 | 12 | 1. **Clone the NeoPRISM repository:** 13 | ```bash 14 | git clone https://github.com/hyperledger-identus/neoprism.git 15 | cd neoprism/docker/mainnet-relay 16 | ``` 17 | 18 | 2. **Start NeoPRISM and PostgreSQL using Docker Compose:** 19 | ```bash 20 | docker-compose up 21 | ``` 22 | 23 | 3. **Access the Web UI:** 24 | - Open [http://localhost:8080](http://localhost:8080) in your browser. 26 | 4. 
**Resolve a DID using the API:** 27 | ```bash 28 | curl http://localhost:8080/api/dids/<did> 29 | ``` 30 | 31 | --- 32 | 33 | > **Note:** The example above demonstrates one way to run NeoPRISM using Docker. You can find additional deployment examples and configurations in the `docker` directory of the repository. 34 | > 35 | > If you are deploying NeoPRISM in a production environment, please take extra care to harden your setup according to your organization's security requirements. 36 | -------------------------------------------------------------------------------- /docs/src/configuration/submitter.md: -------------------------------------------------------------------------------- 1 | # Submitter Configuration 2 | 3 | The **Submitter node** publishes PRISM DID operations to the Cardano blockchain. 4 | It is typically used for creating, updating, or deactivating DIDs. 5 | 6 | ## DLT Sink 7 | 8 | The Submitter node currently supports Cardano wallet integration as its DLT sink: 9 | 10 | - **Cardano Wallet:** 11 | Uses a Cardano wallet to sign and submit transactions containing DID operations. 12 | - Key options: 13 | - Wallet base URL: `--wallet-base-url` or `NPRISM_WALLET_BASE_URL` 14 | - Wallet ID: `--wallet-id` or `NPRISM_WALLET_ID` 15 | - Passphrase: `--wallet-passphrase` or `NPRISM_WALLET_PASSPHRASE` 16 | - Payment address: `--payment-address` or `NPRISM_PAYMENT_ADDRESS` 17 | 18 | > **Important:** 19 | When the submitter publishes a DID operation, it creates a transaction from the configured wallet to the specified payment address. 20 | Make sure you use your own payment address. Using an incorrect or third-party address may result in permanent loss of funds. 21 | 22 | Configure the wallet integration to match your operational and security requirements. 23 | 24 | --- 25 | 26 | **Next Steps:** 27 | - [CLI Options](../references/cli-options.md): Full list of flags and environment variables. 
28 | -------------------------------------------------------------------------------- /lib/did-prism-indexer/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "identus-did-prism-indexer" 3 | version.workspace = true 4 | edition.workspace = true 5 | 6 | [dependencies] 7 | anyhow = { workspace = true } 8 | async-trait = { workspace = true } 9 | chrono = { workspace = true } 10 | derive_more = { workspace = true, features = [ "as_ref", "from", "into", "debug", "display", "error" ] } 11 | oura = { workspace = true, optional = true } 12 | pallas-primitives = { workspace = true, optional = true } 13 | protobuf = { workspace = true } 14 | serde = { workspace = true, features = [ "derive" ] } 15 | serde_json = { workspace = true, optional = true } 16 | sqlx = { workspace = true, optional = true, features = [ "postgres", "chrono" ] } 17 | tokio = { workspace = true, features = [ "sync", "time", "rt" ] } 18 | tracing = { workspace = true } 19 | uuid = { workspace = true } 20 | 21 | identus-apollo = { workspace = true, features = [ "hash", "hex", "secp256k1", "ed25519", "x25519" ] } 22 | identus-did-prism = { workspace = true } 23 | 24 | [features] 25 | default = [ ] 26 | oura = [ "dep:oura", "dep:pallas-primitives" ] 27 | dbsync = [ "dep:sqlx", "dep:serde_json" ] 28 | -------------------------------------------------------------------------------- /lib/node-storage/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "node-storage" 3 | version.workspace = true 4 | edition.workspace = true 5 | 6 | [features] 7 | default = [ ] 8 | sqlite-storage = [ 9 | "lazybe/sqlite", 10 | "sea-query/backend-sqlite", 11 | "sqlx/sqlite", 12 | ] 13 | 14 | [dependencies] 15 | async-trait = { workspace = true } 16 | chrono = { workspace = true, features = [ "serde" ] } 17 | derive_more = { workspace = true, features = [ "from", "display", "error" ] } 18 | 
identus-apollo = { workspace = true, features = [ "hex" ] } 19 | lazybe = { workspace = true, features = [ "postgres" ] } 20 | protobuf = { workspace = true } 21 | sea-query = { workspace = true, features = [ "backend-postgres", "with-uuid", "with-chrono" ] } 22 | serde = { workspace = true } 23 | sqlx = { workspace = true, features = [ "postgres", "uuid", "chrono" ] } 24 | tracing = { workspace = true } 25 | uuid = { workspace = true, features = [ "serde" ] } 26 | 27 | identus-did-prism = { workspace = true } 28 | identus-did-prism-indexer = { workspace = true } 29 | identus-did-prism-submitter = { workspace = true } 30 | 31 | [dev-dependencies] 32 | tempfile = { workspace = true } 33 | tokio = { workspace = true, features = [ "macros", "rt-multi-thread" ] } 34 | -------------------------------------------------------------------------------- /tools/compose_gen/services/cardano_dbsync.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | 3 | from ..models import Service, ServiceDependency 4 | 5 | IMAGE = "ghcr.io/intersectmbo/cardano-db-sync:13.6.0.5" 6 | 7 | 8 | class Options(BaseModel): 9 | testnet_volume: str 10 | cardano_node_host: str 11 | config_file: str 12 | db_host: str 13 | 14 | 15 | def mk_service(options: Options) -> Service: 16 | return Service( 17 | image=IMAGE, 18 | environment={ 19 | "POSTGRES_HOST": options.db_host, 20 | "POSTGRES_DB": "postgres", 21 | "POSTGRES_PORT": "5432", 22 | "POSTGRES_USER": "postgres", 23 | "POSTGRES_PASSWORD": "postgres", 24 | }, 25 | command=[ 26 | "--config", 27 | "/config/dbsync-config.yaml", 28 | "--socket-path", 29 | "/node/testnet/socket/node1/sock", 30 | "--force-indexes", 31 | ], 32 | volumes=[ 33 | f"{options.testnet_volume}:/node/testnet", 34 | f"{options.config_file}:/config/dbsync-config.yaml", 35 | ], 36 | depends_on={ 37 | options.cardano_node_host: ServiceDependency(condition="service_healthy"), 38 | options.db_host: 
ServiceDependency(condition="service_healthy"), 39 | }, 40 | ) 41 | -------------------------------------------------------------------------------- /docs/src/configuration/logging.md: -------------------------------------------------------------------------------- 1 | # Logging 2 | 3 | NeoPRISM uses structured logging to help you diagnose issues and monitor node activity. 4 | Logging is powered by the [`tracing`](https://docs.rs/tracing/latest/tracing/) crate, and log verbosity is controlled via the standard `RUST_LOG` environment variable. 5 | By default, NeoPRISM outputs all logs to stdout. 6 | 7 | ## Configuring Logging 8 | 9 | To set the log level, set the `RUST_LOG` environment variable before starting NeoPRISM. For example: 10 | 11 | ```bash 12 | RUST_LOG=info 13 | ``` 14 | 15 | Supported log levels (in increasing verbosity) are: `error`, `warn`, `info`, `debug`, and `trace`. 16 | 17 | You can also filter logs by module. For example, to see only HTTP-related logs at debug level: 18 | 19 | ```bash 20 | RUST_LOG=neoprism_node::http=debug 21 | ``` 22 | 23 | Multiple filters can be combined: 24 | 25 | ```bash 26 | RUST_LOG=info,oura=warn,neoprism_node::http=trace,tower_http::trace=debug 27 | ``` 28 | 29 | ## About `RUST_LOG` 30 | 31 | NeoPRISM uses the standard [`tracing`](https://docs.rs/tracing/latest/tracing/) environment variables to control log verbosity and filtering, including `RUST_LOG`. For more details on how `RUST_LOG` works and advanced usage, see the [tracing-subscriber EnvFilter documentation](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html). 
32 | -------------------------------------------------------------------------------- /lib/did-prism/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod did; 2 | pub mod dlt; 3 | pub mod error; 4 | mod macros; 5 | pub mod prelude; 6 | pub mod protocol; 7 | pub mod utils; 8 | 9 | #[allow(clippy::doc_lazy_continuation)] 10 | pub mod proto { 11 | use identus_apollo::hash::{Sha256Digest, sha256}; 12 | use protobuf::Message; 13 | 14 | use crate::proto::prism::{PrismOperation, SignedPrismOperation}; 15 | 16 | include!(concat!(env!("OUT_DIR"), "/generated/mod.rs")); 17 | 18 | pub trait MessageExt: Sized { 19 | fn encode_to_vec(&self) -> Vec; 20 | fn decode(bytes: &[u8]) -> protobuf::Result; 21 | } 22 | 23 | impl MessageExt for T { 24 | fn encode_to_vec(&self) -> Vec { 25 | self.write_to_bytes().expect("Unable to encode protobuf message to vec") 26 | } 27 | 28 | fn decode(bytes: &[u8]) -> protobuf::Result { 29 | Self::parse_from_bytes(bytes) 30 | } 31 | } 32 | 33 | impl PrismOperation { 34 | pub fn operation_hash(&self) -> Sha256Digest { 35 | let bytes = self.encode_to_vec(); 36 | sha256(bytes) 37 | } 38 | } 39 | 40 | impl SignedPrismOperation { 41 | pub fn operation_hash(&self) -> Option { 42 | self.operation.as_ref().map(|op| op.operation_hash()) 43 | } 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /nix/devShells/development.nix: -------------------------------------------------------------------------------- 1 | { pkgs }: 2 | 3 | let 4 | rootDir = "$ROOT_DIR"; 5 | inherit (pkgs.rustTools) rust; 6 | inherit (pkgs.pythonTools) pythonEnv; 7 | in 8 | pkgs.mkShell { 9 | packages = with pkgs; [ 10 | # base 11 | cowsay 12 | docker 13 | docker-compose 14 | git 15 | git-cliff 16 | hurl 17 | just 18 | less 19 | ncurses 20 | nix 21 | nixfmt-rfc-style 22 | pkg-config 23 | protobuf 24 | taplo 25 | which 26 | # python 27 | pythonEnv 28 | pyright 29 | ruff 30 | # db 31 | sqlfluff 
32 | sqlite 33 | # rust 34 | cargo-edit 35 | cargo-expand 36 | cargo-license 37 | cargo-udeps 38 | rust 39 | # js 40 | nodejs_20 41 | tailwindcss_4 42 | typescript-language-server 43 | esbuild 44 | # scala 45 | jdk 46 | metals 47 | sbt 48 | ]; 49 | 50 | shellHook = '' 51 | export ROOT_DIR=$(${pkgs.git}/bin/git rev-parse --show-toplevel) 52 | ${pkgs.cowsay}/bin/cowsay "Working on project root directory: ${rootDir}" 53 | cd "${rootDir}" 54 | ''; 55 | 56 | # envs 57 | RUST_LOG = "info,oura=warn,tower_http::trace=debug"; 58 | 59 | JAVA_HOME = "${pkgs.jdk}/lib/openjdk"; 60 | SBT_OPTS = "-Xmx4G"; 61 | SSL_CERT_FILE = "${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt"; 62 | LD_LIBRARY_PATH = "${pkgs.stdenv.cc.cc.lib}/lib/"; # required by scalapb 63 | } 64 | -------------------------------------------------------------------------------- /docs/src/installation/nix.md: -------------------------------------------------------------------------------- 1 | # Nix Installation 2 | 3 | NeoPRISM can be built and run using [Nix flakes](https://nixos.wiki/wiki/Flakes). 4 | 5 | ## Prerequisites 6 | 7 | - **Nix** with flake support enabled. See [Nix Flakes documentation](https://nixos.wiki/wiki/Flakes) for setup instructions. 8 | - **Git**: [Install Git](https://git-scm.com/downloads) 9 | 10 | ## Steps 11 | 12 | 1. **Build the NeoPRISM binary from the remote flake:** 13 | ```bash 14 | nix build github:hyperledger-identus/neoprism/#neoprism-bin 15 | ``` 16 | - The resulting binary will be located in `./result/bin/neoprism-node`. 17 | 18 | 2. **Build the UI assets from the remote flake (in a separate output directory):** 19 | ```bash 20 | nix build github:hyperledger-identus/neoprism/#neoprism-ui-assets -o ./result-ui-assets 21 | ``` 22 | - The UI assets will be available in `./result-ui-assets`. 23 | 24 | 3. 
**Run NeoPRISM and link the UI assets:** 25 | - Use the `--assets-path` flag to specify the UI assets directory: 26 | ```bash 27 | ./result/bin/neoprism-node indexer --assets-path ./result-ui-assets/assets [options] 28 | ``` 29 | - For details on available commands and options, see the CLI help: 30 | ```bash 31 | ./result/bin/neoprism-node indexer --help 32 | ``` 33 | 34 | 4. **Access the Web UI:** 35 | - Open [http://localhost:8080](http://localhost:8080) in your browser. 36 | -------------------------------------------------------------------------------- /lib/apollo/src/crypto/x25519.rs: -------------------------------------------------------------------------------- 1 | use super::{EncodeArray, EncodeVec, Error}; 2 | use crate::base64::Base64UrlStrNoPad; 3 | use crate::jwk::{EncodeJwk, Jwk}; 4 | 5 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] 6 | pub struct X25519PublicKey(x25519_dalek::PublicKey); 7 | 8 | impl X25519PublicKey { 9 | pub fn from_slice(slice: &[u8]) -> Result { 10 | let Some((key, _)) = slice.split_first_chunk::<32>() else { 11 | Err(Error::InvalidKeySize { 12 | expected: 32, 13 | actual: slice.len(), 14 | key_type: std::any::type_name::(), 15 | })? 
16 | }; 17 | let key = x25519_dalek::PublicKey::from(key.to_owned()); 18 | Ok(X25519PublicKey(key)) 19 | } 20 | } 21 | 22 | impl EncodeVec for X25519PublicKey { 23 | fn encode_vec(&self) -> Vec { 24 | self.0.as_bytes().to_vec() 25 | } 26 | } 27 | 28 | impl EncodeArray<32> for X25519PublicKey { 29 | fn encode_array(&self) -> [u8; 32] { 30 | self.0.to_bytes() 31 | } 32 | } 33 | 34 | impl EncodeJwk for X25519PublicKey { 35 | fn encode_jwk(&self) -> Jwk { 36 | let x = self.encode_array(); 37 | Jwk { 38 | kty: "OKP".to_string(), 39 | crv: "X25519".to_string(), 40 | x: Some(Base64UrlStrNoPad::from(x)), 41 | y: None, 42 | } 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /tools/just-recipes/e2e.just: -------------------------------------------------------------------------------- 1 | set working-directory := '../..' 2 | 3 | # Build PRISM conformance test suite 4 | [working-directory('./tests/prism-test')] 5 | build: 6 | sbt scalafmtAll compile Test/compile 7 | 8 | # Clean PRISM conformance test build artifacts 9 | [working-directory('./tests/prism-test')] 10 | clean: 11 | sbt clean 12 | 13 | # Start PRISM conformance test environment 14 | up NAME: 15 | docker-compose -f docker/prism-test/compose-{{ NAME }}.yml up -d --wait 16 | 17 | # Stop PRISM conformance test environment and remove volumes 18 | down NAME: 19 | docker-compose -f docker/prism-test/compose-{{ NAME }}.yml down --volumes --remove-orphans 20 | 21 | # Run all PRISM conformance test suites 22 | run: docker-publish-local 23 | #!/usr/bin/env bash 24 | set -euo pipefail 25 | 26 | for cfg in sqlite-dev; do 27 | cowsay "Testing ${cfg} configuration" 28 | just e2e::up ${cfg} 29 | (cd tests/prism-test && SKIP_CONFIRMATION_CHECK_MILLIS=2000 sbt test) 30 | just e2e::down ${cfg} 31 | done 32 | 33 | for cfg in ci ci-sqlite; do 34 | cowsay "Testing ${cfg} configuration" 35 | just e2e::up ${cfg} 36 | (cd tests/prism-test && sbt test) 37 | just e2e::down ${cfg} 38 | done 
39 | 40 | # Build and load Docker images for local testing 41 | docker-publish-local: 42 | nix build .#neoprism-docker-latest && docker load < ./result 43 | -------------------------------------------------------------------------------- /lib/node-storage/src/backend/shared.rs: -------------------------------------------------------------------------------- 1 | use identus_did_prism::dlt::{BlockMetadata, OperationMetadata}; 2 | use identus_did_prism::prelude::*; 3 | use identus_did_prism::proto::prism::SignedPrismOperation; 4 | use identus_did_prism_indexer::repo::RawOperationId; 5 | 6 | use crate::{Error, entity}; 7 | 8 | pub fn parse_raw_operation( 9 | value: entity::RawOperation, 10 | ) -> Result<(RawOperationId, OperationMetadata, SignedPrismOperation), Error> { 11 | let metadata = OperationMetadata { 12 | block_metadata: value.block_metadata()?, 13 | osn: value.osn.try_into().expect("osn value does not fit in u32"), 14 | }; 15 | SignedPrismOperation::decode(value.signed_operation_data.as_slice()) 16 | .map(|op| (value.id.into(), metadata, op)) 17 | .map_err(|e| Error::ProtobufDecode { 18 | source: e, 19 | target_type: std::any::type_name::(), 20 | }) 21 | } 22 | 23 | impl entity::RawOperation { 24 | fn block_metadata(&self) -> Result { 25 | Ok(BlockMetadata { 26 | slot_number: u64::try_from(self.slot).expect("slot value does not fit in u64").into(), 27 | block_number: u64::try_from(self.block_number) 28 | .expect("block_number value does not fit in u64") 29 | .into(), 30 | cbt: self.cbt, 31 | absn: self.absn.try_into().expect("absn value does not fit in u32"), 32 | }) 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /docker/mainnet-universal-resolver/compose.yml: -------------------------------------------------------------------------------- 1 | # Code generated by Python script. DO NOT EDIT. 
2 | services: 3 | db: 4 | environment: 5 | POSTGRES_DB: postgres 6 | POSTGRES_PASSWORD: postgres 7 | POSTGRES_USER: postgres 8 | healthcheck: 9 | interval: 2s 10 | retries: 30 11 | test: 12 | - CMD 13 | - pg_isready 14 | - -U 15 | - postgres 16 | timeout: 5s 17 | image: postgres:16 18 | ports: 19 | - 5432:5432 20 | restart: always 21 | neoprism-indexer: 22 | command: 23 | - indexer 24 | depends_on: 25 | db: 26 | condition: service_healthy 27 | environment: 28 | NPRISM_CARDANO_NETWORK: mainnet 29 | NPRISM_CARDANO_RELAY_ADDR: backbone.mainnet.cardanofoundation.org:3001 30 | NPRISM_DB_URL: postgres://postgres:postgres@db:5432/postgres 31 | RUST_LOG: oura=warn,tracing::span=warn,info 32 | healthcheck: 33 | interval: 2s 34 | retries: 30 35 | test: 36 | - CMD 37 | - curl 38 | - -f 39 | - http://localhost:8080/api/_system/health 40 | timeout: 5s 41 | image: hyperledgeridentus/identus-neoprism:0.8.0 42 | ports: 43 | - 8081:8080 44 | restart: always 45 | uni-resolver-web: 46 | environment: 47 | uniresolver_web_driver_url_did_prism: http://neoprism-indexer:8080/api 48 | image: universalresolver/uni-resolver-web:0.44.0-4922fcc 49 | ports: 50 | - 8080:8080 51 | restart: always 52 | -------------------------------------------------------------------------------- /lib/did-prism-ledger/src/in_memory/sink.rs: -------------------------------------------------------------------------------- 1 | use std::sync::atomic::AtomicU64; 2 | 3 | use identus_apollo::hash::sha256; 4 | use identus_did_prism::dlt::TxId; 5 | use identus_did_prism::prelude::SignedPrismOperation; 6 | use identus_did_prism::proto::prism::{PrismBlock, PrismObject}; 7 | use identus_did_prism_submitter::DltSink; 8 | use tokio::sync::mpsc; 9 | 10 | pub struct InMemoryDltSink { 11 | block_tx: mpsc::Sender, 12 | count: AtomicU64, 13 | } 14 | 15 | impl InMemoryDltSink { 16 | pub fn new(block_tx: mpsc::Sender) -> Self { 17 | Self { 18 | block_tx, 19 | count: AtomicU64::new(0), 20 | } 21 | } 22 | } 23 | 24 | 
#[async_trait::async_trait] 25 | impl DltSink for InMemoryDltSink { 26 | async fn publish_operations(&self, operations: Vec) -> Result { 27 | let prism_object = PrismObject { 28 | block_content: Some(PrismBlock { 29 | operations, 30 | special_fields: Default::default(), 31 | }) 32 | .into(), 33 | special_fields: Default::default(), 34 | }; 35 | 36 | let count = self.count.fetch_add(1, std::sync::atomic::Ordering::Relaxed); 37 | let tx_id = TxId::from(sha256(count.to_le_bytes())); 38 | self.block_tx 39 | .send(prism_object) 40 | .await 41 | .map_err(|e| e.to_string()) 42 | .map(|_| tx_id) 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /bin/neoprism-node/src/app/worker.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use identus_did_prism::dlt::DltCursor; 4 | use identus_did_prism_indexer::{DltSource, run_indexer_loop, run_sync_loop}; 5 | use node_storage::StorageBackend; 6 | use tokio::sync::watch; 7 | 8 | type SharedStorage = Arc; 9 | 10 | pub struct DltSyncWorker { 11 | store: SharedStorage, 12 | source: Src, 13 | } 14 | 15 | impl DltSyncWorker 16 | where 17 | Src: DltSource, 18 | { 19 | pub fn new(store: SharedStorage, source: Src) -> Self { 20 | Self { store, source } 21 | } 22 | 23 | pub fn sync_cursor(&self) -> watch::Receiver> { 24 | self.source.sync_cursor() 25 | } 26 | 27 | pub async fn run(self) -> anyhow::Result<()> { 28 | run_sync_loop(self.store.as_ref(), self.source).await // block forever 29 | } 30 | } 31 | 32 | pub struct DltIndexWorker { 33 | store: SharedStorage, 34 | index_interval: u64, 35 | } 36 | 37 | impl DltIndexWorker { 38 | pub fn new(store: SharedStorage, index_interval: u64) -> Self { 39 | Self { store, index_interval } 40 | } 41 | 42 | pub async fn run(self) -> anyhow::Result<()> { 43 | loop { 44 | let result = run_indexer_loop(self.store.as_ref()).await; 45 | if let Err(e) = result { 46 | tracing::error!("{:?}", e); 
47 | } 48 | tokio::time::sleep(tokio::time::Duration::from_secs(self.index_interval)).await; 49 | } 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /bin/neoprism-node/src/http/features/ui_explorer/mod.rs: -------------------------------------------------------------------------------- 1 | use axum::Router; 2 | use axum::extract::{Query, State}; 3 | use axum::routing::get; 4 | use maud::Markup; 5 | use models::PageQuery; 6 | 7 | use crate::IndexerUiState; 8 | use crate::http::urls; 9 | 10 | pub(in crate::http) mod models; 11 | mod views; 12 | 13 | pub fn router() -> Router { 14 | Router::new() 15 | .route(urls::Explorer::AXUM_PATH, get(index)) 16 | .route(urls::ExplorerDltCursor::AXUM_PATH, get(dlt_cursor)) 17 | .route(urls::ExplorerDidList::AXUM_PATH, get(did_list)) 18 | } 19 | 20 | async fn index(Query(page): Query, State(state): State) -> Markup { 21 | let page = page.page.map(|i| i.max(1) - 1); 22 | let network = state.dlt_source.as_ref().map(|i| i.network); 23 | let cursor = state.dlt_source.as_ref().and_then(|i| i.cursor_rx.borrow().to_owned()); 24 | let dids = state.prism_did_service.get_all_dids(page).await.unwrap(); // FIXME: unwrap 25 | views::index(network, cursor, dids) 26 | } 27 | 28 | async fn dlt_cursor(State(state): State) -> Markup { 29 | let cursor = state.dlt_source.as_ref().and_then(|i| i.cursor_rx.borrow().to_owned()); 30 | views::dlt_cursor_card(cursor) 31 | } 32 | 33 | async fn did_list(Query(page): Query, State(state): State) -> Markup { 34 | let page = page.page.map(|i| i.max(1) - 1); 35 | let dids = state.prism_did_service.get_all_dids(page).await.unwrap(); // FIXME: unwrap 36 | views::did_list(dids) 37 | } 38 | -------------------------------------------------------------------------------- /.github/workflows/conformance-test.yml: -------------------------------------------------------------------------------- 1 | name: PRISM specification test 2 | 3 | concurrency: 4 | group: ${{ 
github.head_ref }}${{ github.ref }}-prism-test 5 | cancel-in-progress: true 6 | 7 | on: 8 | pull_request: 9 | branches: 10 | - main 11 | push: 12 | branches: 13 | - main 14 | 15 | jobs: 16 | run-conformance-test: 17 | runs-on: ubuntu-latest 18 | env: 19 | JAVA_TOOL_OPTIONS: -Djava.net.preferIPv4Stack=true 20 | 21 | steps: 22 | - name: Checkout repository 23 | uses: actions/checkout@v4 24 | 25 | - name: Install Nix 26 | uses: cachix/install-nix-action@v31 27 | with: 28 | extra_nix_config: | 29 | experimental-features = nix-command flakes 30 | 31 | - name: Set up Docker 32 | uses: docker/setup-docker-action@v4 33 | 34 | - name: Set up Docker Compose 35 | uses: docker/setup-compose-action@v1 36 | 37 | - name: Build neoprism image 38 | run: nix build .#neoprism-docker-latest -L -o result 39 | 40 | - name: Load image into daemon 41 | run: | 42 | docker load < result 43 | docker images 44 | 45 | - name: Start prism-test services 46 | uses: hoverkraft-tech/compose-action@v2.3.0 47 | with: 48 | compose-file: "./docker/prism-test/compose-ci.yml" 49 | up-flags: "--wait" 50 | down-flags: "--volumes" 51 | 52 | - name: Run test suite 53 | working-directory: ./tests/prism-test 54 | run: 55 | nix develop .#prism-test --command sbt test 56 | -------------------------------------------------------------------------------- /docker/prism-test/init-did.hurl: -------------------------------------------------------------------------------- 1 | GET {{ WALLET_BASE_URL }}/wallets 2 | HTTP 200 3 | [Captures] 4 | wallet_id: jsonpath "$[0].id" 5 | 6 | # Wait until wallet receives balance from genesis transaction 7 | GET {{ WALLET_BASE_URL }}/wallets/{{ wallet_id }} 8 | [Options] 9 | retry: 10 10 | HTTP 200 11 | [Asserts] 12 | jsonpath "$.balance.available.quantity" > 0 13 | 14 | GET {{ WALLET_BASE_URL }}/wallets/{{ wallet_id }}/addresses 15 | HTTP 200 16 | [Captures] 17 | address: jsonpath "$[0].id" 18 | 19 | POST {{ WALLET_BASE_URL }}/wallets/{{ wallet_id }}/transactions 20 | { 21 | 
"passphrase": "{{ WALLET_PASSPHRASE }}", 22 | "payments": [ 23 | { 24 | "address": "{{ address }}", 25 | "amount": { 26 | "quantity": 1000000, 27 | "unit": "lovelace" 28 | } 29 | } 30 | ], 31 | "metadata": { 32 | "21325": { 33 | "map": [ 34 | { "k": { "string": "v" }, "v": { "int": 1 } }, 35 | { 36 | "k": { "string": "c" }, 37 | "v": { 38 | "list": [ 39 | { "bytes": "22d50112d2010a086d61737465722d3012473045022100c921083f391f179c947a4e95f8ed226870c32557565f8adba52daebcf47ce5b3022019f8632237331c" }, 40 | { "bytes": "5183d5ee6d192b617637848e32ce5d26c12dd2a86890b8bd041a7d0a7b0a79123c0a086d61737465722d3010014a2e0a09736563703235366b31122103b20404" }, 41 | { "bytes": "f350d87eec98982131c176acfea520f26f8901fe08b619a56a0dd9e41712390a057664722d3010084a2e0a09736563703235366b31122102647aff70cfd5d510" }, 42 | { "bytes": "ec369c512da85faef95803db30bb47499a28c08a590186ac" } 43 | ] 44 | } 45 | } 46 | ] 47 | } 48 | } 49 | } 50 | HTTP 202 51 | -------------------------------------------------------------------------------- /lib/apollo/src/crypto/ed25519.rs: -------------------------------------------------------------------------------- 1 | use super::{EncodeArray, EncodeVec, Error, Verifiable}; 2 | use crate::base64::Base64UrlStrNoPad; 3 | use crate::jwk::{EncodeJwk, Jwk}; 4 | 5 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] 6 | pub struct Ed25519PublicKey(ed25519_dalek::VerifyingKey); 7 | 8 | impl Ed25519PublicKey { 9 | pub fn from_slice(slice: &[u8]) -> Result { 10 | let Some((key, _)) = slice.split_first_chunk::<32>() else { 11 | Err(Error::InvalidKeySize { 12 | expected: 32, 13 | actual: slice.len(), 14 | key_type: std::any::type_name::(), 15 | })? 
16 | }; 17 | let key = ed25519_dalek::VerifyingKey::from_bytes(key)?; 18 | Ok(Ed25519PublicKey(key)) 19 | } 20 | } 21 | 22 | impl EncodeVec for Ed25519PublicKey { 23 | fn encode_vec(&self) -> Vec { 24 | self.0.to_bytes().to_vec() 25 | } 26 | } 27 | 28 | impl EncodeArray<32> for Ed25519PublicKey { 29 | fn encode_array(&self) -> [u8; 32] { 30 | self.0.to_bytes() 31 | } 32 | } 33 | 34 | impl Verifiable for Ed25519PublicKey { 35 | fn verify(&self, message: &[u8], signature: &[u8]) -> bool { 36 | let Ok(signature) = ed25519_dalek::Signature::from_slice(signature) else { 37 | return false; 38 | }; 39 | self.0.verify_strict(message, &signature).is_ok() 40 | } 41 | } 42 | 43 | impl EncodeJwk for Ed25519PublicKey { 44 | fn encode_jwk(&self) -> Jwk { 45 | let x = self.encode_array(); 46 | Jwk { 47 | kty: "OKP".to_string(), 48 | crv: "Ed25519".to_string(), 49 | x: Some(Base64UrlStrNoPad::from(x)), 50 | y: None, 51 | } 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /lib/node-storage/migrations/postgres/20250424101829_migrate_from_seaorm.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS seaql_migrations; 2 | 3 | CREATE TABLE IF NOT EXISTS dlt_cursor ( 4 | slot int8 NOT NULL, 5 | block_hash bytea NOT NULL, 6 | CONSTRAINT dlt_cursor_pkey PRIMARY KEY (slot, block_hash) 7 | ); 8 | 9 | CREATE TABLE IF NOT EXISTS raw_operation ( 10 | did bytea NOT NULL, 11 | signed_operation_data bytea NOT NULL, 12 | slot int8 NOT NULL, 13 | block_number int8 NOT NULL, 14 | cbt timestamptz NOT NULL, 15 | absn int4 NOT NULL, 16 | osn int4 NOT NULL, 17 | CONSTRAINT raw_operation_pkey PRIMARY KEY (did, block_number, absn, osn) 18 | ); 19 | 20 | -- migrate primary key for raw_operation 21 | ALTER TABLE raw_operation DROP CONSTRAINT raw_operation_pkey; 22 | 23 | ALTER TABLE raw_operation 24 | ADD COLUMN id uuid DEFAULT gen_random_uuid(); 25 | 26 | ALTER TABLE raw_operation 27 | ADD CONSTRAINT 
raw_operation_pkey PRIMARY KEY (id); 28 | 29 | ALTER TABLE raw_operation 30 | ADD CONSTRAINT raw_operation_abs_order UNIQUE (did, block_number, absn, osn); 31 | 32 | -- migrate primary key for dlt_cursor 33 | ALTER TABLE dlt_cursor DROP CONSTRAINT dlt_cursor_pkey; 34 | 35 | ALTER TABLE dlt_cursor 36 | ADD COLUMN id uuid DEFAULT gen_random_uuid(); 37 | 38 | ALTER TABLE dlt_cursor 39 | ADD CONSTRAINT dlt_cursor_pkey PRIMARY KEY (id); 40 | 41 | ALTER TABLE dlt_cursor 42 | ADD CONSTRAINT dlt_cursor_abs_order UNIQUE (slot, block_hash); 43 | 44 | CREATE VIEW did_stats AS 45 | SELECT 46 | did, 47 | count(*) AS operation_count, 48 | max(block_number) AS last_block, 49 | max(slot) AS last_slot, 50 | max(cbt) AS last_cbt, 51 | min(block_number) AS first_block, 52 | min(slot) AS first_slot, 53 | min(cbt) AS first_cbt 54 | FROM raw_operation 55 | GROUP BY 1; 56 | -------------------------------------------------------------------------------- /tests/prism-test/src/main/scala/org/hyperledger/identus/prismtest/utils/Sha256.scala: -------------------------------------------------------------------------------- 1 | package org.hyperledger.identus.prismtest.utils 2 | 3 | import java.security.MessageDigest 4 | 5 | // Reference: https://github.com/input-output-hk/atala-prism/blob/open-source-node/node/src/main/scala/io/iohk/atala/prism/node/crypto/CryptoUtils.scala 6 | sealed trait Sha256Hash: 7 | def bytes: Vector[Byte] 8 | def hexEncoded: String = 9 | bytes.map(byte => f"${byte & 0xff}%02x").mkString 10 | 11 | override def equals(obj: Any): Boolean = obj match 12 | case other: Sha256Hash => bytes == other.bytes 13 | case _ => false 14 | 15 | override def hashCode(): Int = bytes.hashCode() 16 | 17 | private[utils] case class Sha256HashImpl(bytes: Vector[Byte]) extends Sha256Hash: 18 | require(bytes.size == 32) 19 | 20 | object Sha256Hash: 21 | 22 | def fromBytes(arr: Array[Byte]): Sha256Hash = Sha256HashImpl(arr.toVector) 23 | 24 | def compute(bArray: Array[Byte]): Sha256Hash = 25 | 
Sha256HashImpl( 26 | MessageDigest 27 | .getInstance("SHA-256") 28 | .digest(bArray) 29 | .toVector 30 | ) 31 | 32 | def fromHex(hexedBytes: String): Sha256Hash = 33 | val HEX_STRING_RE = "^[0-9a-fA-F]{64}$".r 34 | if HEX_STRING_RE.matches(hexedBytes) then Sha256HashImpl(hexToBytes(hexedBytes)) 35 | else 36 | throw new IllegalArgumentException( 37 | "The given hex string doesn't correspond to a valid SHA-256 hash encoded as string" 38 | ) 39 | 40 | private def hexToBytes(hex: String): Vector[Byte] = 41 | val HEX_ARRAY = "0123456789abcdef".toCharArray 42 | for 43 | pair <- hex.grouped(2).toVector 44 | firstIndex = HEX_ARRAY.indexOf(pair(0)) 45 | secondIndex = HEX_ARRAY.indexOf(pair(1)) 46 | octet = firstIndex << 4 | secondIndex 47 | yield octet.toByte 48 | -------------------------------------------------------------------------------- /lib/did-prism-ledger/src/in_memory/mod.rs: -------------------------------------------------------------------------------- 1 | mod sink; 2 | mod source; 3 | 4 | use std::sync::Arc; 5 | 6 | use chrono::Utc; 7 | use identus_did_prism::dlt::{BlockMetadata, BlockNo, PublishedPrismObject, SlotNo}; 8 | use identus_did_prism::proto::prism::PrismObject; 9 | use identus_did_prism_submitter::DltSink; 10 | pub use sink::InMemoryDltSink; 11 | pub use source::InMemoryDltSource; 12 | use tokio::sync::mpsc; 13 | 14 | const BUFFER_SIZE: usize = 1024; 15 | 16 | pub fn create_ledger() -> (InMemoryDltSource, Arc) { 17 | let (block_tx, block_rx) = mpsc::channel::(BUFFER_SIZE); 18 | let (object_tx, mut object_rx) = mpsc::channel::(BUFFER_SIZE); 19 | 20 | tokio::spawn(async move { 21 | let mut block_count = 0; 22 | while let Some(prism_object) = object_rx.recv().await { 23 | let slot = block_count; 24 | let block_number = slot; // For in-memory blockchain, use slot as block number 25 | 26 | let published_prism_object = PublishedPrismObject { 27 | block_metadata: BlockMetadata { 28 | slot_number: SlotNo::from(slot), 29 | block_number: 
BlockNo::from(block_number), 30 | cbt: Utc::now(), 31 | absn: 0, // In-memory blocks contain a single PrismObject per block 32 | }, 33 | prism_object, 34 | }; 35 | if let Err(e) = block_tx.send(published_prism_object).await { 36 | tracing::error!(error = ?e, "failed to send published object to block receiver"); 37 | break; 38 | } 39 | block_count += 1; 40 | } 41 | }); 42 | 43 | let source = InMemoryDltSource::new(block_rx); 44 | let sink = Arc::new(InMemoryDltSink::new(object_tx)); 45 | (source, sink) 46 | } 47 | -------------------------------------------------------------------------------- /lib/did-prism-indexer/src/dlt/common.rs: -------------------------------------------------------------------------------- 1 | use identus_apollo::hex::HexStr; 2 | use identus_did_prism::dlt::DltCursor; 3 | use tokio::sync::watch; 4 | use tokio::task::JoinHandle; 5 | 6 | use crate::dlt::error::DltError; 7 | use crate::repo::DltCursorRepo; 8 | 9 | pub struct CursorPersistWorker { 10 | cursor_rx: watch::Receiver>, 11 | store: Store, 12 | } 13 | 14 | impl CursorPersistWorker { 15 | pub fn new(store: Store, cursor_rx: tokio::sync::watch::Receiver>) -> Self { 16 | Self { cursor_rx, store } 17 | } 18 | 19 | pub fn spawn(mut self) -> JoinHandle> { 20 | const DELAY: tokio::time::Duration = tokio::time::Duration::from_secs(60); 21 | tracing::info!("Spawn cursor persist worker with {:?} interval", DELAY); 22 | tokio::spawn(async move { 23 | loop { 24 | let recv_result = self.cursor_rx.changed().await; 25 | tokio::time::sleep(DELAY).await; 26 | 27 | if let Err(e) = recv_result { 28 | tracing::error!("Error getting cursor to persist: {}", e); 29 | } 30 | 31 | let cursor = self.cursor_rx.borrow_and_update().clone(); 32 | let Some(cursor) = cursor else { continue }; 33 | tracing::info!( 34 | "Persisting cursor on slot ({}, {})", 35 | cursor.slot, 36 | HexStr::from(cursor.block_hash.as_slice()).to_string(), 37 | ); 38 | 39 | let persist_result = self.store.set_cursor(cursor).await; 40 | if 
let Err(e) = persist_result { 41 | tracing::error!("Error persisting cursor: {}", e); 42 | } 43 | } 44 | }) 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /.github/workflows/scala-steward.yml: -------------------------------------------------------------------------------- 1 | on: 2 | schedule: 3 | - cron: '30 0 * * *' 4 | workflow_dispatch: 5 | 6 | name: Scala Steward 7 | 8 | permissions: 9 | contents: write 10 | issues: write 11 | pull-requests: write 12 | 13 | jobs: 14 | scala-steward: 15 | runs-on: ubuntu-latest 16 | name: Scala Steward 17 | steps: 18 | - name: Checkout 19 | uses: actions/checkout@v4 20 | 21 | - name: Set up JDK 11 22 | uses: actions/setup-java@v4 23 | with: 24 | distribution: "temurin" 25 | java-version: "11" 26 | 27 | - name: Install GPG key 28 | uses: crazy-max/ghaction-import-gpg@v6 29 | id: import_gpg 30 | with: 31 | gpg_private_key: ${{ secrets.HYP_BOT_GPG_PRIVATE }} 32 | passphrase: ${{ secrets.HYP_BOT_GPG_PASSWORD }} 33 | git_user_signingkey: true 34 | git_commit_gpgsign: true 35 | git_config_global: true 36 | git_tag_gpgsign: false 37 | 38 | - name: GPG user IDs 39 | run: | 40 | echo "fingerprint: ${{ steps.import_gpg.outputs.fingerprint }}" 41 | echo "keyid: ${{ steps.import_gpg.outputs.keyid }}" 42 | echo "name: ${{ steps.import_gpg.outputs.name }}" 43 | echo "email: ${{ steps.import_gpg.outputs.email }}" 44 | 45 | - name: Launch Scala Steward 46 | uses: scala-steward-org/scala-steward-action@v2 47 | env: 48 | GITHUB_TOKEN: ${{ secrets.IDENTUS_CI }} 49 | with: 50 | github-token: ${{ secrets.IDENTUS_CI }} 51 | sign-commits: true 52 | signing-key: ${{ steps.import_gpg.outputs.keyid }} 53 | author-email: ${{ steps.import_gpg.outputs.email }} 54 | author-name: ${{ steps.import_gpg.outputs.name }} 55 | branches: main 56 | repo-config: .scala-steward.conf 57 | -------------------------------------------------------------------------------- /bin/neoprism-node/src/http/mod.rs: 
-------------------------------------------------------------------------------- 1 | use std::path::Path; 2 | 3 | use axum::Router; 4 | use axum::response::Redirect; 5 | use axum::routing::get; 6 | use features::{api, ui_explorer, ui_resolver}; 7 | use identus_did_resolver_http::DidResolverStateDyn; 8 | use tower_http::services::ServeDir; 9 | 10 | use crate::{AppState, IndexerState, IndexerUiState, RunMode, SubmitterState}; 11 | 12 | mod components; 13 | mod features; 14 | mod urls; 15 | 16 | pub use features::api::build_openapi; 17 | 18 | #[derive(Default)] 19 | pub struct Routers { 20 | pub app_router: Router, 21 | pub indexer_ui_router: Router, 22 | pub indexer_router: Router, 23 | pub did_resolver_router: Router, 24 | pub submitter_router: Router, 25 | } 26 | 27 | pub fn router(assets_dir: &Path, mode: RunMode, port: u16, external_url: Option<&str>) -> Routers { 28 | tracing::info!("Serving static asset from {:?}", assets_dir); 29 | 30 | let api_router = api::router(mode, port, external_url); 31 | 32 | let ui_router = Router::new() 33 | .nest_service(urls::AssetBase::AXUM_PATH, ServeDir::new(assets_dir)) 34 | .merge(ui_explorer::router()) 35 | .merge(ui_resolver::router()); 36 | 37 | let home_router = match mode { 38 | RunMode::Submitter => Router::new().route( 39 | urls::Home::AXUM_PATH, 40 | get(Redirect::temporary(&urls::Swagger::new_uri())), 41 | ), 42 | RunMode::Indexer | RunMode::Standalone => Router::new().route( 43 | urls::Home::AXUM_PATH, 44 | get(Redirect::temporary(&urls::Resolver::new_uri(None))), 45 | ), 46 | }; 47 | 48 | Routers { 49 | app_router: api_router.app_router.merge(home_router), 50 | indexer_ui_router: api_router.indexer_ui_router.merge(ui_router), 51 | ..api_router 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /tools/just-recipes/release.just: -------------------------------------------------------------------------------- 1 | set working-directory := '../..' 
2 | 3 | # Automatically bump version using git-cliff 4 | bump-version: 5 | #!/usr/bin/env bash 6 | set -euxo pipefail 7 | NEW_VERSION=$(git-cliff --bump --context | jq -r .[0].version | sed s/^v//) 8 | just release set-version "$NEW_VERSION" 9 | 10 | # Set project version manually 11 | set-version VERSION: 12 | echo "Setting new version to {{ VERSION }}" 13 | echo "{{ VERSION }}" > version 14 | cargo set-version "{{ VERSION }}" 15 | just build-config 16 | git-cliff -t "{{ VERSION }}" > CHANGELOG.md 17 | 18 | # Build and release multi-arch cardano-testnet Docker image 19 | publish-testnet-image: 20 | #!/usr/bin/env bash 21 | set -euxo pipefail 22 | TAG=$(date +"%Y%m%d-%H%M%S") 23 | 24 | echo "Building amd64 image..." 25 | nix build .#cardano-testnet-docker-linux-amd64 -o result-amd64 26 | 27 | echo "Building arm64 image..." 28 | nix build .#cardano-testnet-docker-linux-arm64 -o result-arm64 29 | 30 | echo "Loading images into Docker..." 31 | docker load < ./result-amd64 32 | docker load < ./result-arm64 33 | 34 | echo "Tagging images with $TAG..." 35 | docker tag cardano-testnet:latest-amd64 "patextreme/cardano-testnet:$TAG-amd64" 36 | docker tag cardano-testnet:latest-arm64 "patextreme/cardano-testnet:$TAG-arm64" 37 | 38 | rm -rf ./result-amd64 ./result-arm64 39 | 40 | echo "Pushing architecture-specific images..." 41 | docker push "patextreme/cardano-testnet:$TAG-amd64" 42 | docker push "patextreme/cardano-testnet:$TAG-arm64" 43 | 44 | echo "Creating and pushing multi-arch manifest..." 
45 | docker manifest create "patextreme/cardano-testnet:$TAG" \ 46 | "patextreme/cardano-testnet:$TAG-amd64" \ 47 | "patextreme/cardano-testnet:$TAG-arm64" 48 | docker manifest push "patextreme/cardano-testnet:$TAG" 49 | 50 | echo "✓ Released: patextreme/cardano-testnet:$TAG" 51 | -------------------------------------------------------------------------------- /lib/apollo/src/hash.rs: -------------------------------------------------------------------------------- 1 | use ring::digest; 2 | 3 | #[derive(Debug, derive_more::Display, derive_more::Error)] 4 | pub enum Error { 5 | #[display("hash operation encountered invalid input size")] 6 | InvalidByteSize { 7 | type_name: &'static str, 8 | expected: usize, 9 | actual: usize, 10 | }, 11 | } 12 | 13 | #[derive(Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::Debug)] 14 | #[debug("sha256-{}", crate::hex::HexStr::from(_0))] 15 | pub struct Sha256Digest([u8; 32]); 16 | 17 | impl Sha256Digest { 18 | pub fn as_bytes(&self) -> &[u8] { 19 | &self.0 20 | } 21 | 22 | pub fn as_array(&self) -> &[u8; 32] { 23 | &self.0 24 | } 25 | 26 | pub fn to_vec(&self) -> Vec { 27 | self.0.to_vec() 28 | } 29 | 30 | /// Convert bytes to the digest. 31 | /// This only validate and wrap the raw bytes, it does not hash. 32 | /// 33 | /// # Example 34 | /// 35 | /// ``` 36 | /// use identus_apollo::hash::Sha256Digest; 37 | /// let digest = Sha256Digest::from_bytes(&vec![42u8; 32]).unwrap(); 38 | /// assert_eq!(digest.as_bytes(), vec![42u8; 32]); 39 | /// 40 | /// let digest = Sha256Digest::from_bytes(&vec![42u8; 31]); 41 | /// assert!(digest.is_err()); 42 | /// ``` 43 | pub fn from_bytes(bytes: &[u8]) -> Result { 44 | if bytes.len() != 32 { 45 | Err(Error::InvalidByteSize { 46 | type_name: std::any::type_name::(), 47 | expected: 32, 48 | actual: bytes.len(), 49 | })? 
50 | } 51 | 52 | let mut digest = [0u8; 32]; 53 | digest.copy_from_slice(bytes); 54 | Ok(Self(digest)) 55 | } 56 | } 57 | 58 | pub fn sha256>(bytes: B) -> Sha256Digest { 59 | let digest = digest::digest(&digest::SHA256, bytes.as_ref()); 60 | let digest: [u8; 32] = digest 61 | .as_ref() 62 | .try_into() 63 | .expect("The digest must have length of 32 bytes"); 64 | Sha256Digest(digest) 65 | } 66 | -------------------------------------------------------------------------------- /docker/blockfrost-neoprism-demo/compose.yml: -------------------------------------------------------------------------------- 1 | # Code generated by Python script. DO NOT EDIT. 2 | services: 3 | bf-ryo: 4 | environment: 5 | BLOCKFROST_CONFIG_DBSYNC_DATABASE: ${DBSYNC_DB} 6 | BLOCKFROST_CONFIG_DBSYNC_HOST: ${DBSYNC_HOST} 7 | BLOCKFROST_CONFIG_DBSYNC_PASSWORD: ${DBSYNC_PASSWORD} 8 | BLOCKFROST_CONFIG_DBSYNC_PORT: ${DBSYNC_PORT:-5432} 9 | BLOCKFROST_CONFIG_DBSYNC_USER: ${DBSYNC_USER} 10 | BLOCKFROST_CONFIG_NETWORK: ${NETWORK:-mainnet} 11 | BLOCKFROST_MITHRIL_ENABLED: 'false' 12 | NODE_ENV: development 13 | image: blockfrost/backend-ryo:v4.3.0 14 | restart: always 15 | volumes: 16 | - ./ryo.yaml:/app/config/development.yaml 17 | caddy: 18 | image: caddy:2.10.2 19 | ports: 20 | - 3000:3000 21 | restart: always 22 | volumes: 23 | - ./Caddyfile-blockfrost:/etc/caddy/Caddyfile 24 | db-neoprism: 25 | environment: 26 | POSTGRES_DB: postgres 27 | POSTGRES_PASSWORD: postgres 28 | POSTGRES_USER: postgres 29 | healthcheck: 30 | interval: 2s 31 | retries: 30 32 | test: 33 | - CMD 34 | - pg_isready 35 | - -U 36 | - postgres 37 | timeout: 5s 38 | image: postgres:16 39 | restart: always 40 | neoprism: 41 | command: 42 | - indexer 43 | depends_on: 44 | db-neoprism: 45 | condition: service_healthy 46 | environment: 47 | NPRISM_CARDANO_DBSYNC_POLL_INTERVAL: '10' 48 | NPRISM_CARDANO_DBSYNC_URL: ${DBSYNC_URL} 49 | NPRISM_CARDANO_NETWORK: ${NETWORK:-mainnet} 50 | NPRISM_DB_URL: 
postgres://postgres:postgres@db-neoprism:5432/postgres 51 | RUST_LOG: oura=warn,tracing::span=warn,info 52 | healthcheck: 53 | interval: 2s 54 | retries: 30 55 | test: 56 | - CMD 57 | - curl 58 | - -f 59 | - http://localhost:8080/api/_system/health 60 | timeout: 5s 61 | image: hyperledgeridentus/identus-neoprism:0.8.0 62 | ports: 63 | - 8080:8080 64 | restart: always 65 | volumes: 66 | node-testnet: {} 67 | -------------------------------------------------------------------------------- /bin/neoprism-node/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "neoprism-node" 3 | version.workspace = true 4 | edition.workspace = true 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | anyhow = { workspace = true } 10 | async-trait = { workspace = true } 11 | axum = { workspace = true } 12 | clap = { workspace = true, features = [ "derive", "env" ] } 13 | chrono = { workspace = true } 14 | derive_more = { workspace = true, features = [ "as_ref", "from", "into", "debug", "display", "error" ] } 15 | dirs = { workspace = true } 16 | lazybe = { workspace = true, features = [ "openapi" ] } 17 | maud = { workspace = true, features = [ "axum" ] } 18 | serde = { workspace = true, features = [ "derive" ] } 19 | sqlx = { workspace = true, features = [ "runtime-tokio" ] } 20 | tokio = { workspace = true, features = [ "full" ] } 21 | tracing = { workspace = true } 22 | tracing-subscriber = { workspace = true } 23 | tower = { workspace = true } 24 | tower-http = { workspace = true, features = [ "fs", "trace", "cors" ] } 25 | 26 | utoipa = { workspace = true, features = [ "chrono" ] } 27 | utoipa-swagger-ui = { version = "9", features = [ "axum", "vendored" ] } 28 | 29 | identus-apollo = { workspace = true, features = [ "hex", "jwk" ] } 30 | identus-did-core = { workspace = true, features = [ "openapi" ] } 31 | identus-did-prism = { 
workspace = true, features = [ "openapi" ] } 32 | identus-did-prism-indexer = { workspace = true, features = [ "oura", "dbsync" ] } 33 | identus-did-prism-ledger = { workspace = true, features = [ "in-memory" ] } 34 | identus-did-prism-submitter = { workspace = true, features = [ "cardano-wallet" ] } 35 | identus-did-resolver-http = { workspace = true, features = [ "openapi" ] } 36 | node-storage = { workspace = true, features = [ "sqlite-storage" ] } 37 | -------------------------------------------------------------------------------- /lib/did-core/tests/did.rs: -------------------------------------------------------------------------------- 1 | use std::str::FromStr; 2 | 3 | use identus_did_core::{Did, DidOps, DidUrl}; 4 | 5 | #[test] 6 | fn parse_did() { 7 | let did: Did = "did:example:abcdefghi".parse().unwrap(); 8 | assert_eq!(did.to_string(), "did:example:abcdefghi"); 9 | assert_eq!(did.method(), "example"); 10 | assert_eq!(did.method_id(), "abcdefghi"); 11 | 12 | let did: Did = "did:prism:9bf36a6dd4090ad66e359a0c041e25662c3f84c00467e9a61eeba68477c8a595" 13 | .parse() 14 | .unwrap(); 15 | assert_eq!( 16 | did.to_string(), 17 | "did:prism:9bf36a6dd4090ad66e359a0c041e25662c3f84c00467e9a61eeba68477c8a595" 18 | ); 19 | assert_eq!(did.method(), "prism"); 20 | assert_eq!( 21 | did.method_id(), 22 | "9bf36a6dd4090ad66e359a0c041e25662c3f84c00467e9a61eeba68477c8a595" 23 | ); 24 | } 25 | 26 | #[test] 27 | fn parse_did_fail() { 28 | assert!(Did::from_str("did").is_err()); 29 | assert!(Did::from_str("did:").is_err()); 30 | assert!(Did::from_str("did::").is_err()); 31 | assert!(Did::from_str("did:example").is_err()); 32 | assert!(Did::from_str("did:example:").is_err()); 33 | assert!(Did::from_str("did:_______:abcdefghi").is_err()); 34 | assert!(Did::from_str("did:example:abcdefghi?service=abc").is_err()); 35 | assert!(Did::from_str("did:example:abcdefghi#key-1").is_err()); 36 | } 37 | 38 | #[test] 39 | fn parse_did_url() { 40 | let did: DidUrl = 
"did:example:abcdefghi".parse().unwrap(); 41 | assert_eq!(did.to_string(), "did:example:abcdefghi"); 42 | // assert_eq!(did.method(), "example"); 43 | // assert_eq!(did.method_id(), "abcdefghi"); 44 | 45 | let did: DidUrl = "did:prism:9bf36a6dd4090ad66e359a0c041e25662c3f84c00467e9a61eeba68477c8a595" 46 | .parse() 47 | .unwrap(); 48 | assert_eq!( 49 | did.to_string(), 50 | "did:prism:9bf36a6dd4090ad66e359a0c041e25662c3f84c00467e9a61eeba68477c8a595" 51 | ); 52 | // assert_eq!(did.method(), "prism"); 53 | // assert_eq!(did.method_id(), "9bf36a6dd4090ad66e359a0c041e25662c3f84c00467e9a61eeba68477c8a595"); 54 | } 55 | -------------------------------------------------------------------------------- /lib/did-prism-indexer/src/dlt/error.rs: -------------------------------------------------------------------------------- 1 | use identus_did_prism::utils::Location; 2 | 3 | type StdError = Box; 4 | 5 | #[derive(Debug, derive_more::Display, derive_more::Error)] 6 | pub enum DltError { 7 | #[display("unable to initialize DLT source")] 8 | InitSource { source: StdError }, 9 | #[display("timeout receiving event from DLT source {location}")] 10 | EventRecvTimeout { location: Location }, 11 | #[display("event source has connectivity issue {location}")] 12 | Connection { location: Location }, 13 | #[display("handling DLT event failed {location}")] 14 | EventHandling { source: StdError, location: Location }, 15 | } 16 | 17 | /// This is an internal error type that should be handled when streaming from DLT source. 
18 | #[allow(unused)] 19 | #[derive(Debug, derive_more::Display, derive_more::Error)] 20 | pub(crate) enum MetadataReadError { 21 | #[display("metadata is not a valid json on block {block_hash:?} tx {tx_idx:?}")] 22 | InvalidMetadataType { 23 | source: StdError, 24 | block_hash: Option, 25 | tx_idx: Option, 26 | }, 27 | #[display("failed to decode prism_block hex on block {block_hash:?} tx {tx_idx:?}")] 28 | PrismBlockHexDecode { 29 | source: identus_apollo::hex::Error, 30 | block_hash: Option, 31 | tx_idx: Option, 32 | }, 33 | #[display("failed to decode prism_block protobuf on block {block_hash:?} tx {tx_idx:?}")] 34 | PrismBlockProtoDecode { 35 | source: protobuf::Error, 36 | block_hash: Option, 37 | tx_idx: Option, 38 | }, 39 | #[display("timestamp {timestamp} on block {block_hash:?} tx {tx_idx:?} is invalid")] 40 | InvalidBlockTimestamp { 41 | block_hash: Option, 42 | tx_idx: Option, 43 | timestamp: i64, 44 | }, 45 | #[display("block property '{name}' is missing on block {block_hash:?} tx {tx_idx:?}")] 46 | MissingBlockProperty { 47 | block_hash: Option, 48 | tx_idx: Option, 49 | name: &'static str, 50 | }, 51 | } 52 | -------------------------------------------------------------------------------- /bin/neoprism-node/src/http/features/api/error.rs: -------------------------------------------------------------------------------- 1 | use axum::Json; 2 | use axum::http::{StatusCode, header}; 3 | use axum::response::{IntoResponse, Response}; 4 | use serde::{Deserialize, Serialize}; 5 | 6 | use crate::app::service::error::ResolutionError; 7 | 8 | #[derive(Debug, derive_more::Display, derive_more::Error)] 9 | pub enum ApiError { 10 | #[display("service not available")] 11 | NotImplemented, 12 | #[display("not found")] 13 | NotFound, 14 | #[display("bad request: {message}")] 15 | BadRequest { message: String }, 16 | #[display("internal server error")] 17 | Internal { source: anyhow::Error }, 18 | } 19 | 20 | #[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] 
21 | pub struct ApiErrorResponseBody { 22 | message: String, 23 | } 24 | 25 | impl IntoResponse for ApiError { 26 | fn into_response(self) -> Response { 27 | let status = match self { 28 | ApiError::NotImplemented => StatusCode::NOT_IMPLEMENTED, 29 | ApiError::NotFound => StatusCode::NOT_FOUND, 30 | ApiError::BadRequest { .. } => StatusCode::BAD_REQUEST, 31 | ApiError::Internal { .. } => StatusCode::INTERNAL_SERVER_ERROR, 32 | }; 33 | let body = Json(ApiErrorResponseBody { 34 | message: self.to_string(), 35 | }); 36 | if let ApiError::Internal { source } = self { 37 | let msg = source.chain().map(|e| e.to_string()).collect::>().join("\n"); 38 | tracing::error!("{msg}"); 39 | } 40 | (status, [(header::CONTENT_TYPE, "application/json")], body).into_response() 41 | } 42 | } 43 | 44 | impl From for ApiError { 45 | fn from(value: ResolutionError) -> Self { 46 | match value { 47 | ResolutionError::NotFound => ApiError::NotFound, 48 | ResolutionError::MethodNotSupported => ApiError::NotImplemented, 49 | ResolutionError::InternalError { source } => ApiError::Internal { source }, 50 | ResolutionError::InvalidDid { source } => ApiError::BadRequest { 51 | message: source.to_string(), 52 | }, 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /bin/neoprism-node/src/http/features/api/system.rs: -------------------------------------------------------------------------------- 1 | use axum::Json; 2 | use axum::extract::State; 3 | use utoipa::OpenApi; 4 | 5 | use crate::http::features::api::error::ApiErrorResponseBody; 6 | use crate::http::features::api::system::models::AppMeta; 7 | use crate::http::features::api::tags; 8 | use crate::http::urls; 9 | use crate::{AppState, VERSION}; 10 | 11 | #[derive(OpenApi)] 12 | #[openapi(paths(health, app_meta))] 13 | pub struct SystemOpenApiDoc; 14 | 15 | mod models { 16 | use serde::{Deserialize, Serialize}; 17 | use utoipa::ToSchema; 18 | 19 | use crate::RunMode; 20 | 21 | 
#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] 22 | pub struct AppMeta { 23 | pub version: String, 24 | pub mode: AppMetaRunMode, 25 | } 26 | 27 | #[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] 28 | pub enum AppMetaRunMode { 29 | Indexer, 30 | Submitter, 31 | Standalone, 32 | } 33 | 34 | impl From for AppMetaRunMode { 35 | fn from(value: RunMode) -> Self { 36 | match value { 37 | RunMode::Indexer => Self::Indexer, 38 | RunMode::Submitter => Self::Submitter, 39 | RunMode::Standalone => Self::Standalone, 40 | } 41 | } 42 | } 43 | } 44 | 45 | #[utoipa::path( 46 | get, 47 | path = urls::ApiHealth::AXUM_PATH, 48 | tags = [tags::SYSTEM], 49 | responses( 50 | (status = OK, description = "Healthy", body = String, example = "Ok"), 51 | ) 52 | )] 53 | pub async fn health() -> &'static str { 54 | "Ok" 55 | } 56 | 57 | #[utoipa::path( 58 | get, 59 | path = urls::ApiAppMeta::AXUM_PATH, 60 | tags = [tags::SYSTEM], 61 | responses( 62 | (status = OK, description = "Healthy", body = AppMeta), 63 | (status = INTERNAL_SERVER_ERROR, description = "An unexpected error occurred while retrieving app metadata", body = ApiErrorResponseBody), 64 | ) 65 | )] 66 | pub async fn app_meta(State(state): State) -> Json { 67 | Json(AppMeta { 68 | version: VERSION.to_string(), 69 | mode: state.run_mode.into(), 70 | }) 71 | } 72 | -------------------------------------------------------------------------------- /tests/prism-test/build.sbt: -------------------------------------------------------------------------------- 1 | val scala3Version = "3.3.7" 2 | 3 | val V = new { 4 | val zio = "2.1.23" 5 | val zioHttp = "3.7.4" 6 | val monocle = "3.3.0" 7 | val apollo = "1.8.0" 8 | val grpcNetty = "1.77.0" 9 | } 10 | 11 | val D = new { 12 | val scalaPbDeps: Seq[ModuleID] = Seq( 13 | "com.thesamet.scalapb" %% "scalapb-runtime" % scalapb.compiler.Version.scalapbVersion % "protobuf", 14 | "com.thesamet.scalapb" %% "scalapb-runtime-grpc" % scalapb.compiler.Version.scalapbVersion 15 | ) 16 | 
17 | val apolloDeps: Seq[ModuleID] = Seq( 18 | "org.hyperledger.identus" % "apollo-jvm" % V.apollo exclude ( 19 | "net.jcip", 20 | "jcip-annotations" 21 | ), // Exclude because of license 22 | "com.github.stephenc.jcip" % "jcip-annotations" % "1.0-1" % Runtime // Replace for net.jcip % jcip-annotations" 23 | ) 24 | 25 | val deps: Seq[ModuleID] = Seq( 26 | "dev.zio" %% "zio" % V.zio, 27 | "io.grpc" % "grpc-netty-shaded" % V.grpcNetty, 28 | "dev.zio" %% "zio-http" % V.zioHttp, 29 | "dev.optics" %% "monocle-core" % V.monocle, 30 | "dev.optics" %% "monocle-macro" % V.monocle 31 | ) 32 | 33 | val testDeps: Seq[ModuleID] = Seq( 34 | "dev.zio" %% "zio-test" % V.zio % Test, 35 | "dev.zio" %% "zio-test-sbt" % V.zio % Test, 36 | "dev.zio" %% "zio-test-magnolia" % V.zio % Test 37 | ) 38 | } 39 | 40 | lazy val root = project 41 | .in(file(".")) 42 | .settings( 43 | name := "prism-test", 44 | version := "0.1.0-SNAPSHOT", 45 | scalaVersion := scala3Version, 46 | scalacOptions := Seq( 47 | "-feature", 48 | "-deprecation", 49 | "-unchecked", 50 | "-Wunused:all" 51 | ), 52 | testFrameworks += new TestFramework("zio.test.sbt.ZTestFramework"), 53 | Compile / PB.targets := Seq( 54 | scalapb.gen() -> (Compile / sourceManaged).value / "scalapb" 55 | ), 56 | Compile / PB.protoSources := Seq( 57 | baseDirectory.value / ".." / ".." 
/ "lib" / "did-prism" / "proto", 58 | (Compile / resourceDirectory).value // includes scalapb codegen package wide config 59 | ), 60 | libraryDependencies ++= D.scalaPbDeps ++ D.apolloDeps ++ D.deps ++ D.testDeps 61 | ) 62 | -------------------------------------------------------------------------------- /lib/node-storage/migrations/postgres/20250612062110_vdr_index.sql: -------------------------------------------------------------------------------- 1 | -- migrate raw_operation to not be indexed by did 2 | DROP VIEW did_stats RESTRICT; 3 | 4 | ALTER TABLE raw_operation 5 | DROP COLUMN did; 6 | 7 | ALTER TABLE raw_operation 8 | ADD CONSTRAINT raw_operation_abs_order UNIQUE (block_number, absn, osn); 9 | 10 | ALTER TABLE raw_operation 11 | ADD COLUMN is_indexed BOOLEAN; 12 | 13 | UPDATE raw_operation SET is_indexed = false 14 | WHERE is_indexed IS null; 15 | 16 | ALTER TABLE raw_operation 17 | ALTER COLUMN is_indexed SET NOT NULL; 18 | 19 | -- add indexing tables 20 | CREATE TABLE IF NOT EXISTS indexed_ssi_operation ( 21 | id UUID DEFAULT gen_random_uuid(), 22 | raw_operation_id UUID NOT NULL UNIQUE, 23 | did BYTEA NOT NULL, 24 | indexed_at TIMESTAMPTZ NOT NULL, 25 | FOREIGN KEY (raw_operation_id) REFERENCES raw_operation (id) ON DELETE CASCADE 26 | ); 27 | 28 | CREATE TABLE IF NOT EXISTS indexed_vdr_operation ( 29 | id UUID DEFAULT gen_random_uuid(), 30 | raw_operation_id UUID NOT NULL UNIQUE, 31 | operation_hash BYTEA NOT NULL, 32 | init_operation_hash BYTEA NOT NULL, 33 | prev_operation_hash BYTEA, 34 | did BYTEA NOT NULL, 35 | indexed_at TIMESTAMPTZ NOT NULL, 36 | FOREIGN KEY (raw_operation_id) REFERENCES raw_operation (id) ON DELETE CASCADE 37 | ); 38 | 39 | CREATE VIEW raw_operation_by_did AS 40 | WITH unioned AS ( 41 | SELECT 42 | did, 43 | raw_operation_id 44 | FROM indexed_ssi_operation 45 | UNION 46 | SELECT 47 | did, 48 | raw_operation_id 49 | FROM indexed_vdr_operation 50 | ) 51 | SELECT 52 | ro.id, 53 | ro.signed_operation_data, 54 | ro.slot, 55 | 
ro.block_number, 56 | ro.cbt, 57 | ro.absn, 58 | ro.osn, 59 | ro.is_indexed, 60 | u.did 61 | FROM unioned AS u LEFT JOIN raw_operation AS ro ON u.raw_operation_id = ro.id; 62 | 63 | CREATE VIEW did_stats AS 64 | SELECT 65 | did, 66 | count(*) AS operation_count, 67 | max(block_number) AS last_block, 68 | max(slot) AS last_slot, 69 | max(cbt) AS last_cbt, 70 | min(block_number) AS first_block, 71 | min(slot) AS first_slot, 72 | min(cbt) AS first_cbt 73 | FROM raw_operation_by_did 74 | GROUP BY 1; 75 | -------------------------------------------------------------------------------- /tools/compose_gen/stacks/blockfrost_neoprism_demo.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | 3 | from ..models import ComposeConfig 4 | from ..services import caddy, db, neoprism, ryo 5 | 6 | 7 | class Options(BaseModel): 8 | dbsync_url: str = "${DBSYNC_URL}" 9 | dbsync_host: str = "${DBSYNC_HOST}" 10 | dbsync_port: str = "${DBSYNC_PORT:-5432}" 11 | dbsync_db: str = "${DBSYNC_DB}" 12 | dbsync_user: str = "${DBSYNC_USER}" 13 | dbsync_password: str = "${DBSYNC_PASSWORD}" 14 | network: str = "${NETWORK:-mainnet}" 15 | 16 | 17 | def mk_stack(options: Options | None = None) -> ComposeConfig: 18 | options = options or Options() 19 | services = { 20 | "neoprism": neoprism.mk_service( 21 | neoprism.Options( 22 | network=options.network, 23 | storage_backend=neoprism.PostgresStorageBackend(host="db-neoprism"), 24 | host_port=8080, 25 | command=neoprism.IndexerCommand( 26 | dlt_source=neoprism.DbSyncDltSource( 27 | url=options.dbsync_url, poll_interval=10 28 | ), 29 | ), 30 | ), 31 | ), 32 | "db-neoprism": db.mk_service(db.Options()), 33 | "bf-ryo": ryo.mk_service( 34 | ryo.Options( 35 | dbsync_db=ryo.DbSyncDbArgs( 36 | host=options.dbsync_host, 37 | port=options.dbsync_port, 38 | db_name=options.dbsync_db, 39 | username=options.dbsync_user, 40 | password=options.dbsync_password, 41 | ), 42 | network=options.network, 
43 | testnet_volume=None, 44 | config_file="./ryo.yaml", 45 | bootstrap_testnet_host=None, 46 | wait_for_db_sync=False, 47 | genesis_data_folder=None, 48 | ) 49 | ), 50 | "caddy": caddy.mk_service( 51 | caddy.Options( 52 | host_port=3000, target_port=3000, caddyfile="./Caddyfile-blockfrost" 53 | ) 54 | ), 55 | } 56 | 57 | return ComposeConfig(services=services, volumes={"node-testnet": {}}) 58 | -------------------------------------------------------------------------------- /lib/did-prism-ledger/src/in_memory/source.rs: -------------------------------------------------------------------------------- 1 | use identus_did_prism::dlt::{DltCursor, PublishedPrismObject}; 2 | use identus_did_prism_indexer::DltSource; 3 | use tokio::sync::{mpsc, watch}; 4 | 5 | pub struct InMemoryDltSource { 6 | block_rx: mpsc::Receiver, 7 | sync_cursor_tx: watch::Sender>, 8 | } 9 | 10 | impl InMemoryDltSource { 11 | pub fn new(block_rx: mpsc::Receiver) -> Self { 12 | let (sync_cursor_tx, _) = watch::channel::>(None); 13 | Self { 14 | block_rx, 15 | sync_cursor_tx, 16 | } 17 | } 18 | } 19 | 20 | impl DltSource for InMemoryDltSource { 21 | fn sync_cursor(&self) -> watch::Receiver> { 22 | self.sync_cursor_tx.subscribe() 23 | } 24 | 25 | fn into_stream(self) -> Result, String> { 26 | let (event_tx, event_rx) = mpsc::channel::(1024); 27 | let sync_cursor_tx = self.sync_cursor_tx; 28 | 29 | tokio::spawn(async move { 30 | let mut block_rx = self.block_rx; 31 | 32 | while let Some(published_object) = block_rx.recv().await { 33 | // Update cursor based on the block metadata 34 | let cursor = DltCursor { 35 | slot: published_object.block_metadata.slot_number.into(), 36 | // Generate synthetic block_hash from block_number for in-memory ledger 37 | block_hash: published_object 38 | .block_metadata 39 | .block_number 40 | .inner() 41 | .to_le_bytes() 42 | .to_vec(), 43 | cbt: Some(published_object.block_metadata.cbt), 44 | }; 45 | let _ = sync_cursor_tx.send(Some(cursor)); 46 | 47 | // Send the 
published object downstream 48 | if event_tx.send(published_object).await.is_err() { 49 | tracing::warn!("InMemoryDltSource: event receiver closed"); 50 | break; 51 | } 52 | } 53 | }); 54 | 55 | Ok(event_rx) 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | tag: 7 | description: Tag to release (e.g. 0.1.0 without the "v") 8 | required: true 9 | 10 | jobs: 11 | publish: 12 | name: Release neoprism artifacts 13 | runs-on: ubuntu-latest 14 | env: 15 | DOCKERHUB_ORG: ${{ vars.DOCKERHUB_ORG }} 16 | RELEASE_VERSION: ${{ github.event.inputs.tag }} 17 | 18 | steps: 19 | - name: Checkout code 20 | uses: actions/checkout@v4 21 | with: 22 | ref: v${{ github.event.inputs.tag }} 23 | 24 | - name: Install Nix 25 | uses: cachix/install-nix-action@v31 26 | with: 27 | extra_nix_config: | 28 | experimental-features = nix-command flakes 29 | 30 | - name: Set up Docker 31 | uses: docker/setup-docker-action@v4 32 | 33 | - name: Login to Container Registry 34 | uses: docker/login-action@v3 35 | with: 36 | username: ${{ vars.DOCKERHUB_USERNAME }} 37 | password: ${{ secrets.DOCKERHUB_TOKEN }} 38 | 39 | - name: Build image (amd64) 40 | run: nix build .#neoprism-docker-linux-amd64 -L -o result-amd64 41 | 42 | - name: Build image (arm64) 43 | run: nix build .#neoprism-docker-linux-arm64 -L -o result-arm64 44 | 45 | - name: Load and push docker images 46 | run: | 47 | docker load < result-amd64 48 | docker load < result-arm64 49 | docker tag "identus-neoprism:$RELEASE_VERSION-amd64" "$DOCKERHUB_ORG/identus-neoprism:$RELEASE_VERSION-amd64" 50 | docker tag "identus-neoprism:$RELEASE_VERSION-arm64" "$DOCKERHUB_ORG/identus-neoprism:$RELEASE_VERSION-arm64" 51 | docker images 52 | docker push "$DOCKERHUB_ORG/identus-neoprism:$RELEASE_VERSION-amd64" 53 | docker push 
"$DOCKERHUB_ORG/identus-neoprism:$RELEASE_VERSION-arm64" 54 | 55 | # create multi-arch image 56 | docker manifest create "$DOCKERHUB_ORG/identus-neoprism:$RELEASE_VERSION" \ 57 | "$DOCKERHUB_ORG/identus-neoprism:$RELEASE_VERSION-amd64" \ 58 | "$DOCKERHUB_ORG/identus-neoprism:$RELEASE_VERSION-arm64" 59 | docker manifest push "$DOCKERHUB_ORG/identus-neoprism:$RELEASE_VERSION" 60 | -------------------------------------------------------------------------------- /lib/did-prism/proto/prism.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package proto; 4 | 5 | import "prism-version.proto"; 6 | import "prism-ssi.proto"; 7 | import "prism-storage.proto"; 8 | 9 | /** 10 | * Wraps a PrismBlock and its metadata. 11 | */ 12 | message PrismObject { 13 | reserved 1, 2, 3; 14 | reserved "block_hash"; 15 | reserved "block_operation_count"; // Number of operations in the block. 16 | reserved "block_byte_length"; // Byte length of the block. 17 | 18 | PrismBlock block_content = 4; // The block content. 19 | } 20 | 21 | /** 22 | * Represents a block that holds events/operations. 23 | */ 24 | message PrismBlock { 25 | reserved 1; // Represents the version of the block. Deprecated 26 | repeated SignedPrismOperation operations = 2; // A signed operation, necessary to post anything on the blockchain. 27 | } 28 | 29 | // A signed operation, necessary to post anything on the blockchain. 30 | message SignedPrismOperation { 31 | string signed_with = 1; // The key ID used to sign the operation, it must belong to the DID that signs the operation. 32 | bytes signature = 2; // The actual signature. 33 | PrismOperation operation = 3; // The operation that was signed. 34 | } 35 | 36 | 37 | // The possible events/operations affecting the blockchain.
38 | message PrismOperation { 39 | // https://github.com/input-output-hk/atala-prism-sdk/blob/master/protosLib/src/main/proto/node_models.proto 40 | reserved 3, 4; // fields used by an extension of the protocol. Not relevant for the DID method 41 | // The actual operation. 42 | oneof operation { 43 | // Used to create a public DID. 44 | ProtoCreateDID create_did = 1; 45 | 46 | // Used to update an existing public DID. 47 | ProtoUpdateDID update_did = 2; 48 | 49 | // Used to announce new protocol update 50 | ProtoProtocolVersionUpdate protocol_version_update = 5; 51 | 52 | // Used to deactivate DID 53 | ProtoDeactivateDID deactivate_did = 6; 54 | 55 | // Used to create a public storage entry. 56 | ProtoCreateStorageEntry create_storage_entry = 7; 57 | 58 | // Used to update a storage entry. 59 | ProtoUpdateStorageEntry update_storage_entry = 8; 60 | 61 | // Used to deactivate a storage entry. 62 | ProtoDeactivateStorageEntry deactivate_storage_entry = 9; 63 | }; 64 | } 65 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "A rust implementation of PRISM node"; 3 | 4 | nixConfig = { 5 | extra-substituters = [ "https://cache.iog.io" ]; 6 | extra-trusted-public-keys = [ "hydra.iohk.io:f/Ea+s+dFdN+3Y/G+FDgSq+a5NEWhJGzdjvKNGv0/EQ=" ]; 7 | }; 8 | 9 | inputs = { 10 | nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; 11 | rust-overlay.url = "github:oxalica/rust-overlay"; 12 | sbt = { 13 | url = "github:zaninime/sbt-derivation"; 14 | inputs.nixpkgs.follows = "nixpkgs"; 15 | }; 16 | flake-utils.url = "github:numtide/flake-utils"; 17 | cardano-node.url = "github:IntersectMBO/cardano-node/10.5.1"; 18 | cardano-db-sync.url = "github:IntersectMBO/cardano-db-sync/13.6.0.5"; 19 | cardano-wallet.url = "github:cardano-foundation/cardano-wallet/v2025-03-31"; 20 | }; 21 | 22 | outputs = 23 | { 24 | self, 25 | nixpkgs, 26 | 
rust-overlay, 27 | sbt, 28 | flake-utils, 29 | cardano-node, 30 | cardano-db-sync, 31 | cardano-wallet, 32 | ... 33 | }: 34 | flake-utils.lib.eachSystem [ "x86_64-linux" "aarch64-darwin" ] ( 35 | system: 36 | let 37 | pkgs = import nixpkgs { 38 | inherit system; 39 | config.unfree = true; 40 | overlays = [ 41 | (import rust-overlay) 42 | (_: prev: { 43 | mkSbtDerivation = sbt.mkSbtDerivation.${system}; 44 | rustTools = prev.callPackage ./nix/rustTools.nix { inherit rust-overlay; }; 45 | pythonTools = prev.callPackage ./nix/pythonTools.nix { }; 46 | inherit (cardano-node.packages.${system}) 47 | cardano-cli 48 | cardano-node 49 | cardano-testnet 50 | cardano-submit-api 51 | ; 52 | inherit (cardano-wallet.packages.${system}) cardano-wallet; 53 | cardano-db-sync = cardano-db-sync.packages.${system}."cardano-db-sync:exe:cardano-db-sync"; 54 | }) 55 | ]; 56 | }; 57 | in 58 | { 59 | checks = import ./nix/checks/default.nix { inherit pkgs self; }; 60 | devShells = import ./nix/devShells/default.nix { inherit pkgs self; }; 61 | packages = import ./nix/packages/default.nix { inherit pkgs; }; 62 | } 63 | ); 64 | } 65 | -------------------------------------------------------------------------------- /nix/packages/cardano-testnet-docker.nix: -------------------------------------------------------------------------------- 1 | { 2 | bash, 3 | cardano-cli, 4 | cardano-node, 5 | cardano-testnet, 6 | coreutils, 7 | dockerTools, 8 | gawk, 9 | gnugrep, 10 | hurl, 11 | jq, 12 | tagSuffix ? 
"", 13 | writeShellApplication, 14 | }: 15 | 16 | let 17 | scripts = { 18 | initTestnet = writeShellApplication { 19 | name = "initTestnet"; 20 | text = '' 21 | cardano-testnet cardano \ 22 | --conway-era \ 23 | --testnet-magic "$CARDANO_NODE_NETWORK_ID" \ 24 | --active-slots-coeff 0.1 \ 25 | --epoch-length 60 \ 26 | --output-dir testnet 27 | ''; 28 | }; 29 | transactGenesis = writeShellApplication { 30 | name = "transactGenesis"; 31 | text = '' 32 | mkdir -p transactions 33 | 34 | echo "Getting utxos" 35 | TX_IN="$(cardano-cli conway query utxo --address "$(cat ./testnet/utxo-keys/utxo1/utxo.addr)" | jq -r '. | keys[]')" 36 | 37 | echo "Drafting transaction" 38 | cardano-cli conway transaction build \ 39 | --tx-in "$TX_IN" \ 40 | --tx-out "$GENESIS_PAYMENT_ADDR+100000000000" \ 41 | --change-address "$GENESIS_PAYMENT_ADDR" \ 42 | --out-file transactions/tx.raw 43 | 44 | echo "Signing transaction" 45 | cardano-cli conway transaction sign \ 46 | --tx-body-file transactions/tx.raw \ 47 | --signing-key-file ./testnet/utxo-keys/utxo1/utxo.skey \ 48 | --out-file transactions/tx.signed 49 | 50 | echo "Submitting transaction" 51 | cardano-cli conway transaction submit --tx-file transactions/tx.signed 52 | ''; 53 | }; 54 | }; 55 | basePackages = [ 56 | bash 57 | coreutils 58 | gawk 59 | gnugrep 60 | hurl 61 | jq 62 | ]; 63 | cardanoPackages = [ 64 | cardano-cli 65 | cardano-node 66 | cardano-testnet 67 | ]; 68 | in 69 | dockerTools.buildLayeredImage { 70 | name = "cardano-testnet"; 71 | tag = "latest${tagSuffix}"; 72 | contents = basePackages ++ cardanoPackages ++ (builtins.attrValues scripts); 73 | config = { 74 | Env = [ 75 | "CARDANO_CLI=${cardano-cli}/bin/cardano-cli" 76 | "CARDANO_NODE=${cardano-node}/bin/cardano-node" 77 | ]; 78 | Entrypoint = [ ]; 79 | Cmd = [ ]; 80 | WorkingDir = "/node"; 81 | }; 82 | } 83 | -------------------------------------------------------------------------------- /lib/did-prism/tests/ssi_operation.rs: 
-------------------------------------------------------------------------------- 1 | use identus_apollo::crypto::secp256k1::Secp256k1PrivateKey; 2 | use identus_did_prism::did::operation::KeyUsage; 3 | use identus_did_prism::proto; 4 | use identus_did_prism::protocol::resolver; 5 | 6 | mod test_utils; 7 | 8 | #[test] 9 | fn create_did_only_master_key() { 10 | let (create_did_op, _, _) = test_utils::new_create_did_operation(None); 11 | 12 | let operations = test_utils::populate_metadata(vec![create_did_op]); 13 | let state = resolver::resolve_published(operations).0.unwrap(); 14 | 15 | let master_key = state 16 | .public_keys 17 | .iter() 18 | .find(|pk| pk.id.as_str() == "master-0") 19 | .unwrap(); 20 | 21 | assert_eq!(state.services.len(), 0); 22 | assert_eq!(state.context.len(), 0); 23 | assert_eq!(state.storage.len(), 0); 24 | assert_eq!(state.public_keys.len(), 1); 25 | assert_eq!(master_key.data.usage(), KeyUsage::MasterKey); 26 | } 27 | 28 | #[test] 29 | fn create_did_with_non_master_key() { 30 | let vdr_sk = Secp256k1PrivateKey::from_slice(&[2; 32]).unwrap(); 31 | let auth_sk = Secp256k1PrivateKey::from_slice(&[3; 32]).unwrap(); 32 | let options = test_utils::CreateDidOptions { 33 | public_keys: Some(vec![ 34 | test_utils::new_public_key("vdr-0", proto::prism_ssi::KeyUsage::VDR_KEY, &vdr_sk), 35 | test_utils::new_public_key("auth-0", proto::prism_ssi::KeyUsage::AUTHENTICATION_KEY, &auth_sk), 36 | ]), 37 | ..Default::default() 38 | }; 39 | let (create_did_op, _, _) = test_utils::new_create_did_operation(Some(options)); 40 | 41 | let operations = test_utils::populate_metadata(vec![create_did_op]); 42 | let state = resolver::resolve_published(operations).0.unwrap(); 43 | 44 | let vdr_key = state.public_keys.iter().find(|pk| pk.id.as_str() == "vdr-0").unwrap(); 45 | let auth_key = state.public_keys.iter().find(|pk| pk.id.as_str() == "auth-0").unwrap(); 46 | let master_key = state 47 | .public_keys 48 | .iter() 49 | .find(|pk| pk.id.as_str() == "master-0") 50 | 
.unwrap(); 51 | 52 | assert_eq!(state.public_keys.len(), 3); 53 | assert_eq!(master_key.data.usage(), KeyUsage::MasterKey); 54 | assert_eq!(vdr_key.data.usage(), KeyUsage::VdrKey); 55 | assert_eq!(auth_key.data.usage(), KeyUsage::AuthenticationKey); 56 | } 57 | -------------------------------------------------------------------------------- /bin/neoprism-node/src/http/features/api/submitter.rs: -------------------------------------------------------------------------------- 1 | use axum::Json; 2 | use axum::extract::State; 3 | use utoipa::OpenApi; 4 | 5 | use crate::SubmitterState; 6 | use crate::http::features::api::error::{ApiError, ApiErrorResponseBody}; 7 | use crate::http::features::api::submitter::models::{ 8 | SignedOperationSubmissionRequest, SignedOperationSubmissionResponse, 9 | }; 10 | use crate::http::features::api::tags; 11 | use crate::http::urls; 12 | 13 | #[derive(OpenApi)] 14 | #[openapi(paths(submit_signed_operations))] 15 | pub struct SubmitterOpenApiDoc; 16 | 17 | mod models { 18 | use identus_did_prism::did::operation::SignedPrismOperationHexStr; 19 | use identus_did_prism::dlt::TxId; 20 | use serde::{Deserialize, Serialize}; 21 | use utoipa::ToSchema; 22 | 23 | #[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] 24 | pub struct SignedOperationSubmissionRequest { 25 | pub signed_operations: Vec, 26 | } 27 | 28 | #[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] 29 | pub struct SignedOperationSubmissionResponse { 30 | pub tx_id: TxId, 31 | } 32 | } 33 | 34 | #[utoipa::path( 35 | post, 36 | path = urls::ApiSignedOpSubmissions::AXUM_PATH, 37 | tags = [tags::OP_SUBMIT], 38 | request_body = SignedOperationSubmissionRequest, 39 | responses( 40 | (status = OK, description = "Operations submitted successfully", body = SignedOperationSubmissionResponse), 41 | (status = BAD_REQUEST, description = "Malformed request or invalid operations", body = ApiErrorResponseBody, content_type = "application/json"), 42 | (status = INTERNAL_SERVER_ERROR, 
description = "An unexpected error occurred during submission", body = ApiErrorResponseBody, content_type = "application/json"), 43 | ) 44 | )] 45 | pub async fn submit_signed_operations( 46 | State(state): State, 47 | Json(req): Json, 48 | ) -> Result, ApiError> { 49 | let ops = req.signed_operations.into_iter().map(|i| i.into()).collect(); 50 | let result = state.dlt_sink.publish_operations(ops).await; 51 | match result { 52 | Ok(tx_id) => Ok(Json(SignedOperationSubmissionResponse { tx_id })), 53 | Err(e) => Err(ApiError::Internal { 54 | source: anyhow::anyhow!(e), 55 | }), 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /tools/compose_gen/services/cardano_node.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | 3 | from ..models import Healthcheck, Service, ServiceDependency 4 | 5 | IMAGE = "patextreme/cardano-testnet:20251111-145358" 6 | 7 | 8 | class NodeOptions(BaseModel): 9 | network_magic: int 10 | testnet_volume: str 11 | 12 | 13 | class BootstrapOptions(BaseModel): 14 | network_magic: int 15 | testnet_volume: str 16 | cardano_node_host: str 17 | wallet_base_url: str 18 | wallet_passphrase: str 19 | wallet_payment_address: str 20 | init_wallet_hurl_file: str 21 | init_did_hurl_file: str 22 | 23 | 24 | def mk_node_service(options: NodeOptions) -> Service: 25 | return Service( 26 | image=IMAGE, 27 | restart=None, 28 | command=["initTestnet"], 29 | volumes=[f"{options.testnet_volume}:/node/testnet"], 30 | environment={ 31 | "CARDANO_NODE_SOCKET_PATH": "/node/testnet/socket/node1/sock", 32 | "CARDANO_NODE_NETWORK_ID": str(options.network_magic), 33 | }, 34 | healthcheck=Healthcheck(test=["CMD-SHELL", "cardano-cli query tip"]), 35 | ) 36 | 37 | 38 | def mk_bootstrap_service(options: BootstrapOptions) -> Service: 39 | return Service( 40 | image=IMAGE, 41 | restart=None, 42 | volumes=[ 43 | f"{options.testnet_volume}:/node/testnet", 44 | 
f"{options.init_wallet_hurl_file}:/node/init-wallet.hurl", 45 | f"{options.init_did_hurl_file}:/node/init-did.hurl", 46 | ], 47 | command=[ 48 | "bash", 49 | "-c", 50 | """transactGenesis 51 | hurl ./init-wallet.hurl 52 | hurl ./init-did.hurl 53 | 54 | # blockfrost-ryo expects a different location 55 | cp testnet/conway-genesis.json testnet/genesis.json 56 | cp testnet/byron-genesis.json testnet/byron_genesis.json 57 | """, 58 | ], 59 | environment={ 60 | "HURL_WALLET_BASE_URL": options.wallet_base_url, 61 | "HURL_WALLET_PASSPHRASE": options.wallet_passphrase, 62 | "GENESIS_PAYMENT_ADDR": options.wallet_payment_address, 63 | "CARDANO_NODE_SOCKET_PATH": "/node/testnet/socket/node1/sock", 64 | "CARDANO_NODE_NETWORK_ID": str(options.network_magic), 65 | }, 66 | depends_on={ 67 | options.cardano_node_host: ServiceDependency(condition="service_healthy") 68 | }, 69 | ) 70 | -------------------------------------------------------------------------------- /tools/compose_gen/services/ryo.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | 3 | from ..models import Service, ServiceDependency 4 | 5 | IMAGE = "blockfrost/backend-ryo:v4.3.0" 6 | 7 | 8 | class DbSyncDbArgs(BaseModel): 9 | host: str 10 | port: str 11 | db_name: str 12 | username: str 13 | password: str 14 | 15 | 16 | class Options(BaseModel): 17 | host_port: int | None = None 18 | dbsync_db: DbSyncDbArgs 19 | network: str = "mainnet" 20 | testnet_volume: str | None = None 21 | config_file: str 22 | bootstrap_testnet_host: str | None = None 23 | wait_for_db_sync: bool = True 24 | genesis_data_folder: str | None = "/node/testnet" 25 | 26 | 27 | def mk_service(options: Options) -> Service: 28 | ports = [f"{options.host_port}:3000"] if options.host_port else None 29 | 30 | # Build volumes 31 | volumes = [f"{options.config_file}:/app/config/development.yaml"] 32 | if options.testnet_volume: 33 | 
volumes.append(f"{options.testnet_volume}:/node/testnet") 34 | 35 | # Build environment 36 | environment = { 37 | "BLOCKFROST_CONFIG_DBSYNC_HOST": options.dbsync_db.host, 38 | "BLOCKFROST_CONFIG_DBSYNC_PORT": options.dbsync_db.port, 39 | "BLOCKFROST_CONFIG_DBSYNC_DATABASE": options.dbsync_db.db_name, 40 | "BLOCKFROST_CONFIG_DBSYNC_USER": options.dbsync_db.username, 41 | "BLOCKFROST_CONFIG_DBSYNC_PASSWORD": options.dbsync_db.password, 42 | "BLOCKFROST_CONFIG_NETWORK": options.network, 43 | "BLOCKFROST_MITHRIL_ENABLED": "false", 44 | "NODE_ENV": "development", 45 | } 46 | 47 | if options.genesis_data_folder: 48 | environment["BLOCKFROST_CONFIG_GENESIS_DATA_FOLDER"] = ( 49 | options.genesis_data_folder 50 | ) 51 | 52 | # Build depends_on 53 | depends_on: dict[str, ServiceDependency] = {} 54 | 55 | if options.wait_for_db_sync: 56 | depends_on[options.dbsync_db.host] = ServiceDependency( 57 | condition="service_healthy" 58 | ) 59 | 60 | if options.bootstrap_testnet_host: 61 | depends_on[options.bootstrap_testnet_host] = ServiceDependency( 62 | condition="service_completed_successfully" 63 | ) 64 | 65 | return Service( 66 | image=IMAGE, 67 | ports=ports, 68 | environment=environment, 69 | volumes=volumes, 70 | depends_on=depends_on if depends_on else None, 71 | ) 72 | -------------------------------------------------------------------------------- /lib/did-prism/src/protocol/resolver.rs: -------------------------------------------------------------------------------- 1 | use std::collections::VecDeque; 2 | 3 | use super::{OperationProcessingContext, ProcessError, Published, init_published_context}; 4 | use crate::did::DidState; 5 | use crate::dlt::OperationMetadata; 6 | use crate::prelude::*; 7 | use crate::protocol::init_unpublished_context; 8 | 9 | type OperationList = VecDeque<(OperationMetadata, SignedPrismOperation)>; 10 | pub type ResolutionDebug = Vec<(OperationMetadata, SignedPrismOperation, Option)>; 11 | 12 | pub fn resolve_unpublished(operation: 
PrismOperation) -> Result { 13 | tracing::debug!("resolving unpublished DID data"); 14 | init_unpublished_context(operation).map(|ctx| ctx.finalize()) 15 | } 16 | 17 | pub fn resolve_published( 18 | mut operations: Vec<(OperationMetadata, SignedPrismOperation)>, 19 | ) -> (Option, ResolutionDebug) { 20 | tracing::debug!("resolving published DID data from {} operations", operations.len()); 21 | operations.sort_by(|a, b| OperationMetadata::compare_time_asc(&a.0, &b.0)); 22 | let mut operations: OperationList = operations.into(); 23 | 24 | // Initialize first valid CreateOperation 25 | let (state_ctx, mut debug) = init_state_ops(&mut operations); 26 | let Some(mut state_ctx) = state_ctx else { 27 | return (None, debug); 28 | }; 29 | 30 | // Iterate all remaining operations and apply new state 31 | while let Some((metadata, operation)) = operations.pop_front() { 32 | let (new_ctx, error) = state_ctx.process(operation.clone(), metadata.clone()); 33 | state_ctx = new_ctx; 34 | debug.push((metadata, operation, error)); 35 | } 36 | 37 | (Some(state_ctx.finalize()), debug) 38 | } 39 | 40 | fn init_state_ops(operations: &mut OperationList) -> (Option>, ResolutionDebug) { 41 | let mut debug = Vec::with_capacity(operations.len()); 42 | while let Some((metadata, operation)) = operations.pop_front() { 43 | let result = init_published_context(operation.clone(), metadata.clone()); 44 | match result { 45 | Ok(state_ctx) => { 46 | debug.push((metadata, operation, None)); 47 | return (Some(state_ctx), debug); 48 | } 49 | Err(e) => { 50 | tracing::debug!("unable to initialize DIDState from operation: {:?}", e); 51 | debug.push((metadata, operation, Some(e))); 52 | } 53 | } 54 | } 55 | (None, debug) 56 | } 57 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | resolver = "3" 3 | members = [ 4 | "bin/neoprism-node", 5 | "lib/apollo", 6 | 
"lib/did-core", 7 | 8 | "lib/did-prism", 9 | "lib/did-prism-indexer", 10 | "lib/did-prism-ledger", 11 | "lib/did-prism-submitter", 12 | "lib/did-resolver-http", 13 | "lib/node-storage", 14 | ] 15 | 16 | [workspace.package] 17 | version = "0.8.0" 18 | edition = "2024" 19 | license = "Apache-2.0" 20 | 21 | [workspace.dependencies] 22 | # internal 23 | identus-apollo = { path = "lib/apollo" } 24 | identus-did-core = { path = "lib/did-core" } 25 | 26 | identus-did-prism = { path = "lib/did-prism" } 27 | identus-did-prism-indexer = { path = "lib/did-prism-indexer" } 28 | identus-did-prism-ledger = { path = "lib/did-prism-ledger" } 29 | identus-did-prism-submitter = { path = "lib/did-prism-submitter" } 30 | identus-did-resolver-http = { path = "lib/did-resolver-http" } 31 | node-storage = { path = "lib/node-storage" } 32 | # general 33 | anyhow = { version = "1" } 34 | async-trait = { version = "0.1" } 35 | axum = { version = "0.8" } 36 | chrono = { version = "0.4" } 37 | clap = { version = "4" } 38 | derive_more = { version = "2" } 39 | enum_dispatch = { version = "0.3" } 40 | graphql_client = { version = "0.14" } 41 | im-rc = { version = "15" } 42 | lazybe = { version = "0.2" } 43 | maud = { version = "0.27" } 44 | regex = { version = "1" } 45 | reqwest = { version = "0.12", default-features = false } 46 | serde = { version = "1" } 47 | serde_json = { version = "1" } 48 | strum = { version = "0.27" } 49 | tokio = { version = "1" } 50 | tower = { version = "0.5" } 51 | tower-http = { version = "0.6" } 52 | tracing = { version = "0.1" } 53 | tracing-subscriber = { version = "0.3" } 54 | ts-rs = { version = "11.0" } 55 | uriparse = { version = "0.6" } 56 | utoipa = { version = "5" } 57 | uuid = { version = "1" } 58 | # db 59 | sqlx = { version = "0.8" } 60 | sea-query = { version = "0.32" } 61 | dirs = { version = "5" } 62 | tempfile = { version = "3" } 63 | # cardano 64 | oura = { git = "https://github.com/patextreme/oura.git", rev = 
"3546c03dac9fac8b5173332c1fe5122882e7351e" } 65 | pallas-primitives = { version = "0.30" } 66 | # proto 67 | protobuf = { version = "3" } 68 | protobuf-codegen = { version = "3" } 69 | -------------------------------------------------------------------------------- /lib/node-storage/migrations/sqlite/20250424101829_migrate_from_seaorm.sql: -------------------------------------------------------------------------------- 1 | PRAGMA foreign_keys = ON; 2 | 3 | DROP VIEW IF EXISTS did_stats; 4 | DROP VIEW IF EXISTS raw_operation_by_did; 5 | DROP TABLE IF EXISTS indexed_vdr_operation; 6 | DROP TABLE IF EXISTS indexed_ssi_operation; 7 | DROP TABLE IF EXISTS raw_operation; 8 | DROP TABLE IF EXISTS dlt_cursor; 9 | 10 | CREATE TABLE IF NOT EXISTS dlt_cursor ( 11 | id BLOB PRIMARY KEY DEFAULT (randomblob(16)), 12 | slot INTEGER NOT NULL, 13 | block_hash BLOB NOT NULL, 14 | UNIQUE(slot, block_hash) 15 | ); 16 | 17 | CREATE TABLE IF NOT EXISTS raw_operation ( 18 | id BLOB PRIMARY KEY DEFAULT (randomblob(16)), 19 | signed_operation_data BLOB NOT NULL, 20 | slot INTEGER NOT NULL, 21 | block_number INTEGER NOT NULL, 22 | cbt TEXT NOT NULL, 23 | absn INTEGER NOT NULL, 24 | osn INTEGER NOT NULL, 25 | is_indexed INTEGER NOT NULL DEFAULT 0, 26 | UNIQUE(block_number, absn, osn) 27 | ); 28 | 29 | CREATE TABLE IF NOT EXISTS indexed_ssi_operation ( 30 | id BLOB PRIMARY KEY DEFAULT (randomblob(16)), 31 | raw_operation_id BLOB NOT NULL UNIQUE, 32 | did BLOB NOT NULL, 33 | indexed_at TEXT NOT NULL, 34 | FOREIGN KEY (raw_operation_id) REFERENCES raw_operation(id) ON DELETE CASCADE 35 | ); 36 | 37 | CREATE TABLE IF NOT EXISTS indexed_vdr_operation ( 38 | id BLOB PRIMARY KEY DEFAULT (randomblob(16)), 39 | raw_operation_id BLOB NOT NULL UNIQUE, 40 | operation_hash BLOB NOT NULL, 41 | init_operation_hash BLOB NOT NULL, 42 | prev_operation_hash BLOB, 43 | did BLOB NOT NULL, 44 | indexed_at TEXT NOT NULL, 45 | FOREIGN KEY (raw_operation_id) REFERENCES raw_operation(id) ON DELETE CASCADE 46 | ); 47 
| 48 | CREATE VIEW raw_operation_by_did AS 49 | WITH unioned AS ( 50 | SELECT did, raw_operation_id FROM indexed_ssi_operation 51 | UNION 52 | SELECT did, raw_operation_id FROM indexed_vdr_operation 53 | ) 54 | SELECT 55 | ro.id, 56 | ro.signed_operation_data, 57 | ro.slot, 58 | ro.block_number, 59 | ro.cbt, 60 | ro.absn, 61 | ro.osn, 62 | ro.is_indexed, 63 | u.did 64 | FROM unioned AS u 65 | LEFT JOIN raw_operation AS ro ON u.raw_operation_id = ro.id; 66 | 67 | CREATE VIEW did_stats AS 68 | SELECT 69 | did, 70 | COUNT(*) AS operation_count, 71 | MAX(block_number) AS last_block, 72 | MAX(slot) AS last_slot, 73 | MAX(cbt) AS last_cbt, 74 | MIN(block_number) AS first_block, 75 | MIN(slot) AS first_slot, 76 | MIN(cbt) AS first_cbt 77 | FROM raw_operation_by_did 78 | GROUP BY 1; 79 | -------------------------------------------------------------------------------- /lib/apollo/src/hex.rs: -------------------------------------------------------------------------------- 1 | use std::str::FromStr; 2 | 3 | #[derive(Debug, derive_more::Display, derive_more::Error)] 4 | #[display("unable to hex decode '{value}' to type {type_name}")] 5 | pub struct Error { 6 | source: hex::FromHexError, 7 | type_name: &'static str, 8 | value: String, 9 | } 10 | 11 | /// # Example 12 | /// ``` 13 | /// use identus_apollo::hex::HexStr; 14 | /// 15 | /// let b = b"hello world"; 16 | /// let hexstr = HexStr::from(b); 17 | /// assert!(hexstr.to_string() == "68656c6c6f20776f726c64"); 18 | /// ``` 19 | #[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::Display, derive_more::Into, derive_more::AsRef)] 20 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 21 | #[cfg_attr(feature = "openapi", derive(utoipa::ToSchema))] 22 | pub struct HexStr(#[cfg_attr(feature = "serde", serde(deserialize_with = "serde_impl::deserialize_hex"))] String); 23 | 24 | impl HexStr { 25 | pub fn to_bytes(&self) -> Vec { 26 | hex::decode(self.as_ref()) 27 | .unwrap_or_else(|_| unreachable!("{} 
should be a valid hex string", std::any::type_name::())) 28 | } 29 | } 30 | 31 | impl> From for HexStr { 32 | fn from(value: B) -> Self { 33 | Self(hex::encode(value.as_ref())) 34 | } 35 | } 36 | 37 | /// # Example 38 | /// ``` 39 | /// use std::str::FromStr; 40 | /// 41 | /// use identus_apollo::hex::{Error, HexStr}; 42 | /// 43 | /// let hexstr = HexStr::from_str("68656c6c6f20776f726c64").unwrap(); 44 | /// assert_eq!(hexstr, HexStr::from(b"hello world")); 45 | /// 46 | /// let hexstr = HexStr::from_str("invalid"); 47 | /// assert!(hexstr.is_err()); 48 | /// ``` 49 | impl FromStr for HexStr { 50 | type Err = Error; 51 | 52 | fn from_str(s: &str) -> Result { 53 | let bytes = hex::decode(s).map_err(|e| Error { 54 | source: e, 55 | type_name: std::any::type_name::(), 56 | value: s.to_string(), 57 | })?; 58 | Ok(bytes.as_slice().into()) 59 | } 60 | } 61 | 62 | #[cfg(feature = "serde")] 63 | mod serde_impl { 64 | use std::str::FromStr; 65 | 66 | use serde::{Deserialize, Deserializer}; 67 | 68 | use super::HexStr; 69 | 70 | pub fn deserialize_hex<'de, D>(deserializer: D) -> Result 71 | where 72 | D: Deserializer<'de>, 73 | { 74 | let raw = String::deserialize(deserializer)?; 75 | HexStr::from_str(&raw) 76 | .map(|i| i.to_string()) 77 | .map_err(|e| serde::de::Error::custom(e.to_string())) 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /tools/compose_gen/services/prism_node.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | 3 | from ..models import Service, ServiceDependency 4 | 5 | IMAGE = "ghcr.io/input-output-hk/prism-node:2.6.1" 6 | 7 | 8 | class Options(BaseModel): 9 | image_override: str | None = None 10 | node_db_host: str 11 | db_sync_db_host: str 12 | bootstrap_testnet_host: str 13 | wallet_api_host: str 14 | wallet_api_port: int = 8090 15 | wallet_passphrase: str 16 | wallet_id: str 17 | wallet_payment_address: str 18 | host_port: int | 
None = None 19 | confirmation_blocks: int = 112 20 | 21 | 22 | def mk_service(options: Options) -> Service: 23 | image = options.image_override or IMAGE 24 | ports = [f"{options.host_port}:50053"] if options.host_port else None 25 | 26 | return Service( 27 | image=image, 28 | ports=ports, 29 | environment={ 30 | "NODE_PSQL_HOST": f"{options.node_db_host}:5432", 31 | "NODE_PSQL_DATABASE": "postgres", 32 | "NODE_PSQL_USERNAME": "postgres", 33 | "NODE_PSQL_PASSWORD": "postgres", 34 | "NODE_LEDGER": "cardano", 35 | "NODE_CARDANO_CONFIRMATION_BLOCKS": str(options.confirmation_blocks), 36 | "NODE_REFRESH_AND_SUBMIT_PERIOD": "1s", 37 | "NODE_MOVE_SCHEDULED_TO_PENDING_PERIOD": "1s", 38 | "NODE_SCHEDULE_SYNC_PERIOD": "1s", 39 | "NODE_CARDANO_NETWORK": "testnet", 40 | "NODE_CARDANO_WALLET_PASSPHRASE": options.wallet_passphrase, 41 | "NODE_CARDANO_WALLET_ID": options.wallet_id, 42 | "NODE_CARDANO_PAYMENT_ADDRESS": options.wallet_payment_address, 43 | "NODE_CARDANO_WALLET_API_HOST": options.wallet_api_host, 44 | "NODE_CARDANO_WALLET_API_PORT": str(options.wallet_api_port), 45 | "NODE_CARDANO_PRISM_GENESIS_BLOCK": "0", 46 | "NODE_CARDANO_DB_SYNC_HOST": f"{options.db_sync_db_host}:5432", 47 | "NODE_CARDANO_DB_SYNC_DATABASE": "postgres", 48 | "NODE_CARDANO_DB_SYNC_USERNAME": "postgres", 49 | "NODE_CARDANO_DB_SYNC_PASSWORD": "postgres", 50 | }, 51 | depends_on={ 52 | options.node_db_host: ServiceDependency(condition="service_healthy"), 53 | options.db_sync_db_host: ServiceDependency(condition="service_healthy"), 54 | options.wallet_api_host: ServiceDependency(condition="service_healthy"), 55 | options.bootstrap_testnet_host: ServiceDependency( 56 | condition="service_completed_successfully" 57 | ), 58 | }, 59 | ) 60 | -------------------------------------------------------------------------------- /lib/did-prism/src/did/operation/mod.rs: -------------------------------------------------------------------------------- 1 | mod ssi; 2 | mod storage; 3 | 4 | use std::str::FromStr; 5 | 
6 | use identus_apollo::hex::HexStr; 7 | use serde::{Deserialize, Deserializer, Serialize, Serializer}; 8 | pub use ssi::*; 9 | pub use storage::*; 10 | 11 | use crate::prelude::SignedPrismOperation; 12 | use crate::proto::MessageExt; 13 | 14 | #[derive(Debug, Clone)] 15 | pub struct OperationParameters { 16 | pub max_services: usize, 17 | pub max_public_keys: usize, 18 | pub max_id_size: usize, 19 | pub max_type_size: usize, 20 | pub max_service_endpoint_size: usize, 21 | } 22 | 23 | impl OperationParameters { 24 | pub fn v1() -> Self { 25 | Self { 26 | max_services: 50, 27 | max_public_keys: 50, 28 | max_id_size: 50, 29 | max_type_size: 100, 30 | max_service_endpoint_size: 300, 31 | } 32 | } 33 | } 34 | 35 | #[derive(Debug, Clone, Serialize, Deserialize, derive_more::From, derive_more::Into)] 36 | #[cfg_attr(feature = "openapi", derive(utoipa::ToSchema))] 37 | #[cfg_attr(feature = "openapi", schema(description = "A hexadecimal string representing a SignedPrismOperation", value_type = String, example = "0a086d61737465722d30124630440220442eec28ec60464acd8df155e73f88a1c7faf4549975582ff0601449525aba31022019257250071818066b377b83a8b1765df1b7dc21d9bccfc7d5da036801d3ba0e1a420a400a3e123c0a086d61737465722d3010014a2e0a09736563703235366b3112210398e61c14328a6a844eec6dc084b825ae8525f10204e9244aaf61260bd221a457"))] 38 | pub struct SignedPrismOperationHexStr( 39 | #[serde( 40 | serialize_with = "SignedPrismOperationHexStr::serialize", 41 | deserialize_with = "SignedPrismOperationHexStr::deserialize" 42 | )] 43 | SignedPrismOperation, 44 | ); 45 | 46 | impl SignedPrismOperationHexStr { 47 | fn serialize(op: &SignedPrismOperation, serializer: S) -> Result 48 | where 49 | S: Serializer, 50 | { 51 | let hex_str = HexStr::from(&op.encode_to_vec()); 52 | serializer.serialize_str(&hex_str.to_string()) 53 | } 54 | 55 | fn deserialize<'de, D>(deserializer: D) -> Result 56 | where 57 | D: Deserializer<'de>, 58 | { 59 | let hex_str = String::deserialize(deserializer)?; 60 | let bytes = 
HexStr::from_str(&hex_str) 61 | .map_err(|e| serde::de::Error::custom(format!("Value is not a valid hex: {e}")))?; 62 | let op = SignedPrismOperation::decode(&bytes.to_bytes()) 63 | .map_err(|e| serde::de::Error::custom(format!("Value cannot be decoded to SignedPrismOperation: {e}")))?; 64 | Ok(op) 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /lib/did-prism/proto/node-api.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package io.iohk.atala.prism.protos; 4 | 5 | import "prism.proto"; 6 | import "prism-ssi.proto"; 7 | 8 | service NodeService { 9 | rpc GetDidDocument(GetDidDocumentRequest) returns (GetDidDocumentResponse) {} 10 | rpc ScheduleOperations(ScheduleOperationsRequest) returns (ScheduleOperationsResponse) {} 11 | rpc GetOperationInfo(GetOperationInfoRequest) returns (GetOperationInfoResponse) {} 12 | } 13 | 14 | message GetDidDocumentRequest { 15 | string did = 1; // The DID. 16 | } 17 | 18 | message GetDidDocumentResponse { 19 | DIDData document = 1; // The DID Document. 20 | } 21 | 22 | message DIDData { 23 | string id = 1; // The DID suffix, where DID is in form did:prism:[DID suffix] 24 | repeated proto.PublicKey public_keys = 2; // The keys that belong to this DID Document. 25 | repeated proto.Service services = 3; // The list of services that belong to this DID Document. 
26 | repeated string context = 4; // The list of @context values to consider on JSON-LD representations 27 | repeated StorageData storage_data = 5; 28 | } 29 | 30 | message StorageData { 31 | bytes init_operation_hash = 1; 32 | bytes prev_operation_hash = 2; 33 | oneof data { 34 | bytes bytes = 100; 35 | } 36 | } 37 | 38 | message ScheduleOperationsRequest { 39 | repeated proto.SignedPrismOperation signed_operations = 1; // a list of signed operations 40 | } 41 | 42 | message ScheduleOperationsResponse { 43 | repeated OperationOutput outputs = 1; 44 | } 45 | 46 | message OperationOutput { 47 | oneof operation_maybe { 48 | bytes operation_id = 5; // Operation identifier. 49 | string error = 6; // Error description if the PRISM Node service hasn't scheduled the operation. 50 | } 51 | } 52 | 53 | message GetOperationInfoRequest { 54 | bytes operation_id = 1; // Operation identifier. The identifier is returned to the corresponding operation request. 55 | } 56 | 57 | message GetOperationInfoResponse { 58 | OperationStatus operation_status = 1; // Contains the status of this operation. 59 | } 60 | 61 | enum OperationStatus { 62 | UNKNOWN_OPERATION = 0; // The operation hasn't been received by the node service yet. 63 | PENDING_SUBMISSION = 1; // The transaction containing this operation hasn't been published to the chain yet. 64 | AWAIT_CONFIRMATION = 2; // The transaction containing this operation has been published to the chain, but hasn't been processed by PRISM yet. 65 | CONFIRMED_AND_APPLIED = 3; // The operation has been successfully applied to PRISM. 66 | CONFIRMED_AND_REJECTED = 4; // The operation has been processed by PRISM, but rejected because of some error.
67 | } 68 | -------------------------------------------------------------------------------- /docs/src/configuration/database.md: -------------------------------------------------------------------------------- 1 | # Database Backends 2 | 3 | NeoPRISM can persist indexed operations in PostgreSQL or in an embedded SQLite file. The backend is inferred from the `NPRISM_DB_URL` / `--db-url` scheme: 4 | 5 | ``` 6 | NPRISM_DB_URL=postgres://user:pass@host:5432/db 7 | NPRISM_DB_URL=sqlite:///absolute/path/to/neoprism.db 8 | ``` 9 | 10 | If you omit `NPRISM_DB_URL`, NeoPRISM defaults to a SQLite database under your platform app-data directory (for example `~/Library/Application Support/NeoPRISM//neoprism.db` on macOS). 11 | 12 | ## Comparison 13 | 14 | | Mode | Recommended usage | Pros | Trade-offs | 15 | |------|-------------------|------|------------| 16 | | PostgreSQL | Production deployments or any scenario that needs horizontal scaling and concurrent writers | Battle-tested RDBMS, works with existing replicas/backups, matches historical NeoPRISM behavior | Requires a managed Postgres instance; compose stacks need the extra container | 17 | | SQLite | Local development, demos, CI smoke tests, single-node appliances | No external service, tiny footprint, file is bundled with backups | Single writer, WAL/locking semantics, best kept to one running node | 18 | 19 | ## PostgreSQL specifics 20 | 21 | - Provide `NPRISM_DB_URL` / `--db-url` in standard libpq form (`postgres://user:pass@host:port/db`). 22 | - The helper targets `just postgres-up`, `postgres-down`, `postgres-dump`, and `postgres-restore` spin up and manage a Dockerized Postgres instance for local work. 23 | - All pre-existing migrations live under `lib/node-storage/migrations/postgres` and continue to be linted via `sqlfluff`. 24 | - The Docker images and compose stacks ship with PostgreSQL enabled so existing deployments do not need any additional flags. 
25 | 26 | ## SQLite specifics 27 | 28 | - Provide `NPRISM_DB_URL=sqlite://...` (or `--db-url`) to point at a specific file, or omit it to use the default app-data location (for example `~/Library/Application Support/NeoPRISM//neoprism.db` on macOS). 29 | - The helper commands `just sqlite-init` and `just sqlite-clean` manage migrations for the default file under `data/sqlite/`. 30 | - The parent directory is created with `700` permissions on Unix hosts to keep the file private. 31 | - SQLite enforces WAL mode automatically, but remember that only one process should write to the file at a time. Schedule periodic `VACUUM` runs if you prune large chunks of data. 32 | 33 | ## Testing both backends 34 | 35 | The e2e suite and the `just full-check` command exercise both PostgreSQL and SQLite compose stacks (`dev`, `dev-sqlite`, `ci`, `ci-sqlite`). When troubleshooting, you can run any stack in isolation: 36 | 37 | ```bash 38 | just e2e::up dev-sqlite 39 | (cd tests/prism-test && sbt test) 40 | just e2e::down dev-sqlite 41 | ``` 42 | 43 | See the [PRISM specification tests](../prism-test/README.md) section for more detail. 
44 | -------------------------------------------------------------------------------- /bin/neoprism-node/src/app/service/error.rs: -------------------------------------------------------------------------------- 1 | use identus_did_core::{DidResolutionError, DidResolutionErrorCode, DidResolutionMetadata, ResolutionResult}; 2 | use identus_did_prism::{did, protocol}; 3 | 4 | #[derive(Debug, derive_more::From, derive_more::Display, derive_more::Error)] 5 | pub enum ResolutionError { 6 | #[from] 7 | #[display("invalid did input")] 8 | InvalidDid { source: InvalidDid }, 9 | #[display("did is not found")] 10 | NotFound, 11 | #[from] 12 | #[display("unexpected server error")] 13 | InternalError { source: anyhow::Error }, 14 | #[display("did resolution is not supported for this did method")] 15 | MethodNotSupported, 16 | } 17 | 18 | impl ResolutionError { 19 | pub fn log_internal_error(&self) { 20 | if let ResolutionError::InternalError { source } = self { 21 | let msg = source.chain().map(|e| e.to_string()).collect::>().join("\n"); 22 | tracing::error!("{msg}"); 23 | } 24 | } 25 | } 26 | 27 | #[derive(Debug, derive_more::From, derive_more::Display, derive_more::Error)] 28 | pub enum InvalidDid { 29 | #[from] 30 | #[display("failed to process did state from did")] 31 | ProcessStateFailed { source: protocol::error::ProcessError }, 32 | #[from] 33 | #[display("failed to parse prism did")] 34 | InvalidPrismDid { source: did::Error }, 35 | } 36 | 37 | impl From for ResolutionResult { 38 | fn from(err: ResolutionError) -> Self { 39 | let error = match err { 40 | ResolutionError::InvalidDid { .. 
} => DidResolutionError { 41 | r#type: DidResolutionErrorCode::InvalidDid, 42 | title: Some("Invalid DID".to_string()), 43 | detail: Some(err.to_string()), 44 | }, 45 | ResolutionError::NotFound => DidResolutionError { 46 | r#type: DidResolutionErrorCode::NotFound, 47 | title: Some("DID Not Found".to_string()), 48 | detail: Some(err.to_string()), 49 | }, 50 | ResolutionError::InternalError { .. } => DidResolutionError { 51 | r#type: DidResolutionErrorCode::InternalError, 52 | title: Some("Internal Error".to_string()), 53 | detail: Some(err.to_string()), 54 | }, 55 | ResolutionError::MethodNotSupported => DidResolutionError { 56 | r#type: DidResolutionErrorCode::MethodNotSupported, 57 | title: None, 58 | detail: None, 59 | }, 60 | }; 61 | 62 | ResolutionResult { 63 | did_resolution_metadata: DidResolutionMetadata { 64 | content_type: None, 65 | error: Some(error), 66 | }, 67 | did_document_metadata: Default::default(), 68 | did_document: Default::default(), 69 | } 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /lib/node-storage/src/entity/indexer.rs: -------------------------------------------------------------------------------- 1 | use chrono::{DateTime, Utc}; 2 | use lazybe::macros::Entity; 3 | use lazybe::uuid::Uuid; 4 | 5 | use crate::entity::DidSuffix; 6 | 7 | #[derive(Entity, sqlx::FromRow)] 8 | #[lazybe(table = "dlt_cursor")] 9 | pub struct DltCursor { 10 | #[lazybe(primary_key)] 11 | pub id: Uuid, 12 | pub slot: i64, 13 | pub block_hash: Vec, 14 | } 15 | 16 | #[derive(Entity, sqlx::FromRow)] 17 | #[lazybe(table = "raw_operation")] 18 | #[allow(unused)] 19 | pub struct RawOperation { 20 | #[lazybe(primary_key)] 21 | pub id: Uuid, 22 | pub signed_operation_data: Vec, 23 | pub slot: i64, 24 | pub block_number: i64, 25 | pub cbt: DateTime, 26 | pub absn: i32, 27 | pub osn: i32, 28 | pub is_indexed: bool, 29 | } 30 | 31 | #[derive(Entity, sqlx::FromRow)] 32 | #[lazybe(table = "indexed_ssi_operation")] 33 | 
#[allow(unused)] 34 | pub struct IndexedSsiOperation { 35 | #[lazybe(primary_key)] 36 | pub id: Uuid, 37 | pub raw_operation_id: Uuid, 38 | pub did: DidSuffix, 39 | #[lazybe(created_at)] 40 | pub indexed_at: DateTime, 41 | } 42 | 43 | #[derive(Entity, sqlx::FromRow)] 44 | #[lazybe(table = "indexed_vdr_operation")] 45 | #[allow(unused)] 46 | pub struct IndexedVdrOperation { 47 | #[lazybe(primary_key)] 48 | pub id: Uuid, 49 | pub raw_operation_id: Uuid, 50 | pub operation_hash: Vec, 51 | pub init_operation_hash: Vec, 52 | pub prev_operation_hash: Option>, 53 | pub did: DidSuffix, 54 | #[lazybe(created_at)] 55 | pub indexed_at: DateTime, 56 | } 57 | 58 | #[derive(Entity)] 59 | #[lazybe(table = "did_stats")] 60 | #[allow(unused)] 61 | pub struct DidStats { 62 | #[lazybe(primary_key)] 63 | pub did: DidSuffix, 64 | pub operation_count: i64, 65 | pub last_block: i64, 66 | pub last_slot: i64, 67 | pub last_cbt: DateTime, 68 | pub first_block: i64, 69 | pub first_slot: i64, 70 | pub first_cbt: DateTime, 71 | } 72 | 73 | #[derive(Entity)] 74 | #[lazybe(table = "raw_operation_by_did")] 75 | #[allow(unused)] 76 | pub struct RawOperationByDid { 77 | #[lazybe(primary_key)] 78 | pub id: Uuid, 79 | pub signed_operation_data: Vec, 80 | pub slot: i64, 81 | pub block_number: i64, 82 | pub cbt: DateTime, 83 | pub absn: i32, 84 | pub osn: i32, 85 | pub is_indexed: bool, 86 | pub did: DidSuffix, 87 | } 88 | 89 | impl From for RawOperation { 90 | fn from(value: RawOperationByDid) -> Self { 91 | Self { 92 | id: value.id, 93 | signed_operation_data: value.signed_operation_data, 94 | slot: value.slot, 95 | block_number: value.block_number, 96 | cbt: value.cbt, 97 | absn: value.absn, 98 | osn: value.osn, 99 | is_indexed: value.is_indexed, 100 | } 101 | } 102 | } 103 | -------------------------------------------------------------------------------- /nix/checks/neoprism-checks.nix: -------------------------------------------------------------------------------- 1 | { 2 | lib, 3 | rustTools, 4 
| makeRustPlatform, 5 | protobuf, 6 | sqlfluff, 7 | deadnix, 8 | }: 9 | 10 | let 11 | inherit (rustTools) rust; 12 | rustPlatform = makeRustPlatform { 13 | cargo = rust; 14 | rustc = rust; 15 | }; 16 | in 17 | rustPlatform.buildRustPackage { 18 | name = "neoprism-checks"; 19 | src = lib.cleanSource ./../..; 20 | inherit (rustTools) cargoLock; 21 | nativeBuildInputs = [ 22 | protobuf 23 | sqlfluff 24 | deadnix 25 | ]; 26 | buildPhase = "cargo b --all-features --all-targets"; 27 | checkPhase = '' 28 | deadnix -f 29 | sqlfluff lint --dialect postgres ./lib/node-storage/migrations/postgres 30 | cargo fmt --check 31 | cargo test 32 | cargo clippy --all-targets -- -D warnings 33 | 34 | cargo test --all-features 35 | cargo clippy --all-targets --all-features -- -D warnings 36 | 37 | # check individual feature if properly gated 38 | echo "checking feature gate for identus-apollo" 39 | cargo clippy -p identus-apollo --all-targets --features base64 -- -D warnings 40 | cargo clippy -p identus-apollo --all-targets --features ed25519 -- -D warnings 41 | cargo clippy -p identus-apollo --all-targets --features hash -- -D warnings 42 | cargo clippy -p identus-apollo --all-targets --features hex -- -D warnings 43 | cargo clippy -p identus-apollo --all-targets --features jwk -- -D warnings 44 | cargo clippy -p identus-apollo --all-targets --features openapi -- -D warnings 45 | cargo clippy -p identus-apollo --all-targets --features secp256k1 -- -D warnings 46 | cargo clippy -p identus-apollo --all-targets --features serde -- -D warnings 47 | cargo clippy -p identus-apollo --all-targets --features x25519 -- -D warnings 48 | 49 | echo "checking feature gate for identus-did-core" 50 | cargo clippy -p identus-did-core --all-targets --features openapi -- -D warnings 51 | cargo clippy -p identus-did-core --all-targets --features ts-types -- -D warnings 52 | 53 | echo "checking feature gate for identus-did-resolver-http" 54 | cargo clippy -p identus-did-resolver-http --all-targets 
--features openapi -- -D warnings 55 | 56 | echo "checking feature gate for identus-did-prism" 57 | cargo clippy -p identus-did-prism --all-targets --features openapi -- -D warnings 58 | 59 | echo "checking feature gate for identus-did-prism-indexer" 60 | cargo clippy -p identus-did-prism-indexer --all-targets --features oura -- -D warnings 61 | cargo clippy -p identus-did-prism-indexer --all-targets --features dbsync -- -D warnings 62 | 63 | echo "checking feature gate for identus-did-prism-ledger" 64 | cargo clippy -p identus-did-prism-ledger --all-targets --features in-memory -- -D warnings 65 | 66 | echo "checking feature gate for identus-did-prism-submitter" 67 | cargo clippy -p identus-did-prism-submitter --all-targets --features cardano-wallet -- -D warnings 68 | ''; 69 | installPhase = "touch $out"; 70 | 71 | PROTOC = "${protobuf}/bin/protoc"; 72 | } 73 | -------------------------------------------------------------------------------- /lib/did-prism/proto/prism-storage.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package proto; 4 | 5 | /** StorageEventCreateEntry 6 | * To be valid, this operation needs to be signed by an issuing key of the DID: 7 | * - 1) The issuing key need to be valid at the Event/Operation momment 8 | * - 2) The DID needs not to be Deactivate 9 | */ 10 | message ProtoCreateStorageEntry { 11 | reserved 2; // Only used by ProtoUpdateStorageEntry & ProtoDeactivateStorageEntry 12 | reserved 3 to 49; // Those field will be used for validation the Storage Events in the future 13 | bytes did_prism_hash = 1; // The specificId of the did:prism. 
14 | bytes nonce = 50; // Used to generate different reference hash (to make different entries with the same initial data possible) 15 | oneof data { 16 | // Nothing // The data field can be missing representing ANY type 17 | bytes bytes = 100; 18 | string ipfs = 101; // CID 19 | // string ipns = ??; // https://docs.ipfs.tech/concepts/ipns/ 20 | StatusListEntry statusListEntry = 102; 21 | } 22 | } 23 | 24 | /** StorageEventUpdateEntry 25 | * To be valid, this operation needs to be signed by an issuing key of the DID: 26 | * - 1) The issuing key needs to be valid at the Event/Operation moment 27 | * - 2) The DID must not be deactivated 28 | */ 29 | message ProtoUpdateStorageEntry { 30 | reserved 1, 50; // Only used by ProtoCreateStorageEntry 31 | reserved 3 to 49; // These fields will be used for validating Storage Events in the future 32 | bytes previous_event_hash = 2; // The hash of the most recent event that was used to create or update the VDR Entry. 33 | oneof data { // The data field can be missing 34 | // Nothing // The data field can be missing representing ANY type 35 | bytes bytes = 100; // Replace the bytes 36 | string ipfs = 101; // Update/replace the data with a CID to IPFS. This is static data 37 | StatusListEntry statusListEntry = 102; // complements the previous state with just the change (similar to a diff) 38 | } 39 | } 40 | 41 | message ProtoDeactivateStorageEntry{ 42 | reserved 1, 50; // Only used by ProtoCreateStorageEntry 43 | reserved 3 to 49; // These fields will be used for validating Storage Events in the future 44 | bytes previous_event_hash = 2; // The hash of the most recent event that was used to create or update the VDR Entry.
45 | } 46 | 47 | // ****************** 48 | // *** DATA TYPES *** 49 | // ****************** 50 | 51 | /** TODO WIP Status List entry 52 | * 53 | * This is to be inspired on the following specs (Token Status List & BitstringStatusList): 54 | * - Token Status List: 55 | * - https://datatracker.ietf.org/doc/draft-ietf-oauth-sd-jwt-vc/: 56 | * - https://datatracker.ietf.org/doc/draft-ietf-oauth-status-list/06/ 57 | * - BitstringStatusList: 58 | * - https://www.w3.org/TR/vc-bitstring-status-list/#bitstringstatuslist 59 | * - https://datatracker.ietf.org/doc/draft-ietf-oauth-status-list/10/ 60 | */ 61 | message StatusListEntry { 62 | int64 state = 1; 63 | string name = 2; // optional 64 | string details = 3; // optional 65 | 66 | // uint32 listSize = 1; 67 | // uint32 statusSize = 2; 68 | // bytes intStatus = 3; 69 | } 70 | -------------------------------------------------------------------------------- /docker/prism-test/cardano-submit-api.yaml: -------------------------------------------------------------------------------- 1 | # Tx Submission Server Configuration 2 | 3 | EnableLogMetrics: False 4 | EnableLogging: True 5 | 6 | # ------------------------------------------------------------------------------ 7 | # Logging configuration follows. 
8 | 9 | # global filter; messages must have at least this severity to pass: 10 | minSeverity: Info 11 | 12 | # global file rotation settings: 13 | rotation: 14 | rpLogLimitBytes: 5000000 15 | rpKeepFilesNum: 10 16 | rpMaxAgeHours: 24 17 | 18 | # these backends are initialized: 19 | setupBackends: 20 | - AggregationBK 21 | - KatipBK 22 | # - EditorBK 23 | # - EKGViewBK 24 | 25 | # if not indicated otherwise, then messages are passed to these backends: 26 | defaultBackends: 27 | - KatipBK 28 | 29 | # if wanted, the GUI is listening on this port: 30 | # hasGUI: 12787 31 | 32 | # if wanted, the EKG interface is listening on this port: 33 | # hasEKG: 12788 34 | 35 | # here we set up outputs of logging in 'katip': 36 | setupScribes: 37 | - scKind: StdoutSK 38 | scName: stdout 39 | scFormat: ScText 40 | scRotation: null 41 | 42 | # if not indicated otherwise, then log output is directed to this: 43 | defaultScribes: 44 | - - StdoutSK 45 | - stdout 46 | 47 | # more options which can be passed as key-value pairs: 48 | options: 49 | cfokey: 50 | value: "Release-1.0.0" 51 | mapSubtrace: 52 | benchmark: 53 | contents: 54 | - GhcRtsStats 55 | - MonotonicClock 56 | subtrace: ObservableTrace 57 | '#ekgview': 58 | contents: 59 | - - tag: Contains 60 | contents: 'cardano.epoch-validation.benchmark' 61 | - - tag: Contains 62 | contents: .monoclock.basic. 63 | - - tag: Contains 64 | contents: 'cardano.epoch-validation.benchmark' 65 | - - tag: Contains 66 | contents: diff.RTS.cpuNs.timed. 67 | - - tag: StartsWith 68 | contents: '#ekgview.#aggregation.cardano.epoch-validation.benchmark' 69 | - - tag: Contains 70 | contents: diff.RTS.gcNum.timed. 71 | subtrace: FilterTrace 72 | 'cardano.epoch-validation.utxo-stats': 73 | # Change the `subtrace` value to `Neutral` in order to log 74 | # `UTxO`-related messages during epoch validation. 
75 | subtrace: NoTrace 76 | '#messagecounters.aggregation': 77 | subtrace: NoTrace 78 | '#messagecounters.ekgview': 79 | subtrace: NoTrace 80 | '#messagecounters.switchboard': 81 | subtrace: NoTrace 82 | '#messagecounters.katip': 83 | subtrace: NoTrace 84 | '#messagecounters.monitoring': 85 | subtrace: NoTrace 86 | 'cardano.#messagecounters.aggregation': 87 | subtrace: NoTrace 88 | 'cardano.#messagecounters.ekgview': 89 | subtrace: NoTrace 90 | 'cardano.#messagecounters.switchboard': 91 | subtrace: NoTrace 92 | 'cardano.#messagecounters.katip': 93 | subtrace: NoTrace 94 | 'cardano.#messagecounters.monitoring': 95 | subtrace: NoTrace 96 | mapBackends: 97 | cardano.epoch-validation.benchmark: 98 | - AggregationBK 99 | '#aggregation.cardano.epoch-validation.benchmark': 100 | - EKGViewBK 101 | -------------------------------------------------------------------------------- /tests/prism-test/src/test/scala/org/hyperledger/identus/prismtest/MainSpec.scala: -------------------------------------------------------------------------------- 1 | package org.hyperledger.identus.prismtest 2 | 3 | import org.hyperledger.identus.prismtest.suite.CreateDidOperationSuite 4 | import org.hyperledger.identus.prismtest.suite.CreateStorageOperationSuite 5 | import org.hyperledger.identus.prismtest.suite.DeactivateDidOperationSuite 6 | import org.hyperledger.identus.prismtest.suite.DeactivateStorageOperationSuite 7 | import org.hyperledger.identus.prismtest.suite.UpdateDidOperationSuite 8 | import org.hyperledger.identus.prismtest.suite.UpdateStorageOperationSuite 9 | import org.hyperledger.identus.prismtest.utils.TestUtils 10 | import proto.prism.PrismBlock 11 | import proto.prism.PrismObject 12 | import proto.prism_ssi.KeyUsage 13 | import zio.* 14 | import zio.http.Client 15 | import zio.test.* 16 | 17 | object MainSpec extends ZIOSpecDefault, TestUtils: 18 | 19 | override def spec = 20 | val allSpecs = 21 | CreateDidOperationSuite.allSpecs + 22 | UpdateDidOperationSuite.allSpecs + 23 | 
DeactivateDidOperationSuite.allSpecs + 24 | CreateStorageOperationSuite.allSpecs + 25 | UpdateStorageOperationSuite.allSpecs + 26 | DeactivateStorageOperationSuite.allSpecs 27 | 28 | val neoprismLayer = 29 | ZLayer(ZIO.systemWith(_.env("SKIP_CONFIRMATION_CHECK_MILLIS")).map(env => env.flatMap(_.toIntOption))) 30 | .flatMap(skipCheckMillis => NodeClient.neoprism("localhost", 18080)("localhost", 18081)(skipCheckMillis.get)) 31 | 32 | val neoprismSpec = suite("NeoPRISM suite")(allSpecs) 33 | .provide( 34 | Client.default, 35 | neoprismLayer, 36 | NodeName.layer("neoprism") 37 | ) 38 | 39 | // val prismNodeSpec = suite("PRISM node suite")(allSpecs) 40 | // .provide( 41 | // NodeClient.grpc("localhost", 50053), 42 | // NodeName.layer("prism-node") 43 | // ) 44 | 45 | (neoprismSpec + generateDidFixtureSpec).provide(Runtime.removeDefaultLoggers) 46 | @@ TestAspect.timed 47 | @@ TestAspect.withLiveEnvironment 48 | @@ TestAspect.parallelN(1) 49 | 50 | // Comment the ignore aspect and run `sbt testOnly -- -tags fixture` 51 | // to output the generated test vector 52 | private def generateDidFixtureSpec = test("generate did fixtures for testing") { 53 | val seed = Array.fill[Byte](64)(0) 54 | val vdrKeyName = "vdr-0" 55 | val makeVdrKey = KeyUsage.VDR_KEY secp256k1 "m/0'/8'/0'" 56 | 57 | val spo = builder(seed).createDid 58 | .key("master-0")(KeyUsage.MASTER_KEY secp256k1 "m/0'/1'/0'") 59 | .key(vdrKeyName)(makeVdrKey) 60 | .build 61 | .signWith("master-0", deriveSecp256k1(seed)("m/0'/1'/0'")) 62 | 63 | val did = spo.getDid.get 64 | val (_, vdrHdKey) = makeVdrKey(seed) 65 | val vdrPrivateKeyHex = vdrHdKey.getKMMSecp256k1PrivateKey().getEncoded().toHexString 66 | val prismObjectHex = PrismObject(blockContent = Some(PrismBlock(operations = Seq(spo)))).toByteArray.toHexString 67 | 68 | for 69 | _ <- ZIO.debug(s"DID : $did") 70 | _ <- ZIO.debug(s"VDR key name : $vdrKeyName") 71 | _ <- ZIO.debug(s"VDR privateKey hex : $vdrPrivateKeyHex") 72 | _ <- ZIO.debug(s"PrismObject hex : 
$prismObjectHex") 73 | yield assertCompletes 74 | } @@ TestAspect.tag("fixture") @@ TestAspect.ignore 75 | -------------------------------------------------------------------------------- /docs/src/configuration/indexer.md: -------------------------------------------------------------------------------- 1 | # Indexer Configuration 2 | 3 | The **Indexer node** monitors the Cardano blockchain for PRISM DID operations, validates and indexes them, and enables efficient lookup of DID Documents. 4 | It is typically used for DID resolution and verification services. 5 | 6 | ## DLT Source 7 | 8 | The Indexer node supports multiple DLT sources for ingesting DID operations: 9 | 10 | - **Oura:** 11 | Connects to a Cardano relay node and streams block data in real time. 12 | - Key options: 13 | - Cardano network: `--cardano-network` or `NPRISM_CARDANO_NETWORK` 14 | - Relay address: `--cardano-relay-addr` or `NPRISM_CARDANO_RELAY_ADDR` 15 | 16 | - **DB-Sync:** 17 | Connects to a Cardano DB-Sync instance and polls for new blocks and transactions. 18 | - Key options: 19 | - DB-Sync URL: `--db-sync-url` or `NPRISM_DB_SYNC_URL` 20 | - Poll interval: `--db-sync-poll-interval` or `NPRISM_DB_SYNC_POLL_INTERVAL` 21 | 22 | - **Common DLT Source Options:** 23 | - Index interval: `--index-interval` or `NPRISM_INDEX_INTERVAL` 24 | - Confirmation blocks: `--confirmation-blocks` or `NPRISM_CONFIRMATION_BLOCKS` 25 | 26 | --- 27 | 28 | ## DLT Source Comparison 29 | 30 | **Oura** 31 | 32 | Oura works by performing a chainsync protocol with a Cardano relay node. 33 | This setup is quite lean, as you can connect to any available public relay. 34 | The downside is that sync progress can be slow, since it performs a full sync from the blockchain. 35 | If possible, connect to a Cardano node close to your location, as syncing across different geographic regions can be very slow. 36 | The initial sync may take multiple days. 
The best option is to connect to your own Cardano node within the same network for optimal performance. 37 | 38 | **DB Sync** 39 | 40 | DBSync is a service that syncs the Cardano blockchain and writes the data to a PostgreSQL database. 41 | DBSync is known to be resource-heavy and requires significant disk space. 42 | The advantage is that sync speed is very fast, since NeoPRISM only needs to read the database tables and parse the operations. 43 | If you can afford to run DBSync, it is recommended to use this option, as the initial sync is much faster compared to Oura. 44 | 45 | --- 46 | 47 | ## How Common DLT Source Configuration Works 48 | 49 | NeoPRISM streams blocks from the Cardano blockchain and extracts PRISM metadata, which is then persisted to the database. These operations are initially stored as raw, unindexed data. At every configured interval (set by the index interval option), NeoPRISM wakes up and picks up unindexed operations from the database. It then runs the indexing logic, which extracts, validates, and transforms each raw operation into an efficient lookup data structure. 50 | 51 | A faster index interval reduces the lag between when an operation is streamed and when it becomes indexed and available for fast lookup. However, setting a very short interval can put additional pressure on the database due to more frequent indexing cycles. NeoPRISM comes with a sensible default value for the index interval to balance performance and resource usage. 52 | 53 | Choose the DLT source and interval settings that best fit your infrastructure and performance needs. 54 | 55 | --- 56 | 57 | **Next Steps:** 58 | - [CLI Options](../references/cli-options.md): Full list of flags and environment variables. 
59 | -------------------------------------------------------------------------------- /docs/src/prism-test/README.md: -------------------------------------------------------------------------------- 1 | # PRISM Specification Tests 2 | 3 | The `prism-test` suite exercises NeoPRISM end-to-end against the official PRISM conformance scenarios (create, update, deactivate, and VDR extensions). Keeping this suite green is the primary guardrail for regressions. 4 | 5 | ## Recommended workflow 6 | 7 | All automation assumes you are inside the Nix development shell: 8 | 9 | ```bash 10 | nix develop 11 | ``` 12 | 13 | From there, run every stack (PostgreSQL + SQLite, developer + CI topologies) in one go: 14 | 15 | ```bash 16 | just e2e::run 17 | ``` 18 | 19 | `just e2e::run` performs the following loop: 20 | 21 | 1. Builds and `docker load`s the NeoPRISM image (`just e2e::docker-publish-local`). 22 | 2. Iterates through the compose stacks below, running `sbt test` for each. 23 | 3. Executes `docker-compose ... down --volumes --remove-orphans` so no containers linger between runs. 24 | 25 | | Target | Backend | Compose file | Notes | 26 | |--------|---------|--------------|-------| 27 | | `dev` | PostgreSQL | `docker/prism-test/compose-dev.yml` | Mirrors “developer friendly” defaults with Postgres | 28 | | `dev-sqlite` | SQLite | `docker/prism-test/compose-dev-sqlite.yml` | Fastest loop, great for laptop smoke tests | 29 | | `ci` | PostgreSQL | `docker/prism-test/compose-ci.yml` | Heavier topology that matches the CI pipeline | 30 | | `ci-sqlite` | SQLite | `docker/prism-test/compose-ci-sqlite.yml` | Ensures the embedded backend stays compatible under CI load | 31 | 32 | Need to focus on a single stack? 
Use `just e2e::up <stack>` / `just e2e::down <stack>` and run the Scala tests manually: 33 | 34 | ```bash 35 | just e2e::up dev-sqlite 36 | (cd tests/prism-test && sbt test) 37 | just e2e::down dev-sqlite 38 | ``` 39 | 40 | ## Full repository check 41 | 42 | Before opening a PR, run the umbrella command from the repository root: 43 | 44 | ```bash 45 | just full-check 46 | ``` 47 | 48 | It chains formatting, `cargo build`, `just test`, Docker image builds, and the full `just e2e::run` suite. This mirrors the checks we expect to pass in CI. 49 | 50 | ## Manual compose usage 51 | 52 | The compose files live in `docker/prism-test/` and are generated via `just build-config`. If you need to inspect or tweak them manually: 53 | 54 | 1. Start the desired stack (for example the developer Postgres topology): 55 | ```bash 56 | cd docker/prism-test 57 | docker-compose -f compose-dev.yml up 58 | ``` 59 | 2. In another terminal, run the Scala suite: 60 | ```bash 61 | cd tests/prism-test 62 | sbt test 63 | ``` 64 | 65 | Manually editing the YAML files is discouraged—change the Python sources under `tools/compose_gen/` and rerun `just build-config` instead. 66 | 67 | ## Who should run the suite? 68 | 69 | - **NeoPRISM contributors:** run either `just e2e::run` or `just full-check` whenever core logic changes. 70 | - **Downstream PRISM node teams:** point the compose stack at your image (override the `image:` field or use `docker load` to drop in local builds) and reuse the same test harness. 71 | 72 | ## Extending the suite 73 | 74 | To include an additional node implementation or scenario: 75 | 76 | 1. Update `tests/prism-test/src/test/scala/org/hyperledger/identus/prismtest/MainSpec.scala` to register your node layer or new scenarios. 77 | 78 | 2. Implement a matching `NodeClient` adapter if your HTTP surface differs from the existing NeoPRISM resolver/submitter endpoints. 79 | 80 | This keeps the shared conformance suite portable across implementations. 
80 | -------------------------------------------------------------------------------- /lib/did-core/src/uri.rs: -------------------------------------------------------------------------------- 1 | use std::str::FromStr; 2 | use std::sync::LazyLock; 3 | 4 | use regex::Regex; 5 | use serde::{Deserialize, Serialize}; 6 | 7 | use crate::{Error, InvalidUri}; 8 | 9 | static URI_FRAGMENT_RE: LazyLock = LazyLock::new(|| { 10 | Regex::new(r"^([A-Za-z0-9\-._~!$&'()*+,;=:@/?]|%[0-9A-Fa-f]{2})*$").expect("URI regex is invalid") 11 | }); 12 | 13 | #[derive(Clone, Serialize, Deserialize, derive_more::Debug, derive_more::Display)] 14 | #[cfg_attr(feature = "openapi", derive(utoipa::ToSchema))] 15 | #[cfg_attr(feature = "openapi", schema(value_type = String, example = "http://example.com"))] 16 | #[cfg_attr( 17 | feature = "ts-types", 18 | derive(ts_rs::TS), 19 | ts(type = "string", export_to = "../../../bindings/ts-types/did_core_types.ts") 20 | )] 21 | #[debug("{}", self.0.to_string())] 22 | #[display("{}", self.0.to_string())] 23 | pub struct Uri(String); 24 | 25 | impl FromStr for Uri { 26 | type Err = Error; 27 | 28 | fn from_str(s: &str) -> Result { 29 | if is_uri(s) { 30 | Ok(Uri(s.to_string())) 31 | } else { 32 | Err(Error::InvalidUri(InvalidUri { msg: "not a valid uri" })) 33 | } 34 | } 35 | } 36 | 37 | /// Check if the given string is a valid URI 38 | /// 39 | /// # Example 40 | /// ``` 41 | /// use identus_did_core::uri::is_uri; 42 | /// assert_eq!(is_uri("http://example.com"), true); 43 | /// assert_eq!(is_uri("ftps://example.com/help?q=example"), true); 44 | /// assert_eq!(is_uri("urn:resource"), true); 45 | /// assert_eq!(is_uri("did:web:example.com"), true); 46 | /// assert_eq!(is_uri(""), false); 47 | /// assert_eq!(is_uri(" "), false); 48 | /// assert_eq!(is_uri("foo"), false); 49 | /// assert_eq!(is_uri("hello world"), false); 50 | /// ``` 51 | pub fn is_uri(s: &str) -> bool { 52 | let parsed = uriparse::URI::try_from(s); 53 | parsed.is_ok() 54 | } 55 | 56 | /// 
Check if the given string is a valid URI fragment. 57 | /// 58 | /// # Example 59 | /// ``` 60 | /// use identus_did_core::uri::is_uri_fragment; 61 | /// assert_eq!(is_uri_fragment("hello"), true); 62 | /// assert_eq!(is_uri_fragment("hello%20world"), true); 63 | /// assert_eq!(is_uri_fragment("@123"), true); 64 | /// assert_eq!(is_uri_fragment("+-*/"), true); 65 | /// assert_eq!(is_uri_fragment(""), true); 66 | /// assert_eq!(is_uri_fragment("hello world"), false); 67 | /// assert_eq!(is_uri_fragment(" "), false); 68 | /// assert_eq!(is_uri_fragment("hello%"), false); 69 | /// assert_eq!(is_uri_fragment("hello%2"), false); 70 | /// assert_eq!(is_uri_fragment("hello#"), false); 71 | /// ``` 72 | pub fn is_uri_fragment(s: &str) -> bool { 73 | URI_FRAGMENT_RE.is_match(s) 74 | } 75 | 76 | #[cfg(test)] 77 | mod tests { 78 | use std::str::FromStr; 79 | 80 | use super::*; 81 | 82 | #[test] 83 | fn test_uri_from_str_valid() { 84 | let s = "http://example.com"; 85 | let uri = Uri::from_str(s); 86 | assert!(uri.is_ok()); 87 | assert_eq!(uri.unwrap().0, s); 88 | } 89 | 90 | #[test] 91 | fn test_uri_from_str_invalid() { 92 | let s = "not a uri"; 93 | let uri = Uri::from_str(s); 94 | assert!(uri.is_err()); 95 | if let Err(crate::Error::InvalidUri(_)) = uri { 96 | // expected 97 | } else { 98 | panic!("Expected InvalidUri error"); 99 | } 100 | } 101 | } 102 | -------------------------------------------------------------------------------- /nix/packages/default.nix: -------------------------------------------------------------------------------- 1 | { pkgs }: 2 | 3 | let 4 | hostSystem = pkgs.stdenv.hostPlatform.system; 5 | version = builtins.replaceStrings [ "\n" ] [ "" ] (builtins.readFile ../../version); 6 | callPackageCrossWithRust = 7 | targetSystem: path: overrides: 8 | pkgs.pkgsCross."${targetSystem}".callPackage path ( 9 | { 10 | rust = pkgs.rustTools.mkRustCross { 11 | pkgsCross = pkgs.pkgsCross."${targetSystem}"; 12 | minimal = true; 13 | }; 14 | } 15 | // overrides 16 
| ); 17 | neoprismPackages = { 18 | neoprism-ui-assets = pkgs.callPackage ./neoprism-ui-assets.nix { }; 19 | neoprism-bin = pkgs.callPackage ./neoprism-bin.nix { 20 | rust = pkgs.rustTools.rustMinimal; 21 | inherit (pkgs.rustTools) cargoLock; 22 | }; 23 | neoprism-bin-x86_64-linux = callPackageCrossWithRust "gnu64" ./neoprism-bin.nix { 24 | inherit (pkgs.rustTools) cargoLock; 25 | }; 26 | neoprism-bin-aarch64-linux = callPackageCrossWithRust "aarch64-multiplatform" ./neoprism-bin.nix { 27 | inherit (pkgs.rustTools) cargoLock; 28 | }; 29 | }; 30 | # Docker images target Linux, regardless of host platform 31 | dockerCrossPlatformConfig = { 32 | x86_64-linux = { 33 | callPackage = pkgs.pkgsCross.gnu64.callPackage; 34 | neoprism-bin = neoprismPackages.neoprism-bin-x86_64-linux; 35 | }; 36 | aarch64-darwin = { 37 | # macOS builds Linux ARM64 containers 38 | callPackage = pkgs.pkgsCross.aarch64-multiplatform.callPackage; 39 | neoprism-bin = neoprismPackages.neoprism-bin-aarch64-linux; 40 | }; 41 | }; 42 | in 43 | { 44 | # docs-site 45 | docs-site = pkgs.callPackage ./docs-site.nix { 46 | inherit version; 47 | inherit (neoprismPackages) neoprism-bin; 48 | }; 49 | 50 | # neoprism docker 51 | neoprism-docker = dockerCrossPlatformConfig.${hostSystem}.callPackage ./neoprism-docker.nix { 52 | inherit version; 53 | inherit (neoprismPackages) neoprism-ui-assets; 54 | inherit (dockerCrossPlatformConfig.${hostSystem}) neoprism-bin; 55 | }; 56 | neoprism-docker-latest = dockerCrossPlatformConfig.${hostSystem}.callPackage ./neoprism-docker.nix { 57 | inherit (neoprismPackages) neoprism-ui-assets; 58 | inherit (dockerCrossPlatformConfig.${hostSystem}) neoprism-bin; 59 | version = "latest"; 60 | }; 61 | neoprism-docker-linux-amd64 = pkgs.pkgsCross.gnu64.callPackage ./neoprism-docker.nix { 62 | inherit version; 63 | inherit (neoprismPackages) neoprism-ui-assets; 64 | neoprism-bin = neoprismPackages.neoprism-bin-x86_64-linux; 65 | tagSuffix = "-amd64"; 66 | }; 67 | 
neoprism-docker-linux-arm64 = 68 | pkgs.pkgsCross.aarch64-multiplatform.callPackage ./neoprism-docker.nix 69 | { 70 | inherit version; 71 | inherit (neoprismPackages) neoprism-ui-assets; 72 | neoprism-bin = neoprismPackages.neoprism-bin-aarch64-linux; 73 | tagSuffix = "-arm64"; 74 | }; 75 | 76 | # cardano-testnet docker 77 | cardano-testnet-docker = 78 | dockerCrossPlatformConfig.${hostSystem}.callPackage ./cardano-testnet-docker.nix 79 | { }; 80 | cardano-testnet-docker-linux-amd64 = pkgs.pkgsCross.gnu64.callPackage ./cardano-testnet-docker.nix { 81 | tagSuffix = "-amd64"; 82 | }; 83 | cardano-testnet-docker-linux-arm64 = 84 | pkgs.pkgsCross.aarch64-multiplatform.callPackage ./cardano-testnet-docker.nix 85 | { 86 | tagSuffix = "-arm64"; 87 | }; 88 | } 89 | // neoprismPackages 90 | -------------------------------------------------------------------------------- /lib/node-storage/src/lib.rs: -------------------------------------------------------------------------------- 1 | use identus_did_prism::did::Error as DidError; 2 | use identus_did_prism::did::error::DidSyntaxError; 3 | use identus_did_prism_indexer::repo::{DltCursorRepo, IndexedOperationRepo, IndexerStateRepo, RawOperationRepo}; 4 | 5 | pub mod backend; 6 | mod entity; 7 | 8 | pub use backend::postgres::PostgresDb; 9 | #[cfg(feature = "sqlite-storage")] 10 | pub use backend::sqlite::SqliteDb; 11 | 12 | pub trait StorageBackend: 13 | RawOperationRepo 14 | + IndexedOperationRepo 15 | + IndexerStateRepo 16 | + DltCursorRepo 17 | + Send 18 | + Sync 19 | + 'static 20 | { 21 | } 22 | 23 | impl StorageBackend for T where 24 | T: RawOperationRepo 25 | + IndexedOperationRepo 26 | + IndexerStateRepo 27 | + DltCursorRepo 28 | + Send 29 | + Sync 30 | + 'static 31 | { 32 | } 33 | 34 | #[derive(Debug, derive_more::From, derive_more::Display, derive_more::Error)] 35 | pub enum Error { 36 | #[from] 37 | #[display("database connection error")] 38 | Db { source: sqlx::Error }, 39 | #[from] 40 | #[display("database migration 
error")] 41 | DbMigration { source: sqlx::migrate::MigrateError }, 42 | #[display("unable to decode to protobuf message into type {target_type} from stored data")] 43 | ProtobufDecode { 44 | source: protobuf::Error, 45 | target_type: &'static str, 46 | }, 47 | #[from] 48 | #[display("failed to compute did index from signed-prism-operation")] 49 | DidIndexFromSignedPrismOperation { source: DidError }, 50 | #[from] 51 | #[display("failed to decode did from stored data")] 52 | DidDecode { source: DidSyntaxError }, 53 | } 54 | 55 | #[cfg(test)] 56 | mod tests { 57 | use std::fs; 58 | use std::path::Path; 59 | 60 | use super::*; 61 | 62 | fn assert_backend() {} 63 | 64 | #[test] 65 | fn postgres_backend_implements_storage_backend() { 66 | assert_backend::(); 67 | } 68 | 69 | #[cfg(feature = "sqlite-storage")] 70 | #[test] 71 | fn sqlite_backend_implements_storage_backend() { 72 | assert_backend::(); 73 | } 74 | 75 | #[test] 76 | fn sqlite_and_postgres_migrations_are_in_sync() { 77 | fn collect(dir: &str) -> Vec { 78 | let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR")).join(dir); 79 | let mut names = fs::read_dir(&manifest_dir) 80 | .unwrap_or_else(|_| panic!("failed to read {}", manifest_dir.display())) 81 | .filter_map(|entry| { 82 | entry.ok().and_then(|e| { 83 | let file_name = e.file_name(); 84 | let name = file_name.to_string_lossy().to_string(); 85 | if name.ends_with(".sql") { Some(name) } else { None } 86 | }) 87 | }) 88 | .collect::>(); 89 | names.sort(); 90 | names 91 | } 92 | 93 | let postgres = collect("migrations/postgres"); 94 | let sqlite = collect("migrations/sqlite"); 95 | assert_eq!( 96 | postgres, sqlite, 97 | "Postgres and SQLite migrations differ: {:?} vs {:?}", 98 | postgres, sqlite 99 | ); 100 | } 101 | } 102 | -------------------------------------------------------------------------------- /changes/archive/2025-11-26-sqlite-backend-support.md: -------------------------------------------------------------------------------- 1 | # SQLite 
Backend Support — Architecture Decision Record 2 | 3 | Status: implemented (2025-11-26) 4 | Issue: https://github.com/hyperledger-identus/neoprism/issues/108 5 | 6 | ## Why 7 | - Provide a lightweight embedded backend for local/dev/CI while keeping schema parity and behavior with PostgreSQL. 8 | - Avoid bespoke snapshot import/export; rely on engine-native backup/restore flows. 9 | 10 | ## What Changes 11 | 12 | **Backend Selection** 13 | - From: Dedicated `--db-backend` flag plus URL. 14 | - To: Backend inferred from `NPRISM_DB_URL` / `--db-url` scheme (`postgres://` vs `sqlite://`); missing URL defaults to per-network embedded SQLite path. 15 | - Reason: Simpler UX, fewer mismatches. 16 | - Impact: Non-breaking; file-path default is new for omitted URLs. 17 | 18 | **Storage Layer** 19 | - From: Single Postgres implementation. 20 | - To: `PostgresDb` and `SqliteDb` under `lib/node-storage/src/backend/{postgres,sqlite}.rs`, both implementing the shared `StorageBackend`; runtime selects via `init_database`. 21 | - Reason: Pluggable backends with identical trait surface. 22 | - Impact: Non-breaking; adds SQLite feature path. 23 | 24 | **Migrations** 25 | - From: Unified migration folder. 26 | - To: Split migrations under `lib/node-storage/migrations/{postgres,sqlite}` with backend-specific `sqlx` wiring; tests assert parity. 27 | - Reason: Dialect-specific schema while preventing drift. 28 | - Impact: Requires maintaining both folders. 29 | 30 | **Features & Tooling** 31 | - From: No SQLite toolchain. 32 | - To: `sqlite-backend` Cargo feature; dev shell bundles `sqlite`/`libsqlite3`/`sqlx-cli`; `just db-init-sqlite` / `db-clean-sqlite` manage the default file. 33 | - Reason: Make SQLite first-class for dev/CI. 34 | - Impact: Feature-guarded; optional dependency weight. 35 | 36 | **CLI / Docs / Compose** 37 | - From: Backend flag and Postgres-first docs. 
38 | - To: Docs show scheme-based selection and default SQLite location; compose stacks set only `NPRISM_DB_URL`; README/config guide aligned. 39 | - Reason: Single-source configuration and clearer defaults. 40 | - Impact: Users omit `NPRISM_DB_BACKEND`; otherwise backward compatible. 41 | 42 | **E2E Coverage** 43 | - From: Postgres-only e2e. 44 | - To: `just e2e::run` and Docker builds cover both Postgres and SQLite stacks (with SQLite feature enabled). 45 | - Reason: Parity validation across backends. 46 | - Impact: Longer e2e run time; broader confidence. 47 | 48 | ## Impact 49 | - Affected components: node CLI/runtime, node-storage backends, migrations, docs, compose stacks, e2e harness. 50 | - Migration: Transparent; Postgres users keep URLs. Omitting `NPRISM_DB_URL` now implies SQLite default file. 51 | - Tooling/ops: Need to keep both migration trees in sync; ensure feature flags set for builds that require SQLite. 52 | 53 | ## Notes on scope changes 54 | - Backup/restore stays engine-native (pg dump/restore, SQLite file/`.backup`); no internal snapshot pipeline. 55 | 56 | ## Follow-ups / open questions 57 | - Do we need to ship pre-seeded SQLite files in release artifacts, or always migrate on first run? 58 | - Should we expose a toggle to avoid linking SQLite when unused (to trim binary size)? 59 | - Do we need additional telemetry/metrics specific to embedded mode (e.g., WAL checkpoints, file size alerts)? 60 | 61 | TODOs 62 | - Decide on release packaging policy for SQLite (pre-seeded file vs. migrate-on-first-run). 63 | - Add a build-time toggle to omit SQLite when unused, if binary size justifies it. 64 | - Add embedded-mode telemetry/alerts (WAL checkpointing/file growth) if operational feedback is needed. 
65 | -------------------------------------------------------------------------------- /cliff.toml: -------------------------------------------------------------------------------- 1 | # git-cliff ~ default configuration file 2 | # https://git-cliff.org/docs/configuration 3 | # 4 | # Lines starting with "#" are comments. 5 | # Configuration options are organized into tables and keys. 6 | # See documentation for more information on available options. 7 | 8 | [changelog] 9 | # template for the changelog header 10 | header = """ 11 | # Changelog\n 12 | All notable changes to this project will be documented in this file.\n 13 | """ 14 | # template for the changelog body 15 | # https://keats.github.io/tera/docs/#introduction 16 | body = """ 17 | {% if version %}\ 18 | ## [{{ version | trim_start_matches(pat="v") }}](https://github.com/hyperledger-identus/neoprism/releases/tag/v{{ version | trim_start_matches(pat="v") }}) - {{ timestamp | date(format="%Y-%m-%d") }} 19 | {% else %}\ 20 | ## [unreleased] 21 | {% endif %}\ 22 | {% for group, commits in commits | group_by(attribute="group") %} 23 | ### {{ group | striptags | trim | upper_first }} 24 | {% for commit in commits %} 25 | - {% if commit.scope %}*({{ commit.scope }})* {% endif %}\ 26 | {% if commit.breaking %}[**breaking**] {% endif %}\ 27 | {{ commit.message | upper_first }}\ 28 | {% endfor %} 29 | {% endfor %}\n 30 | """ 31 | # template for the changelog footer 32 | footer = """ 33 | 34 | """ 35 | # remove the leading and trailing s 36 | trim = true 37 | # postprocessors 38 | postprocessors = [ 39 | { pattern = '', replace = "https://github.com/hyperledger-identus/neoprism" }, # replace repository URL 40 | ] 41 | # render body even when there are no releases to process 42 | # render_always = true 43 | # output file path 44 | # output = "test.md" 45 | 46 | [git] 47 | # parse the commits based on https://www.conventionalcommits.org 48 | conventional_commits = true 49 | # filter out the commits that are not 
conventional 50 | filter_unconventional = true 51 | # process each line of a commit as an individual commit 52 | split_commits = false 53 | # regex for preprocessing the commit messages 54 | commit_preprocessors = [ 55 | { pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](/pull/${2}))" }, 56 | # Check spelling of the commit with https://github.com/crate-ci/typos 57 | # If the spelling is incorrect, it will be automatically fixed. 58 | #{ pattern = '.*', replace_command = 'typos --write-changes -' }, 59 | ] 60 | # regex for parsing and grouping commits 61 | commit_parsers = [ 62 | { message = "^feat", group = "🚀 Features" }, 63 | { message = "^fix", group = "🐛 Bug Fixes" }, 64 | { message = "^doc", group = "📚 Documentation" }, 65 | { message = "^perf", group = "⚡ Performance" }, 66 | { message = "^refactor", group = "🚜 Refactor" }, 67 | { message = "^style", group = "🎨 Styling" }, 68 | { message = "^test", group = "🧪 Testing" }, 69 | { message = "^chore: update changelog", skip = true }, 70 | { message = "^chore\\(release\\): prepare for", skip = true }, 71 | { message = "^chore\\(deps.*\\)", skip = true }, 72 | { message = "^chore\\(pr\\)", skip = true }, 73 | { message = "^chore\\(pull\\)", skip = true }, 74 | { message = "^chore|^ci", group = "⚙️ Miscellaneous Tasks" }, 75 | { body = ".*security", group = "🛡️ Security" }, 76 | { message = "^revert", group = "◀️ Revert" }, 77 | { message = ".*", group = "💼 Other" }, 78 | ] 79 | # filter out the commits that are not matched by commit parsers 80 | filter_commits = false 81 | # sort the tags topologically 82 | topo_order = false 83 | # sort the commits inside sections by oldest/newest order 84 | sort_commits = "oldest" 85 | 86 | tag_pattern = "v[0-9].*" 87 | -------------------------------------------------------------------------------- /lib/did-core/src/did.rs: -------------------------------------------------------------------------------- 1 | use std::str::FromStr; 2 | 3 | use identity_did::DID; 4 | use 
serde::{Deserialize, Serialize}; 5 | 6 | use crate::{Error, InvalidDid}; 7 | 8 | #[derive(Clone, Serialize, Deserialize, derive_more::Debug, derive_more::Display)] 9 | #[cfg_attr(feature = "openapi", derive(utoipa::ToSchema))] 10 | #[cfg_attr(feature = "openapi", schema(value_type = String, example = "did:example:123456789abcdefghi"))] 11 | #[cfg_attr( 12 | feature = "ts-types", 13 | derive(ts_rs::TS), 14 | ts(type = "string", export_to = "../../../bindings/ts-types/did_core_types.ts") 15 | )] 16 | #[debug("{}", self.0.to_string())] 17 | #[display("{}", self.0.to_string())] 18 | pub struct Did(#[cfg_attr(feature = "ts-types", ts(type = "string"))] identity_did::CoreDID); 19 | 20 | #[derive(Clone, Serialize, Deserialize, derive_more::Debug, derive_more::Display)] 21 | #[cfg_attr(feature = "openapi", derive(utoipa::ToSchema))] 22 | #[cfg_attr(feature = "openapi", schema(value_type = String, example = "did:example:123456789abcdefghi#key-1?service=abc"))] 23 | #[display("{}", self.0.to_string())] 24 | #[debug("{}", self.0.to_string())] 25 | pub struct DidUrl(identity_did::DIDUrl); 26 | 27 | impl Did { 28 | pub fn to_did_url(&self) -> DidUrl { 29 | DidUrl::from_str(&self.to_string()).unwrap() 30 | } 31 | } 32 | 33 | impl DidUrl { 34 | pub fn to_did(&self) -> Did { 35 | let mut did_url = self.0.clone(); 36 | did_url.set_fragment(None).unwrap(); 37 | did_url.set_path(None).unwrap(); 38 | did_url.set_query(None).unwrap(); 39 | Did(did_url.did().clone()) 40 | } 41 | } 42 | 43 | impl FromStr for Did { 44 | type Err = Error; 45 | fn from_str(s: &str) -> Result { 46 | let did_url = DidUrl::from_str(s)?; 47 | if did_url.path().is_some() { 48 | Err(InvalidDid::from(identity_did::Error::Other( 49 | "DID cannot contain path segment(s)", 50 | )))?; 51 | } 52 | if did_url.query().is_some() { 53 | Err(InvalidDid::from(identity_did::Error::Other("DID cannot contain query")))?; 54 | } 55 | if did_url.fragment().is_some() { 56 | Err(InvalidDid::from(identity_did::Error::Other( 57 | "DID 
cannot contain fragment", 58 | )))?; 59 | } 60 | Ok(did_url.to_did()) 61 | } 62 | } 63 | 64 | impl FromStr for DidUrl { 65 | type Err = Error; 66 | fn from_str(s: &str) -> Result { 67 | Ok(Self(identity_did::DIDUrl::parse(s).map_err(InvalidDid::from)?)) 68 | } 69 | } 70 | 71 | pub trait DidOps: std::fmt::Display { 72 | fn method(&self) -> &str; 73 | fn method_id(&self) -> &str; 74 | } 75 | 76 | pub trait DidUrlOps: DidOps + std::fmt::Display { 77 | fn fragment(&self) -> Option<&str>; 78 | fn path(&self) -> Option<&str>; 79 | fn query(&self) -> Option<&str>; 80 | } 81 | 82 | impl DidOps for Did { 83 | fn method(&self) -> &str { 84 | self.0.method() 85 | } 86 | 87 | fn method_id(&self) -> &str { 88 | self.0.method_id() 89 | } 90 | } 91 | 92 | impl DidOps for DidUrl { 93 | fn method(&self) -> &str { 94 | self.0.did().method() 95 | } 96 | 97 | fn method_id(&self) -> &str { 98 | self.0.did().method_id() 99 | } 100 | } 101 | 102 | impl DidUrlOps for DidUrl { 103 | fn fragment(&self) -> Option<&str> { 104 | self.0.fragment() 105 | } 106 | 107 | fn path(&self) -> Option<&str> { 108 | self.0.path() 109 | } 110 | 111 | fn query(&self) -> Option<&str> { 112 | self.0.query() 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /.github/workflows/scorecard.yml: -------------------------------------------------------------------------------- 1 | # This workflow uses actions that are not certified by GitHub. They are provided 2 | # by a third-party and are governed by separate terms of service, privacy 3 | # policy, and support documentation. 4 | 5 | name: Scorecard supply-chain security 6 | on: 7 | # For Branch-Protection check. Only the default branch is supported. See 8 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection 9 | branch_protection_rule: 10 | # To guarantee Maintained check is occasionally updated. 
See 11 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained 12 | schedule: 13 | - cron: '20 8 * * 6' 14 | push: 15 | branches: [ "main" ] 16 | 17 | # Declare default permissions as read only. 18 | permissions: read-all 19 | 20 | jobs: 21 | analysis: 22 | name: Scorecard analysis 23 | runs-on: ubuntu-latest 24 | # `publish_results: true` only works when run from the default branch. conditional can be removed if disabled. 25 | if: github.event.repository.default_branch == github.ref_name || github.event_name == 'pull_request' 26 | permissions: 27 | # Needed to upload the results to code-scanning dashboard. 28 | security-events: write 29 | # Needed to publish results and get a badge (see publish_results below). 30 | id-token: write 31 | # Uncomment the permissions below if installing in a private repository. 32 | # contents: read 33 | # actions: read 34 | 35 | steps: 36 | - name: "Checkout code" 37 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 38 | with: 39 | persist-credentials: false 40 | 41 | - name: "Run analysis" 42 | uses: ossf/scorecard-action@f49aabe0b5af0936a0987cfb85d86b75731b0186 # v2.4.1 43 | with: 44 | results_file: results.sarif 45 | results_format: sarif 46 | # (Optional) "write" PAT token. Uncomment the `repo_token` line below if: 47 | # - you want to enable the Branch-Protection check on a *public* repository, or 48 | # - you are installing Scorecard on a *private* repository 49 | # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action?tab=readme-ov-file#authentication-with-fine-grained-pat-optional. 50 | # repo_token: ${{ secrets.SCORECARD_TOKEN }} 51 | 52 | # Public repositories: 53 | # - Publish results to OpenSSF REST API for easy access by consumers 54 | # - Allows the repository to include the Scorecard badge. 55 | # - See https://github.com/ossf/scorecard-action#publishing-results. 
56 | # For private repositories: 57 | # - `publish_results` will always be set to `false`, regardless 58 | # of the value entered here. 59 | publish_results: true 60 | 61 | # (Optional) Uncomment file_mode if you have a .gitattributes with files marked export-ignore 62 | # file_mode: git 63 | 64 | # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF 65 | # format to the repository Actions tab. 66 | - name: "Upload artifact" 67 | uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 68 | with: 69 | name: SARIF file 70 | path: results.sarif 71 | retention-days: 5 72 | 73 | # Upload the results to GitHub's code scanning dashboard (optional). 74 | # Commenting out will disable upload of results to your repo's Code Scanning dashboard 75 | - name: "Upload to code-scanning" 76 | uses: github/codeql-action/upload-sarif@v3 77 | with: 78 | sarif_file: results.sarif 79 | --------------------------------------------------------------------------------