├── project ├── build.properties └── plugins.sbt ├── .scalafmt.conf ├── .gitignore ├── flake.lock ├── README.md ├── flake.nix ├── src ├── test │ └── scala │ │ ├── EventTest.scala │ │ ├── RelayTest.scala │ │ ├── FilterTest.scala │ │ └── NIP19Test.scala └── main │ └── scala │ ├── Filter.scala │ ├── Event.scala │ ├── Relay.scala │ └── NIP19.scala └── .github └── workflows ├── clean.yml └── ci.yml /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=1.10.7 2 | -------------------------------------------------------------------------------- /.scalafmt.conf: -------------------------------------------------------------------------------- 1 | version = 3.5.8 2 | runner.dialect = scala3 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | .bsp/ 3 | .idea/ 4 | out/ 5 | debug.log 6 | .metals/ 7 | .bloop/ 8 | project/metals.sbt 9 | project/project 10 | data/ 11 | node_modules 12 | yarn* 13 | package*.json 14 | -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("org.typelevel" % "sbt-typelevel" % "0.7.7") 2 | addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.16.0") 3 | addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.3.2") 4 | addSbtPlugin("com.fiatjaf" %% "sbt-esbuild" % "0.1.1") 5 | addSbtPlugin("io.chrisdavenport" % "sbt-npm-dependencies" % "0.0.1") 6 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "nixpkgs": { 4 | "locked": { 5 | "lastModified": 1738410390, 6 | "narHash": "sha256-xvTo0Aw0+veek7hvEVLzErmJyQkEcRk6PSR4zsRQFEc=", 7 | "owner": "nixos", 8 | "repo": "nixpkgs", 9 | "rev": "3a228057f5b619feb3186e986dbe76278d707b6e", 10 | "type": "github" 11 | }, 12 | "original": { 13 | "owner": "nixos", 14 | "ref": "nixos-unstable", 15 | "repo": "nixpkgs", 16 | "type": "github" 17 | } 18 | }, 19 | "root": { 20 | "inputs": { 21 | "nixpkgs": "nixpkgs" 22 | } 23 | } 24 | }, 25 | "root": "root", 26 | "version": 7 27 | } 28 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## snow 2 | A library for doing Nostr things with Scala. Used by [Nostr Army Knife](https://nak.nostr.com). 3 | 4 | ### Building/testing locally 5 | 6 | > note: this project is currently Scala.js only, and is not yet cross-built for JVM or Native 7 | 8 | 1. install the [Nix](https://nixos.org) package manager, and make sure [flakes](https://wiki.nixos.org/wiki/Flakes) are enabled 9 | 2. check out this repo and `cd` into it 10 | 3. `nix develop` will get you into a dev environment with all the things (`sbt`, `node`) 11 | 4. `sbt esInstall && cp -a target/esbuild/. ./` will make sure that the right npm 12 | modules are installed locally for testing 13 | 5. `sbt test` to run everything, or `sbt 'testOnly snow.NIP19Test'` to test specific things (`NIP19Test` in this case; the quotes keep sbt from reading the suite name as a separate command) 14 | 15 | #### GitHub Workflows 16 | 17 | 1. `sbt githubWorkflowGenerate` 18 | 2.
`git add .github/workflows/ci.yml && git commit -m "update github workflow"` -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "A Nix-flake-based Scala development environment"; 3 | 4 | inputs.nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable"; 5 | outputs = { self, nixpkgs }: 6 | let 7 | javaVersion = 17; # Change this value to update the whole stack 8 | 9 | supportedSystems = [ "x86_64-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin" ]; 10 | forEachSupportedSystem = f: nixpkgs.lib.genAttrs supportedSystems (system: f { 11 | pkgs = import nixpkgs { inherit system; overlays = [ self.overlays.default ]; }; 12 | }); 13 | 14 | 15 | in 16 | { 17 | overlays.default = final: prev: 18 | let 19 | jdk = prev."jdk${toString javaVersion}"; 20 | in 21 | { 22 | sbt = prev.sbt.override { jre = jdk; }; 23 | scala = prev.scala_3.override { jre = jdk; }; 24 | }; 25 | 26 | devShells = forEachSupportedSystem ({ pkgs }: { 27 | default = pkgs.mkShell { 28 | buildInputs = with pkgs; [ 29 | stdenv 30 | sbt 31 | openjdk 32 | boehmgc 33 | libunwind 34 | clang 35 | zlib 36 | secp256k1 37 | nodejs 38 | yarn 39 | just 40 | ]; 41 | packages = with pkgs; [ 42 | scala 43 | sbt 44 | coursier 45 | scala-cli 46 | ]; 47 | }; 48 | }); 49 | }; 50 | } 51 | -------------------------------------------------------------------------------- /src/test/scala/EventTest.scala: -------------------------------------------------------------------------------- 1 | package snow 2 | 3 | import utest.* 4 | import io.circe.syntax.* 5 | import io.circe.parser.{parse, decode} 6 | import scodec.bits.ByteVector 7 | import scoin.PrivateKey 8 | 9 | object EventTest extends TestSuite { 10 | val tests = Tests { 11 | test("decode event") { 12 | val evtj = 13 | """{"id":"dc90c95f09947507c1044e8f48bcf6350aa6bff1507dd4acfc755b9239b5c962","pubkey":"3bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d","created_at":1644271588,"kind":1,"tags":[],"content":"now that https://blueskyweb.org/blog/2-7-2022-overview was announced we can stop working on nostr?","sig":"230e9d8f0ddaf7eb70b5f7741ccfa37e87a455c9a469282e3464e2052d3192cd63a167e196e381ef9d7e69e9ea43af2443b839974dc85d8aaab9efe1d9296524"}""" 14 | val dec = decode[Event](evtj) 15 | assert(dec.isRight) 16 | 17 | val event = dec.toTry.get 18 | assert(event.isValid) 19 | 20 | event.kind ==> 1 21 | } 22 | 23 | test("sign and encode event") { 24 | val event = Event(1, "hello hello", created_at = 1234567).sign( 25 | PrivateKey( 26 | ByteVector.fromValidHex( 27 | "7708c95f09947507c1044e8f48bcf6350aa6bff1507dd4acfc755b9239b5c962" 28 | ) 29 | ) 30 | ) 31 | 32 | assert(event.isValid) 33 | assert(event.created_at == 1234567) 34 | 35 | val evtj = event.asJson.noSpaces 36 | assert(evtj.startsWith("""{""")) 37 | 38 | val evt = parse(evtj).toTry.get 39 | assert(evt.hcursor.get[String]("content") == Right("hello hello")) 40 | assert( 41 | evt.hcursor.get[String]("pubkey") == Right( 42 | "4d02cd6628a159d3817bfca98787189a332ea2edc0e3633236fcb7161bdf173e" 43 | ) 44 | ) 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /.github/workflows/clean.yml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by sbt-github-actions using the 2 | # githubWorkflowGenerate task. You should add and commit this file to 3 | # your git repository. 
It goes without saying that you shouldn't edit 4 | # this file by hand! Instead, if you wish to make changes, you should 5 | # change your sbt build configuration to revise the workflow description 6 | # to meet your needs, then regenerate this file. 7 | 8 | name: Clean 9 | 10 | on: push 11 | 12 | jobs: 13 | delete-artifacts: 14 | name: Delete Artifacts 15 | runs-on: ubuntu-latest 16 | env: 17 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 18 | steps: 19 | - name: Delete artifacts 20 | run: | 21 | # Customize those three lines with your repository and credentials: 22 | REPO=${GITHUB_API_URL}/repos/${{ github.repository }} 23 | 24 | # A shortcut to call GitHub API. 25 | ghapi() { curl --silent --location --user _:$GITHUB_TOKEN "$@"; } 26 | 27 | # A temporary file which receives HTTP response headers. 28 | TMPFILE=/tmp/tmp.$$ 29 | 30 | # An associative array, key: artifact name, value: number of artifacts of that name. 31 | declare -A ARTCOUNT 32 | 33 | # Process all artifacts on this repository, loop on returned "pages". 34 | URL=$REPO/actions/artifacts 35 | while [[ -n "$URL" ]]; do 36 | 37 | # Get current page, get response headers in a temporary file. 38 | JSON=$(ghapi --dump-header $TMPFILE "$URL") 39 | 40 | # Get URL of next page (strip the <...> wrapper from the Link header). Will be empty if we are at the last page. 41 | URL=$(grep '^Link:' "$TMPFILE" | tr ',' '\n' | grep 'rel="next"' | head -1 | sed -e 's/.*<//' -e 's/>.*//') 42 | rm -f $TMPFILE 43 | 44 | # Number of artifacts on this page: 45 | COUNT=$(( $(jq <<<$JSON -r '.artifacts | length') )) 46 | 47 | # Loop on all artifacts on this page. 48 | for ((i=0; $i < $COUNT; i++)); do 49 | 50 | # Get name of artifact and count instances of this name. 51 | name=$(jq <<<$JSON -r ".artifacts[$i].name?") 52 | ARTCOUNT[$name]=$(( $(( ${ARTCOUNT[$name]} )) + 1)) 53 | 54 | id=$(jq <<<$JSON -r ".artifacts[$i].id?") 55 | size=$(( $(jq <<<$JSON -r ".artifacts[$i].size_in_bytes?") )) 56 | printf "Deleting '%s' #%d, %'d bytes\n" $name ${ARTCOUNT[$name]} $size 57 | ghapi -X DELETE $REPO/actions/artifacts/$id 58 | done 59 | done 60 | -------------------------------------------------------------------------------- /src/test/scala/RelayTest.scala: -------------------------------------------------------------------------------- 1 | package snow 2 | 3 | import utest.* 4 | import scala.concurrent.duration.* 5 | import scala.scalajs 6 | import cats.implicits.* 7 | import cats.effect.* 8 | import cats.effect.unsafe.implicits.global 9 | import org.http4s.syntax.literals.uri 10 | import fs2.concurrent.Channel 11 | 12 | object RelayTest extends TestSuite { 13 | /** 14 | * Note: this is a pretty poorly written test and surely 15 | * can/should be cleaned up. However, when running this test 16 | * with `debugOn=true` passed to `Relay.mkResourceForIO` it 17 | * can be seen in the output that the test: 18 | - creates two subscriptions: 19 | 1. a subscription for kind 0 events (internally this is given subid 0) 20 | 2.
a subscription for kind 1 events (internally this is given subid 1) 21 | - the correct events are sent to and received by the correct subscription 22 | - no events are skipped/lost 23 | */ 24 | val tests = Tests { 25 | test("connect to relay and subscribe") { 26 | val numStoredEvents = 3 27 | val program = 28 | Relay.mkResourceForIO(uri"wss://relay.damus.io", debugOn = true).flatMap { 29 | relay => 30 | ( 31 | relay.subscribe( 32 | Filter(kinds = List(0), limit = Some(numStoredEvents)) 33 | ), 34 | relay.subscribe( 35 | Filter(kinds = List(1), limit = Some(numStoredEvents)) 36 | )).parTupled 37 | }.use { 38 | case ((stored, live),(stored2, live2)) => 39 | stored.traverse(e => IO.println((e.kind, e.hash))) *> 40 | IO.println(s"done processing ${stored.size} stored events") *> 41 | IO.delay { 42 | assert(stored.size == numStoredEvents) 43 | } *> IO.println("now processing live stream of events (stopping after 1)") *> 44 | live 45 | .take(1) 46 | .evalTap(e => IO.println((e.kind,e.hash))) 47 | .compile 48 | .drain 49 | *> stored2.traverse(e => IO.println((e.kind, e.hash))) *> 50 | IO.println(s"done processing ${stored2.size} stored2 events") *> 51 | IO.delay { 52 | assert(stored2.size == numStoredEvents) 53 | } *> IO.println("now processing live stream2 of events (stopping after 1)") *> 54 | live2 55 | .take(1) 56 | .evalTap(e => IO.println((e.kind,e.hash))) 57 | .compile 58 | .drain 59 | } 60 | program.unsafeToFuture() 61 | } 62 | 63 | } 64 | } 65 | --------------------------------------------------------------------------------
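The doc comment above spells out the `subscribe` contract: a list of already-stored events, plus a live stream of everything that arrives afterwards. As a cleaner companion to that test, here is a minimal usage sketch; the relay URL, filter values, and the `SubscribeExample` name are illustrative only, not part of the library:

```scala
import cats.implicits.*
import cats.effect.*
import org.http4s.syntax.literals.uri
import snow.{Relay, Filter}

// minimal sketch: subscribe with one filter, print the stored events,
// then watch the live stream until the first new event arrives
object SubscribeExample extends IOApp.Simple {
  def run: IO[Unit] =
    Relay(uri"wss://relay.damus.io")
      .flatMap(relay => relay.subscribe(Filter(kinds = List(1), limit = Some(5))))
      .use { case (stored, live) =>
        // `stored` holds everything delivered before EOSE;
        // `live` is the stream of events that come after it
        IO.println(s"got ${stored.size} stored events") *>
          live.take(1).evalTap(e => IO.println(e.content)).compile.drain
      }
}
```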
/src/main/scala/Filter.scala: -------------------------------------------------------------------------------- 1 | package snow 2 | 3 | import scala.util.chaining.* 4 | import io.circe.* 5 | import io.circe.syntax.* 6 | 7 | object Filter { 8 | given Decoder[Filter] = new Decoder[Filter] { 9 | final def apply(c: HCursor): Decoder.Result[Filter] = { 10 | // tag fields 11 | val tags = 12 | c.keys 13 | .map( 14 | _.filter(_.startsWith("#")) 15 | .flatMap { key => 16 | c.downField(key).as[List[String]] match { 17 | case Right(v) => Some((key.drop(1), v)) 18 | case Left(_) => None 19 | } 20 | } 21 | .toMap 22 | ) 23 | .getOrElse(Map.empty) 24 | 25 | Right( 26 | Filter( 27 | authors = 28 | c.downField("authors").as[List[String]].getOrElse(List.empty), 29 | kinds = c.downField("kinds").as[List[Int]].getOrElse(List.empty), 30 | ids = c.downField("ids").as[List[String]].getOrElse(List.empty), 31 | tags = tags, 32 | since = c.downField("since").as[Long].toOption, 33 | until = c.downField("until").as[Long].toOption, 34 | limit = c.downField("limit").as[Int].toOption 35 | ) 36 | ) 37 | } 38 | } 39 | given Encoder[Filter] = new Encoder[Filter] { 40 | final def apply(f: Filter): Json = { 41 | var fj = JsonObject() 42 | if (f.ids.size > 0) fj = fj.add("ids", f.ids.asJson) 43 | if (f.authors.size > 0) fj = fj.add("authors", f.authors.asJson) 44 | if (f.kinds.size > 0) fj = fj.add("kinds", f.kinds.asJson) 45 | f.since.foreach { v => fj = fj.add("since", v.asJson) } 46 | f.until.foreach { v => fj = fj.add("until", v.asJson) } 47 | f.limit.foreach { v => fj = fj.add("limit", v.asJson) } 48 | f.tags.foreachEntry { (k, v) => 49 | fj = fj.add(s"#${k}", v.asJson) 50 | } 51 | fj.asJson 52 | } 53 | } 54 | } 55 | 56 | case class Filter( 57 | ids: List[String] = List.empty, 58 | authors: List[String] = List.empty, 59 | kinds: List[Int] = List.empty, 60 | tags: Map[String, List[String]] = Map.empty, 61 | since: Option[Long] = None, 62 | until: Option[Long] = None, 63 | limit: Option[Int] = None 64 | ) { 65 | def matches(event: Event): Boolean = 66 | (ids.isEmpty || event.id.map(id => ids.contains(id)).getOrElse(false)) && 67 | (kinds.isEmpty || kinds.contains(event.kind)) && 68 | (authors.isEmpty || event.pubkey 69 | .map(pubkey => authors.contains(pubkey.toHex)) 70 | .getOrElse(false)) && 71 | tags 72 | .map { case ((tag, vals)) => 73 | event 74 | .getTagValues(tag) 75 | .exists(tagValue => vals.contains(tagValue)) 76 | } 77 | .forall(_ == true) && 78 | since.map(event.created_at > _).getOrElse(true) && 79 | until.map(event.created_at < _).getOrElse(true) 80 | } 81 | -------------------------------------------------------------------------------- /src/test/scala/FilterTest.scala: -------------------------------------------------------------------------------- 1 | package snow 2 | 3 | import utest.* 4 | import scoin.PrivateKey 5 | import scodec.bits.ByteVector 6 | import io.circe.syntax.* 7 | import io.circe.parser.decode 8 | 9 | object FilterTest extends TestSuite { 10 | val tests = Tests { 11 | test("decode and encode filters") { 12 | val filterj = 13 | """{"authors": ["3bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d"], "#e": ["4bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d"]}""" 14 | val dec = decode[Filter](filterj) 15 | assert(dec.isRight) 16 | 17 | val filter = dec.toTry.get 18 | filter.authors( 19 | 0 20 | ) ==> "3bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d" 21 | filter.tags("e")( 22 | 0 23 | ) ==> "4bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d" 24 | 25 | val filterj2 = filter.asJson 26 | assert( 27 | filterj2.hcursor 28 | .get[List[String]]("#e") 29 | .toTry 30 | .get(0) == "4bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d" 31 | ) 32 | assert(filterj2.hcursor.get[List[Int]]("kinds").isLeft) 33 | } 34 | 35 | test("event matching") { 36 | val event = Event( 37 | 1, 38 | "nada", 39 | List( 40 | List( 41 | "e", 42 | "4bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d", 43 | "wss://relay.com" 44 | ), 45 | List( 46 | "p", 47 | "feefc63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d" 48 | ), 49 | List( 50 | "p", 51 | "a098c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d" 52 | ) 53 | ), 54 | 123456789 55 | ).sign( 56 | PrivateKey( 57 | ByteVector.fromValidHex( 58 | "7708c95f09947507c1044e8f48bcf6350aa6bff1507dd4acfc755b9239b5c962" 59 | ) 60 | ) 61 | ) 62 | 63 | assert( 64 | Filter(ids = 65 | List( 66 | "407819c737d33b4fdc6b95a2a1d0d5fd7b4d642ab60b02b683aa7e9020500bd4" 67 | ) 68 | ).matches(event) 69 | ) 70 | 71 | assert( 72 | !Filter(ids = 73 | List( 74 | "507819c737d33b4fdc6b95a2a1d0d5fd7b4d642ab60b02b683aa7e9020500bd4" 75 | ) 76 | ).matches(event) 77 | ) 78 | 79 | assert( 80 | Filter( 81 | authors = List( 82 | "1d02cd6628a159d3817bfca98787189a332ea2edc0e3633236fcb7161bdf173e", 83 | "2d02cd6628a159d3817bfca98787189a332ea2edc0e3633236fcb7161bdf173e", 84 | "3d02cd6628a159d3817bfca98787189a332ea2edc0e3633236fcb7161bdf173e", 85 | "4d02cd6628a159d3817bfca98787189a332ea2edc0e3633236fcb7161bdf173e" 86 | ), 87 | tags = Map( 88 | "p" -> List( 89 | "a098c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d" 90 | ) 91 | ) 92 | ).matches(event) 93 | ) 94 | } 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /src/main/scala/Event.scala: -------------------------------------------------------------------------------- 1 | package snow 2 | 3 | import java.util.Date 4 | import scala.util.Try 5 | import scoin.{Crypto,
PrivateKey, XOnlyPublicKey, ByteVector32, ByteVector64} 6 | import scodec.bits.ByteVector 7 | import cats.syntax.all.* 8 | import io.circe.* 9 | import io.circe.syntax.* 10 | 11 | object Event { 12 | given Encoder[Event] = new Encoder[Event] { 13 | final def apply(evt: Event): Json = { 14 | var jo = JsonObject( 15 | "created_at" := evt.created_at, 16 | "kind" := evt.kind, 17 | "tags" := evt.tags, 18 | "content" := evt.content 19 | ) 20 | 21 | evt.id.foreach { id => 22 | jo = jo.add("id", id.asJson) 23 | } 24 | 25 | evt.pubkey.foreach { pubkey => 26 | jo = jo.add("pubkey", pubkey.toHex.asJson) 27 | } 28 | 29 | evt.sig.foreach { sig => 30 | jo = jo.add("sig", sig.toHex.asJson) 31 | } 32 | 33 | jo.asJson 34 | } 35 | } 36 | 37 | given Decoder[Event] = new Decoder[Event] { 38 | final def apply(c: HCursor): Decoder.Result[Event] = { 39 | ( 40 | c.downField("kind").as[Int], 41 | c.downField("content").as[String], 42 | c.downField("tags").as[List[List[String]]], 43 | c.downField("created_at").as[Long], 44 | c 45 | .downField("pubkey") 46 | .as[String] 47 | .flatMap[DecodingFailure, XOnlyPublicKey](hex => 48 | ByteVector 49 | .fromHex(hex) 50 | .filter(_.size == 32) 51 | .map(b => XOnlyPublicKey(ByteVector32(b))) 52 | .toRight( 53 | DecodingFailure( 54 | DecodingFailure.Reason 55 | .CustomReason("pubkey is not 32 bytes valid hex"), 56 | List.empty 57 | ) 58 | ) 59 | ) 60 | .map(Some(_)) 61 | .recoverWith(_ => Right(None)), 62 | c 63 | .downField("id") 64 | .as[String] 65 | .map { 66 | case hex if ByteVector.fromHex(hex).isDefined && hex.length == 64 => 67 | Some(hex) 68 | case _ => 69 | None 70 | } 71 | .recoverWith(_ => Right(None)), 72 | c 73 | .downField("sig") 74 | .as[String] 75 | .flatMap(hex => 76 | ByteVector 77 | .fromHex(hex) 78 | .filter(_.size == 64) 79 | .map(ByteVector64(_)) 80 | .toRight( 81 | DecodingFailure( 82 | DecodingFailure.Reason 83 | .CustomReason("signature is not 64 bytes hex"), 84 | List.empty 85 | ) 86 | ) 87 | ) 88 | .map(Some(_)) 89 | .recoverWith(_ => Right(None)) 90 | ) 91 | .mapN(Event.apply) 92 | } 93 | } 94 | } 95 | 96 | case class Event( 97 | kind: Int, 98 | content: String, 99 | tags: List[List[String]] = List.empty, 100 | created_at: Long = new Date().getTime() / 1000, 101 | pubkey: Option[XOnlyPublicKey] = None, 102 | id: Option[String] = None, 103 | sig: Option[ByteVector64] = None 104 | ) { 105 | override def toString(): String = this.asJson.noSpaces 106 | 107 | lazy val serialized: String = 108 | List[Json]( 109 | 0.asJson, 110 | pubkey.map(_.toHex).getOrElse("").asJson, 111 | created_at.asJson, 112 | kind.asJson, 113 | tags.asJson, 114 | content.asJson 115 | ).asJson.noSpaces 116 | 117 | lazy val hash: ByteVector32 = 118 | Crypto.sha256(ByteVector.encodeUtf8(serialized).toOption.get) 119 | 120 | def sign(privateKey: PrivateKey): Event = { 121 | val event = this.copy(pubkey = Some(privateKey.publicKey.xonly)) 122 | event.copy( 123 | id = Some(event.hash.toHex), 124 | sig = Some(Crypto.signSchnorr(event.hash, privateKey)) 125 | ) 126 | } 127 | 128 | def isValid: Boolean = 129 | id == Some(hash.toHex) && (for { 130 | pk <- pubkey 131 | sig64 <- sig 132 | } yield Crypto.verifySignatureSchnorr(sig64, hash, pk)).getOrElse(false) 133 | 134 | def getTagValues(key: String): List[String] = 135 | tags.filter(items => items.size >= 2 && items(0) == key).map(_(1)) 136 | } 137 | --------------------------------------------------------------------------------
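`sign` and `isValid` above are the heart of the event model: signing derives the x-only pubkey, the id (a sha256 over the serialized event), and a Schnorr signature. A tiny sketch of the round trip; the hardcoded key is a throwaway example, never a real secret:

```scala
import scodec.bits.ByteVector
import scoin.PrivateKey
import snow.Event

// throwaway private key, for illustration only
val sk = PrivateKey(
  ByteVector.fromValidHex(
    "0000000000000000000000000000000000000000000000000000000000000001"
  )
)

// `sign` fills in pubkey, id and sig on a copy of the event
val signed = Event(kind = 1, content = "hello world").sign(sk)

assert(signed.isValid)                       // the schnorr signature verifies
assert(signed.id == Some(signed.hash.toHex)) // the id commits to the content
```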
/src/test/scala/NIP19Test.scala: -------------------------------------------------------------------------------- 1 | package snow 2 | 3 | import utest.* 4 | import io.circe.syntax.* 5 | import io.circe.parser.{parse, decode} 6 | import scodec.bits.ByteVector 7 | import scoin.{PrivateKey,XOnlyPublicKey, ByteVector32} 8 | 9 | object NIP19Test extends TestSuite { 10 | val tests = Tests { 11 | test("decode and encode npub") { 12 | val value = NIP19 13 | .decode( 14 | "npub148ut8u4vr8xqd4gefhg6eyc5636p5zthw3zfse2njfkezegczers59ty0w" 15 | ) 16 | .toTry 17 | .get 18 | 19 | assertMatch(value) { case _: ProfilePointer => } 20 | 21 | value match { 22 | case pp: ProfilePointer => 23 | pp.pubkey.value.toHex ==> "a9f8b3f2ac19cc06d5194dd1ac9314d4741a09777444986553926d9165181647" 24 | NIP19.encode( 25 | pp.pubkey 26 | ) ==> "npub148ut8u4vr8xqd4gefhg6eyc5636p5zthw3zfse2njfkezegczers59ty0w" 27 | case _ => 28 | } 29 | 30 | NIP19.encode( 31 | value 32 | ) ==> "nprofile1qqs2n79n72kpnnqx65v5m5dvjv2dgaq6p9mhg3ycv4feymv3v5vpv3c5kp3v0" 33 | } 34 | 35 | test("decode and encode nsec") { 36 | val value = NIP19 37 | .decode( 38 | "nsec1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqsmhltgl" 39 | ) 40 | .toTry 41 | .get 42 | 43 | assertMatch(value) { case _: PrivateKey => } 44 | 45 | value match { 46 | case sk: PrivateKey => 47 | sk.value.toHex ==> "0000000000000000000000000000000000000000000000000000000000000001" 48 | case _ => 49 | } 50 | 51 | NIP19.encode( 52 | value 53 | ) ==> "nsec1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqsmhltgl" 54 | } 55 | 56 | test("decode and encode note") { 57 | val value = NIP19 58 | .decode( 59 | "note1yw5agdtgrkwytpy2ch9pahfewhf9dyr2zkl56gjqrf4hnz2wc3lqj8w45q" 60 | ) 61 | .toTry 62 | .get 63 | 64 | assertMatch(value) { case _: EventPointer => } 65 | 66 | value match { 67 | case evp: EventPointer => 68 | evp.id ==> "23a9d435681d9c45848ac5ca1edd3975d256906a15bf4d22401a6b79894ec47e" 69 | case _ => 70 | } 71 | } 72 | 73 | test("decode and encode nevent") { 74 | val value = NIP19 75 | .decode( 76 | "nevent1qqsz82w5x45pm8z9sj9vtjs7m5uht5jkjp4pt06dyfqp56me398vglspp3mhxue69uhhstnrdakj7q3q9klqjtr2mgfk0m9h8g80z8xjcv07kv340qjvvjsclrdgt93pf4cqur7gsc" 77 | ) 78 | .toTry 79 | .get 80 | 81 | assertMatch(value) { case _: EventPointer => } 82 | 83 | value match { 84 | case evp: EventPointer => 85 | evp.id ==> "23a9d435681d9c45848ac5ca1edd3975d256906a15bf4d22401a6b79894ec47e" 86 | evp.relays ==> List("wss://x.com/") 87 | evp.author.map(_.value.toHex) ==> Some( 88 | "2dbe092c6ada1367ecb73a0ef11cd2c31feb32357824c64a18f8da8596214d70" 89 | ) 90 | case _ => 91 | } 92 | 93 | val newNevent = NIP19.encode(value) 94 | NIP19.decode(newNevent).toTry.get match { 95 | case evp: EventPointer => 96 | evp.id ==> "23a9d435681d9c45848ac5ca1edd3975d256906a15bf4d22401a6b79894ec47e" 97 | evp.relays ==> List("wss://x.com/") 98 | evp.author.map(_.value.toHex) ==> Some( 99 | "2dbe092c6ada1367ecb73a0ef11cd2c31feb32357824c64a18f8da8596214d70" 100 | ) 101 | case _ => 102 | } 103 | } 104 | 105 | test("decode another nevent with optional kind") { 106 | val evp = EventPointer( 107 | id = "23a9d435681d9c45848ac5ca1edd3975d256906a15bf4d22401a6b79894ec47e", 108 | relays = List("wss://x.com/"), 109 | author = Some(XOnlyPublicKey(ByteVector32.fromValidHex("2dbe092c6ada1367ecb73a0ef11cd2c31feb32357824c64a18f8da8596214d70"))), 110 | kind = Some(1) 111 | ) 112 | val encoded = NIP19.encode(evp) 113 | val value = NIP19.decode(encoded).toTry.get 114 | assertMatch(value) { case _ : EventPointer => } 115 | assert( value == evp ) 116 | } 117 | 118 | test("decode and
encode nprofile") { 119 | val value = NIP19 120 | .decode( 121 | "nprofile1qqsw96tn6z4zpgs24enrec7zak9mzcdekt0edf08vrfenln8t4m5v8sppdmhxue69uhhjtnrdakszrrhwden5te00qhxxmmd9um2rgu3" 122 | ) 123 | .toTry 124 | .get 125 | 126 | assertMatch(value) { case _: ProfilePointer => } 127 | 128 | value match { 129 | case pp: ProfilePointer => 130 | pp.pubkey.value.toHex ==> "e2e973d0aa20a20aae663ce3c2ed8bb161b9b2df96a5e760d399fe675d77461e" 131 | pp.relays.size ==> 2 132 | pp.relays.contains("wss://x.com/") 133 | pp.relays.contains("wss://y.com/") 134 | case _ => 135 | } 136 | 137 | NIP19.encode( 138 | value 139 | ) ==> "nprofile1qqsw96tn6z4zpgs24enrec7zak9mzcdekt0edf08vrfenln8t4m5v8sppdmhxue69uhhjtnrdakszrrhwden5te00qhxxmmd9um2rgu3" 140 | } 141 | 142 | test("decode and encode naddr") { 143 | val value = NIP19 144 | .decode( 145 | "naddr1qqrxyctwv9hxzq3qut5h8592yz3q4tnx8n3u9mvtk9smnvklj6j7wcxnn8lxwhthgc0qxpqqqzgauhfurwa" 146 | ) 147 | .toTry 148 | .get 149 | 150 | assertMatch(value) { case _: AddressPointer => } 151 | 152 | value match { 153 | case addr: AddressPointer => 154 | addr.author.value.toHex ==> "e2e973d0aa20a20aae663ce3c2ed8bb161b9b2df96a5e760d399fe675d77461e" 155 | addr.relays.size ==> 0 156 | addr.kind ==> 37342 157 | addr.d ==> "banana" 158 | case _ => 159 | } 160 | 161 | NIP19.encode( 162 | value 163 | ) ==> "naddr1qqrxyctwv9hxzq3qut5h8592yz3q4tnx8n3u9mvtk9smnvklj6j7wcxnn8lxwhthgc0qxpqqqzgauhfurwa" 164 | } 165 | } 166 | } 167 | -------------------------------------------------------------------------------- /src/main/scala/Relay.scala: -------------------------------------------------------------------------------- 1 | package snow 2 | 3 | import scala.concurrent.duration.* 4 | import scoin.{Crypto, ByteVector32} 5 | import scodec.bits.ByteVector 6 | import fs2.{io => *, *} 7 | import fs2.concurrent.{Topic, Channel} 8 | import cats.implicits.* 9 | import cats.effect.* 10 | import org.http4s.Uri 11 | import org.http4s.client.websocket.* 12 | import org.http4s.dom.* 13 | import io.circe.* 14 | import io.circe.parser.decode 15 | import io.circe.syntax.* 16 | import cats.Show 17 | 18 | /** 19 | * An implementation of Relay[F] provides methods like `subscribe` 20 | * and access to the underlying streams of things while 21 | * hiding the details about how it gets the things. 
22 | */ 23 | trait Relay[F[_]]: 24 | def uri: Uri 25 | def nextId: Ref[F, Int] 26 | def commands: Channel[F, Json] 27 | def events: Topic[F, (String, Event)] 28 | /** 29 | * subscribing with a `Filter` will give us a list of stored events 30 | * and a stream of future events */ 31 | def subscribe( filter: Filter*): Resource[F, (List[Event], Stream[F,Event])] 32 | 33 | object Relay { 34 | 35 | /* this should ideally be for a generic F[_], but we hard code it to F = IO */ 36 | def apply(uri: Uri): Resource[IO, Relay[IO]] = mkResourceForIO(uri) 37 | 38 | /* can make this more generic eventually, but for now it is tied to F = IO */ 39 | def mkResourceForIO(uri: Uri, debugOn: Boolean = false): Resource[IO, Relay[IO]] = 40 | 41 | def debug[A](x: A)(using Show[A]) = if(debugOn) then IO.println(x) else IO.unit 42 | 43 | for 44 | nextId <- Ref[IO].of(0).toResource 45 | commands <- Channel.unbounded[IO, Json].toResource 46 | events <- Topic[IO, (String, Event)].toResource 47 | conn <- WebSocketClient[IO].connectHighLevel(WSRequest(uri)) 48 | 49 | // here we weave together the websocket streams and start the background 50 | // process that keeps them going 51 | background <- { 52 | val receive = debug("opening receive stream") *> conn.receiveStream 53 | .collect { case WSFrame.Text(line, _) => line } 54 | .map(line => decode[List[Json]](line.toString)) 55 | .collect { case Right(v) => v } 56 | .evalTap[IO, Unit] { msg => 57 | msg match { 58 | case msg if msg.size == 2 && msg(0).as[String] == Right("EOSE") => 59 | msg(1).as[String] match { 60 | case Right(subid) => events.publish1((subid, Event(kind = -1, content = ""))).void 61 | *> debug(s"$subid: eose") 62 | case _ => IO.unit 63 | } 64 | case msg if msg.size == 3 && msg(0).as[String] == Right("EVENT") => 65 | (msg(1).as[String], msg(2).as[Event]) match { 66 | case (Right(subid), Right(event)) if event.isValid => 67 | debug(s"$subid: ${event.hash}") 68 | *> events.publish1((subid, event)).void 69 | case _ => IO.unit 70 | } 71 | case _ => IO.unit 72 | } 73 | } 74 | .compile 75 | .drain 76 | 77 | val send = debug("opening send stream") *> commands.stream 78 | .evalMap { msg => debug(s"sending request: $msg") *> conn.sendText(msg.noSpaces) } 79 | .compile 80 | .drain 81 | 82 | (send, receive).parTupled.void.background 83 | } 84 | // only thing left to do now is return our Relay 85 | yield new RelayImplForIO(uri, nextId, commands, events, debugOn) 86 | } 87 | 88 | class RelayImplForIO( 89 | val uri: Uri, 90 | val nextId: Ref[IO, Int], 91 | val commands: Channel[IO, Json], 92 | val events: Topic[IO, (String, Event)], 93 | debugOn: Boolean 94 | ) extends Relay[IO]{ 95 | 96 | def debug[A](x: A)(using Show[A]) = if(debugOn) then IO.println(x.show) else IO.unit 97 | 98 | def subscribe( 99 | filter: Filter* 100 | ): Resource[IO, (List[Event], fs2.Stream[IO, Event])] = { 101 | nextId.getAndUpdate(_ + 1).map(_.toString).toResource.flatMap { 102 | currId => 103 | val send = commands.send( 104 | Seq("REQ".asJson, currId.asJson) 105 | .concat(filter.map(_.asJson)) 106 | .asJson 107 | ) 108 | 109 | val receive = 110 | events 111 | .subscribe(1) 112 | .collect { 113 | case (subid, event) if subid == currId => event 114 | } 115 | 116 | // we make sure to trigger `send` first 117 | send.background *> splitHistorical(receive) 118 | } 119 | } 120 | 121 | /** split into historical versus live events, where an event of kind -1 designates 122 | * the marker between past and present 123 | from SystemFw's help here: 
https://discord.com/channels/632277896739946517/632310980449402880/1337198474252255264 124 | */ 125 | def splitHistorical(in: Stream[IO, Event]): Resource[IO, (List[Event], Stream[IO, Event])] = 126 | (Deferred[IO, List[Event]].toResource, Channel.unbounded[IO, Event].toResource) 127 | .flatMapN { (historical, live) => 128 | def split(in: Stream[IO, Event], acc: Chunk[Event] = Chunk.empty): Pull[IO, Event, Unit] = 129 | in 130 | .pull 131 | .uncons1 // ideally done with uncons + chunk split, left for the reader 132 | .flatMap { 133 | case None => Pull.done 134 | case Some((n, rest)) => 135 | if n.kind == - 1 136 | then Pull.eval(historical.complete(acc.toList)) >> rest.pull.echo 137 | else split(rest, acc ++ Chunk(n)) 138 | } 139 | 140 | split(in) 141 | .stream 142 | .through(live.sendAll) 143 | .compile 144 | .drain 145 | .background 146 | .evalMap{ _ => 147 | historical.get.tupleRight(live.stream) 148 | } 149 | } 150 | } 151 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by sbt-github-actions using the 2 | # githubWorkflowGenerate task. You should add and commit this file to 3 | # your git repository. It goes without saying that you shouldn't edit 4 | # this file by hand! Instead, if you wish to make changes, you should 5 | # change your sbt build configuration to revise the workflow description 6 | # to meet your needs, then regenerate this file. 7 | 8 | name: Continuous Integration 9 | 10 | on: 11 | pull_request: 12 | branches: ['**', '!update/**', '!pr/**'] 13 | push: 14 | branches: ['**', '!update/**', '!pr/**'] 15 | tags: [v*] 16 | 17 | env: 18 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 19 | 20 | 21 | concurrency: 22 | group: ${{ github.workflow }} @ ${{ github.ref }} 23 | cancel-in-progress: true 24 | 25 | jobs: 26 | build: 27 | name: Test 28 | strategy: 29 | matrix: 30 | os: [ubuntu-22.04] 31 | scala: [3] 32 | java: [temurin@8] 33 | runs-on: ${{ matrix.os }} 34 | timeout-minutes: 60 35 | steps: 36 | - name: Checkout current branch (full) 37 | uses: actions/checkout@v4 38 | with: 39 | fetch-depth: 0 40 | 41 | - name: Setup sbt 42 | uses: sbt/setup-sbt@v1 43 | 44 | - name: Setup Java (temurin@8) 45 | id: setup-java-temurin-8 46 | if: matrix.java == 'temurin@8' 47 | uses: actions/setup-java@v4 48 | with: 49 | distribution: temurin 50 | java-version: 8 51 | cache: sbt 52 | 53 | - name: sbt update 54 | if: matrix.java == 'temurin@8' && steps.setup-java-temurin-8.outputs.cache-hit == 'false' 55 | run: sbt +update 56 | 57 | - name: Setup Node.js 58 | uses: actions/setup-node@v4 59 | with: 60 | node-version: 22 61 | 62 | - name: Install Node Modules 63 | run: | 64 | sbt esInstall 65 | cp -a target/esbuild/. 
./ 66 | 67 | - name: Check that workflows are up to date 68 | run: sbt githubWorkflowCheck 69 | 70 | - name: Check headers and formatting 71 | if: matrix.java == 'temurin@8' && matrix.os == 'ubuntu-22.04' 72 | run: sbt '++ ${{ matrix.scala }}' headerCheckAll scalafmtCheckAll 'project /' scalafmtSbtCheck 73 | 74 | - name: Test 75 | run: sbt '++ ${{ matrix.scala }}' test 76 | 77 | - name: Check binary compatibility 78 | if: matrix.java == 'temurin@8' && matrix.os == 'ubuntu-22.04' 79 | run: sbt '++ ${{ matrix.scala }}' mimaReportBinaryIssues 80 | 81 | - name: Generate API documentation 82 | if: matrix.java == 'temurin@8' && matrix.os == 'ubuntu-22.04' 83 | run: sbt '++ ${{ matrix.scala }}' doc 84 | 85 | - name: Make target directories 86 | if: github.event_name != 'pull_request' && (startsWith(github.ref, 'refs/tags/v') || github.ref == 'refs/heads/main') 87 | run: mkdir -p target project/target 88 | 89 | - name: Compress target directories 90 | if: github.event_name != 'pull_request' && (startsWith(github.ref, 'refs/tags/v') || github.ref == 'refs/heads/main') 91 | run: tar cf targets.tar target project/target 92 | 93 | - name: Upload target directories 94 | if: github.event_name != 'pull_request' && (startsWith(github.ref, 'refs/tags/v') || github.ref == 'refs/heads/main') 95 | uses: actions/upload-artifact@v4 96 | with: 97 | name: target-${{ matrix.os }}-${{ matrix.java }}-${{ matrix.scala }} 98 | path: targets.tar 99 | 100 | publish: 101 | name: Publish Artifacts 102 | needs: [build] 103 | if: github.event_name != 'pull_request' && (startsWith(github.ref, 'refs/tags/v') || github.ref == 'refs/heads/main') 104 | strategy: 105 | matrix: 106 | os: [ubuntu-22.04] 107 | java: [temurin@8] 108 | runs-on: ${{ matrix.os }} 109 | steps: 110 | - name: Checkout current branch (full) 111 | uses: actions/checkout@v4 112 | with: 113 | fetch-depth: 0 114 | 115 | - name: Setup sbt 116 | uses: sbt/setup-sbt@v1 117 | 118 | - name: Setup Java (temurin@8) 119 | id: setup-java-temurin-8 120 | if: matrix.java == 'temurin@8' 121 | uses: actions/setup-java@v4 122 | with: 123 | distribution: temurin 124 | java-version: 8 125 | cache: sbt 126 | 127 | - name: sbt update 128 | if: matrix.java == 'temurin@8' && steps.setup-java-temurin-8.outputs.cache-hit == 'false' 129 | run: sbt +update 130 | 131 | - name: Download target directories (3) 132 | uses: actions/download-artifact@v4 133 | with: 134 | name: target-${{ matrix.os }}-${{ matrix.java }}-3 135 | 136 | - name: Inflate target directories (3) 137 | run: | 138 | tar xf targets.tar 139 | rm targets.tar 140 | 141 | - name: Import signing key 142 | if: env.PGP_SECRET != '' && env.PGP_PASSPHRASE == '' 143 | env: 144 | PGP_SECRET: ${{ secrets.PGP_SECRET }} 145 | PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }} 146 | run: echo $PGP_SECRET | base64 -d -i - | gpg --import 147 | 148 | - name: Import signing key and strip passphrase 149 | if: env.PGP_SECRET != '' && env.PGP_PASSPHRASE != '' 150 | env: 151 | PGP_SECRET: ${{ secrets.PGP_SECRET }} 152 | PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }} 153 | run: | 154 | echo "$PGP_SECRET" | base64 -d -i - > /tmp/signing-key.gpg 155 | echo "$PGP_PASSPHRASE" | gpg --pinentry-mode loopback --passphrase-fd 0 --import /tmp/signing-key.gpg 156 | (echo "$PGP_PASSPHRASE"; echo; echo) | gpg --command-fd 0 --pinentry-mode loopback --change-passphrase $(gpg --list-secret-keys --with-colons 2> /dev/null | grep '^sec:' | cut --delimiter ':' --fields 5 | tail -n 1) 157 | 158 | - name: Publish 159 | env: 160 | SONATYPE_USERNAME: ${{ 
secrets.SONATYPE_USERNAME }} 161 | SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }} 162 | SONATYPE_CREDENTIAL_HOST: ${{ secrets.SONATYPE_CREDENTIAL_HOST }} 163 | run: sbt tlCiRelease 164 | 165 | dependency-submission: 166 | name: Submit Dependencies 167 | if: github.event.repository.fork == false && github.event_name != 'pull_request' 168 | strategy: 169 | matrix: 170 | os: [ubuntu-22.04] 171 | java: [temurin@8] 172 | runs-on: ${{ matrix.os }} 173 | steps: 174 | - name: Checkout current branch (full) 175 | uses: actions/checkout@v4 176 | with: 177 | fetch-depth: 0 178 | 179 | - name: Setup sbt 180 | uses: sbt/setup-sbt@v1 181 | 182 | - name: Setup Java (temurin@8) 183 | id: setup-java-temurin-8 184 | if: matrix.java == 'temurin@8' 185 | uses: actions/setup-java@v4 186 | with: 187 | distribution: temurin 188 | java-version: 8 189 | cache: sbt 190 | 191 | - name: sbt update 192 | if: matrix.java == 'temurin@8' && steps.setup-java-temurin-8.outputs.cache-hit == 'false' 193 | run: sbt +update 194 | 195 | - name: Submit Dependencies 196 | uses: scalacenter/sbt-dependency-submission@v2 197 | with: 198 | configs-ignore: test scala-tool scala-doc-tool test-internal 199 | -------------------------------------------------------------------------------- /src/main/scala/NIP19.scala: -------------------------------------------------------------------------------- 1 | package snow 2 | 3 | import scala.util.{Try, Success, Failure} 4 | import scodec.bits.* 5 | import scoin.{PrivateKey, XOnlyPublicKey, Bech32, ByteVector32} 6 | import scoin.Bech32.Bech32Encoding 7 | 8 | case class EventPointer( 9 | id: String, 10 | relays: List[String] = List.empty, 11 | author: Option[XOnlyPublicKey] = None, 12 | kind: Option[Int] = None 13 | ) 14 | 15 | case class ProfilePointer( 16 | pubkey: XOnlyPublicKey, 17 | relays: List[String] = List.empty 18 | ) 19 | 20 | case class AddressPointer( 21 | d: String, 22 | kind: Int, 23 | author: XOnlyPublicKey, 24 | relays: List[String] 25 | ) 26 | 27 | object NIP19 { 28 | import NIP19Decoder.* 29 | import NIP19Encoder.* 30 | 31 | def decode(bech32text: String): Either[ 32 | Throwable, 33 | PrivateKey | EventPointer | ProfilePointer | AddressPointer, 34 | ] = 35 | Try(Bech32.decode(bech32text)) match { 36 | case Failure(err) => Left(err) 37 | case Success((_, _, enc)) if enc == Bech32.Bech32mEncoding => 38 | Left(Error("encoding is bech32m, not bech32")) 39 | case Success((prefix, data5, _)) => 40 | val data = Bech32.five2eight(data5) 41 | prefix match { 42 | case "npub" => decodeNpub(data) 43 | case "nsec" => decodeNsec(data) 44 | case "note" => decodeNote(data) 45 | case "nprofile" => decodeNprofile(data) 46 | case "nevent" => decodeNevent(data) 47 | case "naddr" => decodeNaddr(data) 48 | case _ => Left(Error(s"unsupported prefix '$prefix'")) 49 | } 50 | } 51 | 52 | def encode( 53 | thing: PrivateKey | XOnlyPublicKey | EventPointer | ProfilePointer | 54 | AddressPointer | ByteVector32 55 | ): String = { 56 | val (prefix, bytes) = thing match { 57 | case sk: PrivateKey => encodeNsec(sk) 58 | case pp: ProfilePointer => encodeNprofile(pp) 59 | case evp: EventPointer => encodeNevent(evp) 60 | case addr: AddressPointer => encodeNaddr(addr) 61 | case pk: XOnlyPublicKey => encodeNpub(pk) 62 | case id: ByteVector32 => encodeNote(id) 63 | } 64 | val bytes5 = Bech32.eight2five(bytes.toArray) 65 | Bech32.encode(prefix, bytes5, Bech32Encoding) 66 | } 67 | } 68 | 69 | object NIP19Decoder { 70 | import NIP19TLV.* 71 | 72 | def decodeNpub(data: Array[Byte]): Either[Throwable,
ProfilePointer] = 73 | if data.size == 32 then 74 | Right( 75 | ProfilePointer(pubkey = XOnlyPublicKey(ByteVector32(ByteVector(data)))) 76 | ) 77 | else Left(Error("npub must contain 32 bytes")) 78 | 79 | def decodeNsec(data: Array[Byte]): Either[Throwable, PrivateKey] = 80 | if data.size == 32 then Right(PrivateKey(ByteVector32(ByteVector(data)))) 81 | else Left(Error("nsec must contain 32 bytes")) 82 | 83 | def decodeNote(data: Array[Byte]): Either[Throwable, EventPointer] = 84 | if data.size == 32 then Right(EventPointer(id = ByteVector(data).toHex)) 85 | else Left(Error("note must contain 32 bytes")) 86 | 87 | def decodeNprofile(data: Array[Byte]): Either[Throwable, ProfilePointer] = { 88 | var pubkey: XOnlyPublicKey = null 89 | var relays: List[String] = List.empty 90 | decodeTLV(data).foreach { 91 | case TLVRecord(TLVType.Special, v) => 92 | if (v.size == 32) { 93 | pubkey = XOnlyPublicKey(ByteVector32(v)) 94 | } 95 | case TLVRecord(TLVType.Relays, v) => 96 | relays = v.decodeUtf8Lenient :: relays 97 | case _ => 98 | } 99 | if pubkey != null then Right(ProfilePointer(pubkey, relays)) 100 | else Left(Error("nprofile pubkey record missing or invalid")) 101 | } 102 | 103 | def decodeNevent(data: Array[Byte]): Either[Throwable, EventPointer] = { 104 | var id: String = null 105 | var relays: List[String] = List.empty 106 | var author: Option[XOnlyPublicKey] = None 107 | var kind: Option[Int] = None 108 | decodeTLV(data).foreach { 109 | case TLVRecord(TLVType.Special, v) => 110 | id = v.toHex 111 | case TLVRecord(TLVType.Relays, v) => 112 | relays = v.decodeUtf8Lenient :: relays 113 | case TLVRecord(TLVType.Author, v) => 114 | if (v.size == 32) { 115 | author = Some(XOnlyPublicKey(ByteVector32(v))) 116 | } 117 | case TLVRecord(TLVType.Kind, v) => 118 | kind = Some(v.toInt(signed = false, ordering = ByteOrdering.BigEndian)) 119 | case _ => 120 | } 121 | if id != null then Right(EventPointer(id, relays, author, kind)) 122 | else Left(Error("nevent id record missing or invalid")) 123 | } 124 | 125 | def decodeNaddr(data: Array[Byte]): Either[Throwable, AddressPointer] = 126 | var d: String = null 127 | var relays: List[String] = List.empty 128 | var author: XOnlyPublicKey = null 129 | var kind: Int = -1 130 | decodeTLV(data).foreach { 131 | case TLVRecord(TLVType.Special, v) => 132 | d = v.decodeUtf8Lenient 133 | case TLVRecord(TLVType.Relays, v) => 134 | relays = v.decodeUtf8Lenient :: relays 135 | case TLVRecord(TLVType.Author, v) => 136 | if (v.size == 32) { 137 | author = XOnlyPublicKey(ByteVector32(v)) 138 | } 139 | case TLVRecord(TLVType.Kind, v) => 140 | kind = v.toInt(signed = false, ordering = ByteOrdering.BigEndian) 141 | case _ => 142 | } 143 | if d == null then Left(Error("naddr d record missing or invalid")) 144 | else if author == null then 145 | Left(Error("naddr author record missing or invalid")) 146 | else if kind == -1 then Left(Error("naddr kind record missing or invalid")) 147 | else Right(AddressPointer(d, kind, author, relays)) 148 | } 149 | 150 | object NIP19Encoder { 151 | import NIP19TLV.* 152 | 153 | def encodeNsec(sk: PrivateKey): (String, ByteVector) = 154 | ("nsec", sk.value.bytes) 155 | def encodeNpub(pk: XOnlyPublicKey): (String, ByteVector) = 156 | ("npub", pk.value.bytes) 157 | def encodeNote(id: ByteVector32): (String, ByteVector) = 158 | ("note", id.bytes) 159 | def encodeNprofile(pp: ProfilePointer): (String, ByteVector) = 160 | ( 161 | "nprofile", 162 | encodeTLVRecords( 163 | TLVRecord(TLVType.Special, pp.pubkey.value.bytes) :: 164 | pp.relays.map(url => 
165 | TLVRecord(TLVType.Relays, ByteVector.encodeUtf8(url).toTry.get) 166 | ) 167 | ) 168 | ) 169 | def encodeNevent(evp: EventPointer): (String, ByteVector) = ( 170 | "nevent", 171 | encodeTLVRecords( 172 | (TLVRecord(TLVType.Special, ByteVector.fromValidHex(evp.id)) :: 173 | evp.author.toList.map(author => 174 | TLVRecord(TLVType.Author, author.value.bytes) 175 | )) ++ 176 | evp.relays.map(url => 177 | TLVRecord(TLVType.Relays, ByteVector.encodeUtf8(url).toTry.get) 178 | ) ++ 179 | evp.kind.toList.map(kind => 180 | TLVRecord(TLVType.Kind, ByteVector.fromInt( 181 | kind, size = 4, ordering = ByteOrdering.BigEndian 182 | ) 183 | ) 184 | ) 185 | 186 | ) 187 | ) 188 | def encodeNaddr(addr: AddressPointer): (String, ByteVector) = 189 | ( 190 | "naddr", 191 | encodeTLVRecords( 192 | TLVRecord(TLVType.Special, ByteVector.encodeUtf8(addr.d).toTry.get) :: 193 | TLVRecord(TLVType.Author, addr.author.value.bytes) :: 194 | TLVRecord( 195 | TLVType.Kind, 196 | ByteVector.fromInt( 197 | addr.kind, 198 | size = 4, 199 | ordering = ByteOrdering.BigEndian 200 | ) 201 | ) :: 202 | addr.relays.map(url => 203 | TLVRecord(TLVType.Relays, ByteVector.encodeUtf8(url).toTry.get) 204 | ) 205 | ) 206 | ) 207 | } 208 | 209 | enum TLVType: 210 | case Special 211 | case Relays 212 | case Author 213 | case Kind 214 | case Unknown 215 | 216 | object TLVType { 217 | def fromByte(b: ByteVector): TLVType = fromByte(b(0)) 218 | 219 | def fromByte(b: Byte): TLVType = b match { 220 | case 0 => TLVType.Special 221 | case 1 => TLVType.Relays 222 | case 2 => TLVType.Author 223 | case 3 => TLVType.Kind 224 | case _ => TLVType.Unknown 225 | } 226 | 227 | def toByte(t: TLVType): Byte = t match { 228 | case TLVType.Special => 0 229 | case TLVType.Relays => 1 230 | case TLVType.Author => 2 231 | case TLVType.Kind => 3 232 | case TLVType.Unknown => throw new Error("can't encode unknown TLV type") 233 | } 234 | } 235 | 236 | object NIP19TLV { 237 | case class TLVRecord(t: TLVType, v: ByteVector) 238 | 239 | def decodeTLV(data: Array[Byte]): List[TLVRecord] = 240 | decodeTLVRecords(List.empty, ByteVector(data)) 241 | 242 | def decodeTLVRecords( 243 | records: List[TLVRecord], 244 | bytes: ByteVector 245 | ): List[TLVRecord] = { 246 | if (bytes.size == 0) records 247 | else 248 | decodeTLVRecord(bytes) 249 | .map((rest, record) => decodeTLVRecords(record :: records, rest)) 250 | .getOrElse(List.empty) 251 | } 252 | 253 | def decodeTLVRecord( 254 | bytes: ByteVector 255 | ): Either[String, (ByteVector, TLVRecord)] = 256 | bytes.consume(1)(b => Right(TLVType.fromByte(b))).flatMap { (rest, t) => 257 | rest 258 | .consume(1)(b => 259 | Right( 260 | b.toInt(signed = false, ordering = ByteOrdering.BigEndian) 261 | ) 262 | ) 263 | .flatMap { (rest, l) => 264 | rest.consume(l)(b => Right(b)).map { (rest, v) => 265 | (rest, TLVRecord(t, v)) 266 | } 267 | } 268 | } 269 | 270 | def encodeTLVRecords(records: List[TLVRecord]): ByteVector = 271 | records.foldLeft(ByteVector.empty)((res, tlv) => 272 | res ++ encodeTLVRecord(tlv) 273 | ) 274 | 275 | def encodeTLVRecord(record: TLVRecord): ByteVector = 276 | ByteVector(TLVType.toByte(record.t), record.v.size.toByte) ++ record.v 277 | } 278 | --------------------------------------------------------------------------------
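To make the TLV layout above concrete: each record is one type byte, one length byte, then `length` value bytes, and records are simply concatenated. A small worked example (the relay URL is arbitrary):

```scala
import scodec.bits.ByteVector
import snow.{NIP19TLV, TLVType}
import snow.NIP19TLV.TLVRecord

// a "Relays" record (type byte 1) holding the UTF-8 bytes of a relay URL
val record = TLVRecord(
  TLVType.Relays,
  ByteVector.encodeUtf8("wss://x.com/").toTry.get
)

// encoded form: 0x01 (type), 0x0c (length = 12), then the 12 URL bytes
val bytes = NIP19TLV.encodeTLVRecord(record)
assert(bytes.size == 14 && bytes(0) == 1 && bytes(1) == 12)

// decoding consumes one record, returning the leftover bytes (none here)
assert(NIP19TLV.decodeTLVRecord(bytes) == Right((ByteVector.empty, record)))
```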