├── .github └── workflows │ ├── pr-csharp-test.yml │ ├── pr-ex-test.yml │ └── pr-rs-test.yml ├── .prettierrc ├── README.md └── impl ├── csharp ├── .gitignore ├── Pika.Tests │ ├── Pika.Tests.csproj │ ├── PikaTests.cs │ ├── SnowflakeTests.cs │ └── Usings.cs ├── Pika.sln ├── Pika │ ├── Base64UrlEncoding.cs │ ├── DecodedPika.cs │ ├── Extensions.cs │ ├── InvalidPrefixError.cs │ ├── Pika.csproj │ ├── PikaConstants.cs │ ├── PikaGenerator.cs │ ├── PikaInitializationOptions.cs │ ├── PikaPrefix.cs │ ├── Snowflakes │ │ ├── DeconstructedSnowflake.cs │ │ ├── EpochResolvable.cs │ │ ├── SequenceExhaustionError.cs │ │ ├── Snowflake.cs │ │ └── SnowflakeGenOptions.cs │ └── UnregisteredPrefixError.cs ├── README.md └── global.json ├── ex ├── .credo.exs ├── .formatter.exs ├── .gitignore ├── README.md ├── benchmarks │ └── generation.exs ├── config │ ├── config.exs │ ├── dev.exs │ └── test.exs ├── lib │ ├── pika.ex │ ├── snowflake.ex │ └── utils.ex ├── mix.exs ├── mix.lock └── test │ ├── pika_test.exs │ └── test_helper.exs ├── js ├── .gitignore ├── .prettierignore ├── .prettierrc ├── README.md ├── bench │ └── gen.js ├── package.json ├── src │ ├── index.ts │ ├── logger.ts │ ├── pika.ts │ └── snowflake.ts ├── tsconfig.json └── yarn.lock └── rs ├── .format.toml ├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── README.md └── src ├── base64.rs ├── lib.rs ├── pika.rs ├── snowflake.rs └── utils.rs /.github/workflows/pr-csharp-test.yml: -------------------------------------------------------------------------------- 1 | name: Test C# implementation on pull request 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | paths: 8 | - "impl/csharp/**" 9 | - ".github/workflows/pr-csharp-test.yml" 10 | 11 | jobs: 12 | build: 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - uses: actions/checkout@v3 17 | - name: Setup .NET 18 | uses: actions/setup-dotnet@v3 19 | with: 20 | dotnet-version: 7.0.x 21 | - name: Restore dependencies 22 | run: dotnet restore impl/csharp 23 | - name: Build 24 | run: 
dotnet build impl/csharp --no-restore 25 | - name: Test 26 | run: dotnet test impl/csharp --verbosity normal --no-build 27 | -------------------------------------------------------------------------------- /.github/workflows/pr-ex-test.yml: -------------------------------------------------------------------------------- 1 | name: Test Elixir implementation 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | paths: 8 | - "impl/ex/**" 9 | - ".github/workflows/pr-ex-test.yml" 10 | 11 | jobs: 12 | credo: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v4 16 | - uses: erlef/setup-beam@v1 17 | with: 18 | elixir-version: "1.16.1" 19 | otp-version: "26" 20 | - name: Run credo 21 | run: | 22 | cd impl/ex 23 | 24 | mix deps.get && mix deps.compile 25 | 26 | mix credo 27 | 28 | test: 29 | runs-on: ubuntu-latest 30 | name: OTP ${{matrix.otp}} / Elixir ${{matrix.elixir}} 31 | strategy: 32 | matrix: 33 | otp: ["25"] 34 | elixir: ["1.16.1", "1.15.7", "1.14.0"] 35 | steps: 36 | - uses: actions/checkout@v4 37 | - uses: erlef/setup-beam@v1 38 | with: 39 | otp-version: ${{matrix.otp}} 40 | elixir-version: ${{matrix.elixir}} 41 | - name: Run Tests 42 | run: | 43 | cd impl/ex 44 | 45 | mix deps.get && mix deps.compile 46 | 47 | mix test 48 | 49 | -------------------------------------------------------------------------------- /.github/workflows/pr-rs-test.yml: -------------------------------------------------------------------------------- 1 | name: Test Rust implementation on pull request 2 | on: 3 | pull_request: 4 | branches: 5 | - main 6 | paths: 7 | - "impl/rs/**" 8 | - ".github/workflows/pr-rs-test.yml" 9 | 10 | jobs: 11 | cargo-test: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v2 15 | - uses: actions-rs/toolchain@v1 16 | with: 17 | toolchain: stable 18 | override: true 19 | - name: Test with cargo 20 | # Since github actions can't change the working directory, we have to cd int o the impl/rs directory 21 | # and thus run 
cargo clippy manually, instead of using the cargo action 22 | run: | 23 | cd impl/rs 24 | cargo test --all-targets --all-features 25 | 26 | cargo-clippy: 27 | runs-on: ubuntu-latest 28 | steps: 29 | - uses: actions/checkout@v2 30 | - uses: actions-rs/toolchain@v1 31 | with: 32 | toolchain: stable 33 | override: true 34 | - name: Clippy with cargo 35 | run: | 36 | cd impl/rs 37 | cargo clippy --all-targets --all-features -- -D warnings 38 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "useTabs": false, 3 | "tabWidth": 2 4 | } 5 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pika 2 | 3 | Combine Stripe IDs with Snowflake IDs and you get... pika! - the last ID system you'll ever need, combining pragmatism with functionality. 4 | 5 | Example ID: `user_MTI5Njg4Njg1MDQwODg5ODYx` 6 | 7 | ## Features 8 | 9 | - Object type prefixes 10 | - Type prefix atomicity 11 | - Guaranteed multi-node space-time uniqueness 12 | - Timestamped 13 | - Fast & simple 14 | - Shorter than UUIDs 15 | - Standalone 16 | - Option to be cryptographically secure 17 | 18 | ## Disadvantages / Trade-offs vs. 
Snowflakes 19 | 20 | - Unable to sequence by integer (pikas are strings) 21 | - Slower generation (by a few nanoseconds - pika is 1.5m ops/sec vs snowflake 2m ops/sec on an M1, however all Snowflakes and Pikas have a theoretical limit of generating a maximum of 1,024,000 IDs per node per second, so there won't be a difference in throughput either way) 22 | - Slightly larger sizes (pikas are ~28 bytes vs Snowflake's 8 bytes) 23 | 24 | ## Implementations 25 | 26 | - [JS (TypeScript)](/impl/js) 27 | - [Rust](/impl/rs) 28 | - [Elixir](/impl/ex) 29 | - [C#](/impl/csharp) 30 | 31 | ## The ID 32 | 33 | Pika IDs consist of 2 sections: the type prefix and the tail, separated by an underscore. 34 | 35 | ### Type Prefixes 36 | 37 | When creating a pika ID, you must specify the prefix to be prepended - the general rule of thumb should be to use a different prefix for each object type (e.g. `user`, `team`, `post`, etc). 38 | 39 | Type prefixes should be lowercase, short, alphanumeric strings. If you have an object type with a long name, then it's recommended to shorten it down into an acronym or similar. For example, if we had an object type called "connected account", then we'd make the type prefix `ca` - or even if we had a type called "channel", we might want to shorten it down to `ch` - it's up to you to decide what you think makes the most distinctive sense. 40 | 41 | ### Tail 42 | 43 | The tail is the part that comes after the underscore (e.g. `MTI5Njg4Njg1MDQwODg5ODYx`). Usually, this is just a base64-encoded Snowflake ID, however, if the pika is cryptographically secure, then the base64 decoded string value will start with an `s_` prefix, followed by a cryptographically random string, then followed by another underscore and the Snowflake ID. 
44 | 45 | Example of a normal decoded tail: 46 | `129688685040889861` 47 | 48 | Example of a cryptographically secure decoded tail: 49 | `s_387d0775128c383fa8fbf5fd9863b84aba216bcc6872a877_129688685040889861` 50 | 51 | ## Type Prefix Atomicity 52 | 53 | To guarantee that developers use the correct pre-defined prefix types for the right object types, pika requires you to "register" them before they're used to prevent warnings from being thrown. This is also where you define if a prefix type should be cryptographically secure or not. 54 | -------------------------------------------------------------------------------- /impl/csharp/.gitignore: -------------------------------------------------------------------------------- 1 | ### Csharp template 2 | ## Ignore Visual Studio temporary files, build results, and 3 | ## files generated by popular Visual Studio add-ons. 4 | ## 5 | ## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore 6 | 7 | # User-specific files 8 | *.rsuser 9 | *.suo 10 | *.user 11 | *.userosscache 12 | *.sln.docstates 13 | 14 | # User-specific files (MonoDevelop/Xamarin Studio) 15 | *.userprefs 16 | 17 | # Mono auto generated files 18 | mono_crash.* 19 | 20 | # Build results 21 | [Dd]ebug/ 22 | [Dd]ebugPublic/ 23 | [Rr]elease/ 24 | [Rr]eleases/ 25 | x64/ 26 | x86/ 27 | [Ww][Ii][Nn]32/ 28 | [Aa][Rr][Mm]/ 29 | [Aa][Rr][Mm]64/ 30 | bld/ 31 | [Bb]in/ 32 | [Oo]bj/ 33 | [Ll]og/ 34 | [Ll]ogs/ 35 | 36 | # Visual Studio 2015/2017 cache/options directory 37 | .vs/ 38 | # Uncomment if you have tasks that create the project's static files in wwwroot 39 | #wwwroot/ 40 | 41 | # Visual Studio 2017 auto generated files 42 | Generated\ Files/ 43 | 44 | # MSTest test Results 45 | [Tt]est[Rr]esult*/ 46 | [Bb]uild[Ll]og.* 47 | 48 | # NUnit 49 | *.VisualState.xml 50 | TestResult.xml 51 | nunit-*.xml 52 | 53 | # Build Results of an ATL Project 54 | [Dd]ebugPS/ 55 | [Rr]eleasePS/ 56 | dlldata.c 57 | 58 | # Benchmark Results 59 | 
BenchmarkDotNet.Artifacts/ 60 | 61 | # .NET Core 62 | project.lock.json 63 | project.fragment.lock.json 64 | artifacts/ 65 | 66 | # ASP.NET Scaffolding 67 | ScaffoldingReadMe.txt 68 | 69 | # StyleCop 70 | StyleCopReport.xml 71 | 72 | # Files built by Visual Studio 73 | *_i.c 74 | *_p.c 75 | *_h.h 76 | *.ilk 77 | *.meta 78 | *.obj 79 | *.iobj 80 | *.pch 81 | *.pdb 82 | *.ipdb 83 | *.pgc 84 | *.pgd 85 | *.rsp 86 | *.sbr 87 | *.tlb 88 | *.tli 89 | *.tlh 90 | *.tmp 91 | *.tmp_proj 92 | *_wpftmp.csproj 93 | *.log 94 | *.tlog 95 | *.vspscc 96 | *.vssscc 97 | .builds 98 | *.pidb 99 | *.svclog 100 | *.scc 101 | 102 | # Chutzpah Test files 103 | _Chutzpah* 104 | 105 | # Visual C++ cache files 106 | ipch/ 107 | *.aps 108 | *.ncb 109 | *.opendb 110 | *.opensdf 111 | *.sdf 112 | *.cachefile 113 | *.VC.db 114 | *.VC.VC.opendb 115 | 116 | # Visual Studio profiler 117 | *.psess 118 | *.vsp 119 | *.vspx 120 | *.sap 121 | 122 | # Visual Studio Trace Files 123 | *.e2e 124 | 125 | # TFS 2012 Local Workspace 126 | $tf/ 127 | 128 | # Guidance Automation Toolkit 129 | *.gpState 130 | 131 | # ReSharper is a .NET coding add-in 132 | _ReSharper*/ 133 | *.[Rr]e[Ss]harper 134 | *.DotSettings.user 135 | 136 | # TeamCity is a build add-in 137 | _TeamCity* 138 | 139 | # DotCover is a Code Coverage Tool 140 | *.dotCover 141 | 142 | # AxoCover is a Code Coverage Tool 143 | .axoCover/* 144 | !.axoCover/settings.json 145 | 146 | # Coverlet is a free, cross platform Code Coverage Tool 147 | coverage*.json 148 | coverage*.xml 149 | coverage*.info 150 | 151 | # Visual Studio code coverage results 152 | *.coverage 153 | *.coveragexml 154 | 155 | # NCrunch 156 | _NCrunch_* 157 | .*crunch*.local.xml 158 | nCrunchTemp_* 159 | 160 | # MightyMoose 161 | *.mm.* 162 | AutoTest.Net/ 163 | 164 | # Web workbench (sass) 165 | .sass-cache/ 166 | 167 | # Installshield output folder 168 | [Ee]xpress/ 169 | 170 | # DocProject is a documentation generator add-in 171 | DocProject/buildhelp/ 172 | DocProject/Help/*.HxT 
173 | DocProject/Help/*.HxC 174 | DocProject/Help/*.hhc 175 | DocProject/Help/*.hhk 176 | DocProject/Help/*.hhp 177 | DocProject/Help/Html2 178 | DocProject/Help/html 179 | 180 | # Click-Once directory 181 | publish/ 182 | 183 | # Publish Web Output 184 | *.[Pp]ublish.xml 185 | *.azurePubxml 186 | # Note: Comment the next line if you want to checkin your web deploy settings, 187 | # but database connection strings (with potential passwords) will be unencrypted 188 | *.pubxml 189 | *.publishproj 190 | 191 | # Microsoft Azure Web App publish settings. Comment the next line if you want to 192 | # checkin your Azure Web App publish settings, but sensitive information contained 193 | # in these scripts will be unencrypted 194 | PublishScripts/ 195 | 196 | # NuGet Packages 197 | *.nupkg 198 | # NuGet Symbol Packages 199 | *.snupkg 200 | # The packages folder can be ignored because of Package Restore 201 | **/[Pp]ackages/* 202 | # except build/, which is used as an MSBuild target. 203 | !**/[Pp]ackages/build/ 204 | # Uncomment if necessary however generally it will be regenerated when needed 205 | #!**/[Pp]ackages/repositories.config 206 | # NuGet v3's project.json files produces more ignorable files 207 | *.nuget.props 208 | *.nuget.targets 209 | 210 | # Microsoft Azure Build Output 211 | csx/ 212 | *.build.csdef 213 | 214 | # Microsoft Azure Emulator 215 | ecf/ 216 | rcf/ 217 | 218 | # Windows Store app package directories and files 219 | AppPackages/ 220 | BundleArtifacts/ 221 | Package.StoreAssociation.xml 222 | _pkginfo.txt 223 | *.appx 224 | *.appxbundle 225 | *.appxupload 226 | 227 | # Visual Studio cache files 228 | # files ending in .cache can be ignored 229 | *.[Cc]ache 230 | # but keep track of directories ending in .cache 231 | !?*.[Cc]ache/ 232 | 233 | # Others 234 | ClientBin/ 235 | ~$* 236 | *~ 237 | *.dbmdl 238 | *.dbproj.schemaview 239 | *.jfm 240 | *.pfx 241 | *.publishsettings 242 | orleans.codegen.cs 243 | 244 | # Including strong name files can 
present a security risk 245 | # (https://github.com/github/gitignore/pull/2483#issue-259490424) 246 | #*.snk 247 | 248 | # Since there are multiple workflows, uncomment next line to ignore bower_components 249 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) 250 | #bower_components/ 251 | 252 | # RIA/Silverlight projects 253 | Generated_Code/ 254 | 255 | # Backup & report files from converting an old project file 256 | # to a newer Visual Studio version. Backup files are not needed, 257 | # because we have git ;-) 258 | _UpgradeReport_Files/ 259 | Backup*/ 260 | UpgradeLog*.XML 261 | UpgradeLog*.htm 262 | ServiceFabricBackup/ 263 | *.rptproj.bak 264 | 265 | # SQL Server files 266 | *.mdf 267 | *.ldf 268 | *.ndf 269 | 270 | # Business Intelligence projects 271 | *.rdl.data 272 | *.bim.layout 273 | *.bim_*.settings 274 | *.rptproj.rsuser 275 | *- [Bb]ackup.rdl 276 | *- [Bb]ackup ([0-9]).rdl 277 | *- [Bb]ackup ([0-9][0-9]).rdl 278 | 279 | # Microsoft Fakes 280 | FakesAssemblies/ 281 | 282 | # GhostDoc plugin setting file 283 | *.GhostDoc.xml 284 | 285 | # Node.js Tools for Visual Studio 286 | .ntvs_analysis.dat 287 | node_modules/ 288 | 289 | # Visual Studio 6 build log 290 | *.plg 291 | 292 | # Visual Studio 6 workspace options file 293 | *.opt 294 | 295 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 296 | *.vbw 297 | 298 | # Visual Studio 6 auto-generated project file (contains which files were open etc.) 
299 | *.vbp 300 | 301 | # Visual Studio 6 workspace and project file (working project files containing files to include in project) 302 | *.dsw 303 | *.dsp 304 | 305 | # Visual Studio 6 technical files 306 | *.ncb 307 | *.aps 308 | 309 | # Visual Studio LightSwitch build output 310 | **/*.HTMLClient/GeneratedArtifacts 311 | **/*.DesktopClient/GeneratedArtifacts 312 | **/*.DesktopClient/ModelManifest.xml 313 | **/*.Server/GeneratedArtifacts 314 | **/*.Server/ModelManifest.xml 315 | _Pvt_Extensions 316 | 317 | # Paket dependency manager 318 | .paket/paket.exe 319 | paket-files/ 320 | 321 | # FAKE - F# Make 322 | .fake/ 323 | 324 | # CodeRush personal settings 325 | .cr/personal 326 | 327 | # Python Tools for Visual Studio (PTVS) 328 | __pycache__/ 329 | *.pyc 330 | 331 | # Cake - Uncomment if you are using it 332 | # tools/** 333 | # !tools/packages.config 334 | 335 | # Tabs Studio 336 | *.tss 337 | 338 | # Telerik's JustMock configuration file 339 | *.jmconfig 340 | 341 | # BizTalk build output 342 | *.btp.cs 343 | *.btm.cs 344 | *.odx.cs 345 | *.xsd.cs 346 | 347 | # OpenCover UI analysis results 348 | OpenCover/ 349 | 350 | # Azure Stream Analytics local run output 351 | ASALocalRun/ 352 | 353 | # MSBuild Binary and Structured Log 354 | *.binlog 355 | 356 | # NVidia Nsight GPU debugger configuration file 357 | *.nvuser 358 | 359 | # MFractors (Xamarin productivity tool) working folder 360 | .mfractor/ 361 | 362 | # Local History for Visual Studio 363 | .localhistory/ 364 | 365 | # Visual Studio History (VSHistory) files 366 | .vshistory/ 367 | 368 | # BeatPulse healthcheck temp database 369 | healthchecksdb 370 | 371 | # Backup folder for Package Reference Convert tool in Visual Studio 2017 372 | MigrationBackup/ 373 | 374 | # Ionide (cross platform F# VS Code tools) working folder 375 | .ionide/ 376 | 377 | # Fody - auto-generated XML schema 378 | FodyWeavers.xsd 379 | 380 | # VS Code files for those working on multiple tools 381 | .vscode/* 382 | 
!.vscode/settings.json 383 | !.vscode/tasks.json 384 | !.vscode/launch.json 385 | !.vscode/extensions.json 386 | *.code-workspace 387 | 388 | # Local History for Visual Studio Code 389 | .history/ 390 | 391 | # Windows Installer files from build outputs 392 | *.cab 393 | *.msi 394 | *.msix 395 | *.msm 396 | *.msp 397 | 398 | # JetBrains Rider 399 | *.sln.iml 400 | .idea 401 | -------------------------------------------------------------------------------- /impl/csharp/Pika.Tests/Pika.Tests.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | net7.0 5 | enable 6 | enable 7 | 8 | false 9 | true 10 | 11 | 12 | 13 | 14 | 15 | 16 | runtime; build; native; contentfiles; analyzers; buildtransitive 17 | all 18 | 19 | 20 | runtime; build; native; contentfiles; analyzers; buildtransitive 21 | all 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | -------------------------------------------------------------------------------- /impl/csharp/Pika.Tests/PikaTests.cs: -------------------------------------------------------------------------------- 1 | namespace Pika.Tests; 2 | 3 | public class PikaTests 4 | { 5 | [Fact] 6 | public void Pika_GenerateAndDecode_ValidUserAndSecretKey() 7 | { 8 | var pika = new PikaGenerator(new[] 9 | { 10 | new PikaPrefix 11 | { 12 | Prefix = "user", 13 | Description = "User ID", 14 | }, 15 | new PikaPrefix 16 | { 17 | Prefix = "sk", 18 | Description = "Secret key", 19 | Secure = true 20 | } 21 | }); 22 | 23 | // user_MjM4NDAxNDk2MTUzODYyMTQ1 24 | var userId = pika.Generate("user"); 25 | 26 | // sk_c19FMjdGRjMyMjhGNkE0MDdDRDFFMTZEMEY1Mzk1QUVGRl8yMzg0MDE0OTYxNTgwNTY0NTA 27 | var secretKey = pika.Generate("sk"); 28 | 29 | var decodedUserId = pika.Decode(userId); 30 | var decodedSecretKey = pika.Decode(secretKey); 31 | 32 | // Make sure the prefixes are correct 33 | Assert.Equal("user", decodedUserId.Prefix); 34 | Assert.Equal("sk", decodedSecretKey.Prefix); 35 | 36 | // Make sure sequencing is working 37 | 
Assert.Equal(1UL, decodedUserId.Seq); 38 | Assert.Equal(2UL, decodedSecretKey.Seq); 39 | } 40 | 41 | [Fact] 42 | public void Pika_GenerateAndDecode_ValidUserWithUnderscore() 43 | { 44 | var pika = new PikaGenerator(new[] 45 | { 46 | new PikaPrefix 47 | { 48 | Prefix = "user_id", 49 | Description = "User ID", 50 | } 51 | }); 52 | 53 | // user_id_MjM4NDAxNDk2MTUzODYyMTQ1 54 | var userId = pika.Generate("user_id"); 55 | 56 | var decodedUserId = pika.Decode(userId); 57 | 58 | // Make sure the prefixes are correct 59 | Assert.Equal("user_id", decodedUserId.Prefix); 60 | 61 | // Make sure sequencing is working 62 | Assert.Equal(1UL, decodedUserId.Seq); 63 | } 64 | 65 | [Fact] 66 | public void Pika_Uniqueness() 67 | { 68 | var pika = new PikaGenerator(new[] 69 | { 70 | new PikaPrefix 71 | { 72 | Prefix = "user", 73 | Description = "User ID", 74 | }, 75 | new PikaPrefix 76 | { 77 | Prefix = "sk", 78 | Description = "Secret key", 79 | Secure = true 80 | } 81 | }); 82 | 83 | var ids = new HashSet(); 84 | 85 | for (var i = 0; i < 10000; i++) 86 | { 87 | var id = pika.Generate("user"); 88 | Assert.DoesNotContain(id, ids); 89 | ids.Add(id); 90 | } 91 | 92 | ids.Clear(); 93 | 94 | for (var i = 0; i < 10000; i++) 95 | { 96 | var id = pika.Generate("sk"); 97 | Assert.DoesNotContain(id, ids); 98 | ids.Add(id); 99 | } 100 | 101 | ids.Clear(); 102 | } 103 | 104 | [Fact] 105 | public void Pika_Validation_ValidUserPrefix() 106 | { 107 | var pika = new PikaGenerator(new[] 108 | { 109 | new PikaPrefix 110 | { 111 | Prefix = "user", 112 | Description = "User ID", 113 | }, 114 | new PikaPrefix 115 | { 116 | Prefix = "sk", 117 | Description = "Secret key", 118 | Secure = true 119 | } 120 | }); 121 | 122 | Assert.True(pika.Validate("user_MjM4NDAxNDk2MTUzODYyMTQ1")); 123 | } 124 | 125 | [Fact] 126 | public void Pika_Validation_ValidSecureKeyPrefix() 127 | { 128 | var pika = new PikaGenerator(new[] 129 | { 130 | new PikaPrefix 131 | { 132 | Prefix = "user", 133 | Description = "User ID", 134 | 
}, 135 | new PikaPrefix 136 | { 137 | Prefix = "sk", 138 | Description = "Secret key", 139 | Secure = true 140 | } 141 | }); 142 | 143 | Assert.True(pika.Validate("sk_c19FMjdGRjMyMjhGNkE0MDdDRDFFMTZEMEY1Mzk1QUVGRl8yMzg4MDE0OTYxNTgwNTY0NTA")); 144 | } 145 | 146 | [Fact] 147 | public void Pika_Validation_InvalidUserPrefix() 148 | { 149 | var pika = new PikaGenerator(new[] 150 | { 151 | new PikaPrefix 152 | { 153 | Prefix = "user", 154 | Description = "User ID", 155 | }, 156 | new PikaPrefix 157 | { 158 | Prefix = "sk", 159 | Description = "Secret key", 160 | Secure = true 161 | } 162 | }); 163 | 164 | Assert.False(pika.Validate("user_MjM4NDAxNDk2MTUzODYyMTQ1", "sk")); 165 | } 166 | 167 | [Fact] 168 | public void Pika_Validation_InvalidSecureKeyPrefix() 169 | { 170 | var pika = new PikaGenerator(new[] 171 | { 172 | new PikaPrefix 173 | { 174 | Prefix = "user", 175 | Description = "User ID", 176 | }, 177 | new PikaPrefix 178 | { 179 | Prefix = "sk", 180 | Description = "Secret key", 181 | Secure = true 182 | } 183 | }); 184 | 185 | Assert.False( 186 | pika.Validate("sk_c19FMjdGRjMyMjhGNkE0MDdDRDFFMTZEMEY1Mzk1QUVGRl8yMzg4MDE0OTYxNTgwNTY0NTA", "user")); 187 | } 188 | 189 | [Fact] 190 | public void Pika_InvalidPrefix_UppercaseCharacters() 191 | { 192 | Assert.Throws(() => 193 | { 194 | var pika = new PikaGenerator(new[] 195 | { 196 | new PikaPrefix 197 | { 198 | Prefix = "USER", 199 | Description = "User ID", 200 | } 201 | }); 202 | }); 203 | } 204 | 205 | [Fact] 206 | public void Pika_ValidPrefix_UnderscoreCharacters() 207 | { 208 | var pika = new PikaGenerator(new[] 209 | { 210 | new PikaPrefix 211 | { 212 | Prefix = "user_id", 213 | Description = "User ID", 214 | } 215 | }); 216 | } 217 | 218 | [Fact] 219 | public void Pika_InvalidPrefix_NumericCharacters() 220 | { 221 | Assert.Throws(() => 222 | { 223 | var pika = new PikaGenerator(new[] 224 | { 225 | new PikaPrefix 226 | { 227 | Prefix = "user69", 228 | Description = "User ID", 229 | } 230 | }); 231 | }); 232 | } 233 
| 234 | [Fact] 235 | public void Pika_UnregisteredPrefix_Generate() 236 | { 237 | Assert.Throws(() => 238 | { 239 | var pika = new PikaGenerator(new[] 240 | { 241 | new PikaPrefix 242 | { 243 | Prefix = "user", 244 | Description = "User ID", 245 | } 246 | }); 247 | 248 | pika.Generate("sk"); 249 | }); 250 | } 251 | } -------------------------------------------------------------------------------- /impl/csharp/Pika.Tests/SnowflakeTests.cs: -------------------------------------------------------------------------------- 1 | using Pika.Snowflakes; 2 | 3 | namespace Pika.Tests; 4 | 5 | public class SnowflakeTests 6 | { 7 | [Fact] 8 | public void Snowflake_GenerateAndDecode() 9 | { 10 | const ulong epoch = 1640995200000UL; 11 | const ulong nodeId = 1UL; 12 | var snowflake = new Snowflake(epoch, nodeId); 13 | var id = snowflake.Generate(); 14 | var decoded = snowflake.Decode(id); 15 | 16 | Assert.Equal(nodeId, decoded.NodeId); 17 | Assert.Equal(1U, decoded.Seq); 18 | Assert.Equal(epoch, decoded.Epoch); 19 | Assert.Equal(id, decoded.Id); 20 | } 21 | 22 | [Fact] 23 | public void Snowflake_UniqueIds() 24 | { 25 | var snowflake = new Snowflake(1640995200000UL, 1UL); 26 | var ids = new HashSet(); 27 | 28 | for (var i = 0; i < 10000; i++) 29 | { 30 | var id = snowflake.Generate(); 31 | Assert.DoesNotContain(id, ids); 32 | ids.Add(id); 33 | } 34 | } 35 | } -------------------------------------------------------------------------------- /impl/csharp/Pika.Tests/Usings.cs: -------------------------------------------------------------------------------- 1 | global using Xunit; -------------------------------------------------------------------------------- /impl/csharp/Pika.sln: -------------------------------------------------------------------------------- 1 |  2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Pika", "Pika\Pika.csproj", "{10B20CED-2FF1-4078-AA12-69682FC1160B}" 4 | EndProject 5 | 
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Pika.Tests", "Pika.Tests\Pika.Tests.csproj", "{5858B806-2C7D-4F02-8443-71B8D4D09AE5}" 6 | EndProject 7 | Global 8 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 9 | Debug|Any CPU = Debug|Any CPU 10 | Release|Any CPU = Release|Any CPU 11 | EndGlobalSection 12 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 13 | {10B20CED-2FF1-4078-AA12-69682FC1160B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 14 | {10B20CED-2FF1-4078-AA12-69682FC1160B}.Debug|Any CPU.Build.0 = Debug|Any CPU 15 | {10B20CED-2FF1-4078-AA12-69682FC1160B}.Release|Any CPU.ActiveCfg = Release|Any CPU 16 | {10B20CED-2FF1-4078-AA12-69682FC1160B}.Release|Any CPU.Build.0 = Release|Any CPU 17 | {5858B806-2C7D-4F02-8443-71B8D4D09AE5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 18 | {5858B806-2C7D-4F02-8443-71B8D4D09AE5}.Debug|Any CPU.Build.0 = Debug|Any CPU 19 | {5858B806-2C7D-4F02-8443-71B8D4D09AE5}.Release|Any CPU.ActiveCfg = Release|Any CPU 20 | {5858B806-2C7D-4F02-8443-71B8D4D09AE5}.Release|Any CPU.Build.0 = Release|Any CPU 21 | EndGlobalSection 22 | EndGlobal 23 | -------------------------------------------------------------------------------- /impl/csharp/Pika/Base64UrlEncoding.cs: -------------------------------------------------------------------------------- 1 | namespace Pika; 2 | 3 | public class Base64UrlEncoding 4 | { 5 | public static byte[] Decode(string input) 6 | { 7 | var base64 = input.Replace("-", "+").Replace("_", "/"); 8 | switch (base64.Length % 4) 9 | { 10 | case 2: 11 | base64 += "=="; 12 | break; 13 | case 3: 14 | base64 += "="; 15 | break; 16 | } 17 | return Convert.FromBase64String(base64); 18 | } 19 | 20 | public static string Encode(byte[] input) 21 | { 22 | return Convert.ToBase64String(input).Replace("+", "-").Replace("/", "_").Replace("=", ""); 23 | } 24 | } -------------------------------------------------------------------------------- /impl/csharp/Pika/DecodedPika.cs: 
-------------------------------------------------------------------------------- 1 | namespace Pika; 2 | 3 | public class DecodedPika 4 | { 5 | public required string Prefix { get; set; } 6 | 7 | public required string Tail { get; set; } 8 | 9 | public ulong Snowflake { get; set; } 10 | 11 | public uint NodeId { get; set; } 12 | 13 | public ulong Seq { get; set; } 14 | 15 | public byte Version { get; set; } 16 | 17 | public ulong Timestamp { get; set; } 18 | 19 | public required PikaPrefix PrefixRecord { get; set; } 20 | } -------------------------------------------------------------------------------- /impl/csharp/Pika/Extensions.cs: -------------------------------------------------------------------------------- 1 | namespace Pika; 2 | 3 | public static class Extensions 4 | { 5 | public static string ToHexString(this byte[] bytes) 6 | { 7 | return BitConverter.ToString(bytes).Replace("-", string.Empty); 8 | } 9 | } -------------------------------------------------------------------------------- /impl/csharp/Pika/InvalidPrefixError.cs: -------------------------------------------------------------------------------- 1 | namespace Pika; 2 | 3 | public class InvalidPrefixError : Exception 4 | { 5 | public InvalidPrefixError(string prefix) : base($"The prefix \"{prefix}\" is invalid.") 6 | { 7 | } 8 | } -------------------------------------------------------------------------------- /impl/csharp/Pika/Pika.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | net7.0 5 | enable 6 | enable 7 | Pika 8 | 1.0.1 9 | cat419 10 | A C# implementation of hop.io's Pika, a combination of Stripe id's and Snowflakes. 
11 | MIT 12 | 13 | https://github.com/hopinc/pika 14 | 15 | 16 | https://github.com/hopinc/pika 17 | 18 | git 19 | stripe, snowflake, pika 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /impl/csharp/Pika/PikaConstants.cs: -------------------------------------------------------------------------------- 1 | using System.Text.RegularExpressions; 2 | 3 | namespace Pika; 4 | 5 | public static partial class PikaConstants 6 | { 7 | public const ulong DefaultEpoch = 1640995200000UL; 8 | 9 | public const ulong SequenceMask = 0b11111111111111111111111111; 10 | } -------------------------------------------------------------------------------- /impl/csharp/Pika/PikaGenerator.cs: -------------------------------------------------------------------------------- 1 | using System.Security.Cryptography; 2 | using System.Text; 3 | using System.Text.RegularExpressions; 4 | using Pika.Snowflakes; 5 | 6 | namespace Pika; 7 | 8 | public class PikaGenerator 9 | { 10 | public Dictionary Prefixes { get; set; } = new(); 11 | 12 | private readonly Snowflake _snowflake; 13 | 14 | public PikaGenerator(IEnumerable prefixes, PikaInitializationOptions? options = null) 15 | { 16 | options ??= new PikaInitializationOptions(); 17 | var nodeId = options.NodeId.HasValue ? (ulong) (options.NodeId.Value % (int) 1024ul) : ComputeNodeId(); 18 | _snowflake = new Snowflake(options.Epoch ?? PikaConstants.DefaultEpoch, nodeId); 19 | foreach (var definition in prefixes) 20 | { 21 | if (!options.DangerouslyDisablePrefixValidation) 22 | { 23 | if (!ValidatePrefix(definition.Prefix)) 24 | { 25 | throw new InvalidPrefixError(definition.Prefix); 26 | } 27 | } 28 | 29 | Prefixes[definition.Prefix] = definition; 30 | } 31 | } 32 | 33 | public bool Validate(string id, string? 
expectPrefix = default) 34 | { 35 | var s = id.Split('_'); 36 | var tail = s[^1]; 37 | var prefix = string.Join("_", s, 0, s.Length - 1); 38 | if (string.IsNullOrEmpty(tail)) 39 | { 40 | return false; 41 | } 42 | 43 | if (expectPrefix != null && prefix != expectPrefix) 44 | { 45 | return false; 46 | } 47 | 48 | if (expectPrefix != null) 49 | { 50 | return prefix == expectPrefix; 51 | } 52 | 53 | return Prefixes.ContainsKey(prefix); 54 | } 55 | 56 | public string Generate(string prefix) 57 | { 58 | if (!Prefixes.ContainsKey(prefix)) 59 | { 60 | throw new UnregisteredPrefixError(prefix); 61 | } 62 | 63 | var snowflake = _snowflake.Generate(); 64 | var secure = Prefixes[prefix].Secure; 65 | var tail = secure 66 | ? Encoding.UTF8.GetBytes($"s_{RandomHexString(16)}_{snowflake}") 67 | : Encoding.UTF8.GetBytes(snowflake.ToString()); 68 | 69 | return $"{prefix}_{Base64UrlEncoding.Encode(tail)}"; 70 | } 71 | 72 | private string RandomHexString(int length) 73 | { 74 | var bytes = new byte[length]; 75 | var rng = RandomNumberGenerator.Create(); 76 | rng.GetBytes(bytes); 77 | return bytes.ToHexString(); 78 | } 79 | 80 | public ulong GenerateSnowflake() 81 | { 82 | return _snowflake.Generate(); 83 | } 84 | 85 | public DecodedPika Decode(string id) 86 | { 87 | try 88 | { 89 | var split = id.Split('_'); 90 | var tail = split[^1]; 91 | var prefix = string.Join("_", split, 0, split.Length - 1); 92 | 93 | var decodedTail = Base64UrlEncoding.Decode(tail); 94 | var tailString = Encoding.UTF8.GetString(decodedTail); 95 | var snowflake = ulong.Parse(tailString.Split('_')[^1]); 96 | 97 | var deconstructed = _snowflake.Decode(snowflake); 98 | 99 | return new DecodedPika 100 | { 101 | Prefix = prefix, 102 | Tail = tail, 103 | PrefixRecord = Prefixes[prefix], 104 | Snowflake = snowflake, 105 | Version = 1, 106 | NodeId = deconstructed.NodeId, 107 | Seq = deconstructed.Seq, 108 | Timestamp = deconstructed.Timestamp 109 | }; 110 | } 111 | catch (Exception) 112 | { 113 | 
Console.WriteLine("Failed to decode ID " + id); 114 | throw; 115 | } 116 | } 117 | 118 | // Prefixes must be lowercase words optionally joined by single underscores. 119 | private static bool ValidatePrefix(string prefix) 120 | { 121 | return Regex.IsMatch(prefix, "^[a-z]+(_[a-z]+)*$"); 122 | } 123 | 124 | // Derives a stable 10-bit node id from the first usable MAC address; falls back to 0. 125 | private static ulong ComputeNodeId() 126 | { 127 | try 128 | { 129 | foreach (var networkInterface in System.Net.NetworkInformation.NetworkInterface.GetAllNetworkInterfaces()) 130 | { 131 | // PhysicalAddress.ToString() returns colon-less hex (e.g. "0A1B2C3D4E5F") and an empty string for interfaces with no MAC (loopback), so the old comparison against "00:00:00:00:00:00" never matched and an empty MAC made ulong.Parse throw — sending every call into the catch block and yielding node id 0. Skip empty/all-zero addresses instead. 132 | var mac = networkInterface.GetPhysicalAddress().ToString(); 133 | if (mac.Length == 0 || mac.TrimStart('0').Length == 0) continue; 134 | return ulong.Parse(mac, System.Globalization.NumberStyles.HexNumber) % 1024; 135 | } 136 | 137 | throw new Exception("No network interfaces found"); 138 | } 139 | catch (Exception e) 140 | { 141 | Console.WriteLine("Failed to compute node ID, falling back to 0. Error:\n" + e); 142 | return 0UL; 143 | } 144 | } 145 | } -------------------------------------------------------------------------------- /impl/csharp/Pika/PikaInitializationOptions.cs: -------------------------------------------------------------------------------- 1 | namespace Pika; 2 | 3 | public class PikaInitializationOptions 4 | { 5 | public ulong? Epoch { get; set; } 6 | 7 | public int? NodeId { get; set; } 8 | 9 | public bool DangerouslyDisablePrefixValidation { get; set; } 10 | } -------------------------------------------------------------------------------- /impl/csharp/Pika/PikaPrefix.cs: -------------------------------------------------------------------------------- 1 | namespace Pika; 2 | 3 | public class PikaPrefix 4 | { 5 | public required string Prefix { get; init; } 6 | 7 | public string? 
Description { get; set; } 8 | 9 | public bool Secure { get; init; } 10 | } -------------------------------------------------------------------------------- /impl/csharp/Pika/Snowflakes/DeconstructedSnowflake.cs: -------------------------------------------------------------------------------- 1 | namespace Pika.Snowflakes; 2 | 3 | public class DeconstructedSnowflake 4 | { 5 | public ulong Id { get; init; } 6 | 7 | public ulong Timestamp { get; init; } 8 | 9 | public uint NodeId { get; init; } 10 | 11 | public ulong Seq { get; init; } 12 | 13 | public ulong Epoch { get; init; } 14 | } -------------------------------------------------------------------------------- /impl/csharp/Pika/Snowflakes/EpochResolvable.cs: -------------------------------------------------------------------------------- 1 | namespace Pika.Snowflakes; 2 | 3 | public class EpochResolvable 4 | { 5 | private readonly object _value; 6 | 7 | public EpochResolvable(ulong value) 8 | { 9 | _value = value; 10 | } 11 | 12 | public EpochResolvable(DateTime value) 13 | { 14 | _value = value; 15 | } 16 | 17 | public ulong ToULong() 18 | { 19 | return _value is DateTime dateTime 20 | ? (ulong) new DateTimeOffset(dateTime).ToUnixTimeMilliseconds() 21 | : (ulong) _value; 22 | } 23 | 24 | public static implicit operator long(EpochResolvable resolvable) 25 | { 26 | return (long) resolvable.ToULong(); 27 | } 28 | 29 | public static implicit operator ulong(EpochResolvable resolvable) 30 | { 31 | return resolvable.ToULong(); 32 | } 33 | 34 | public static implicit operator DateTime(EpochResolvable resolvable) 35 | { 36 | return resolvable._value is DateTime dateTime 37 | ? dateTime 38 | : DateTimeOffset.FromUnixTimeMilliseconds((long) resolvable._value).DateTime; 39 | } 40 | 41 | public static implicit operator DateTimeOffset(EpochResolvable resolvable) 42 | { 43 | return resolvable._value is DateTime dateTime 44 | ? 
new DateTimeOffset(dateTime) 45 | : DateTimeOffset.FromUnixTimeMilliseconds((long) resolvable._value); 46 | } 47 | 48 | public static implicit operator EpochResolvable(ulong value) 49 | { 50 | return new EpochResolvable(value); 51 | } 52 | } -------------------------------------------------------------------------------- /impl/csharp/Pika/Snowflakes/SequenceExhaustionError.cs: -------------------------------------------------------------------------------- 1 | namespace Pika.Snowflakes; 2 | 3 | public class SequenceExhaustionError : Exception 4 | { 5 | public SequenceExhaustionError() : base("Sequence exhausted for this millisecond.") 6 | { 7 | } 8 | } -------------------------------------------------------------------------------- /impl/csharp/Pika/Snowflakes/Snowflake.cs: -------------------------------------------------------------------------------- 1 | namespace Pika.Snowflakes; 2 | 3 | public class Snowflake 4 | { 5 | private readonly ulong _epoch; 6 | private ulong _seq; 7 | private long _lastSequenceExhaustion; 8 | 9 | public Snowflake(ulong epoch, ulong nodeId) 10 | { 11 | _epoch = NormalizeEpoch(epoch); 12 | NodeId = nodeId; 13 | } 14 | 15 | public ulong NodeId { get; } 16 | 17 | public ulong Generate(SnowflakeGenOptions? options = null) 18 | { 19 | options ??= new SnowflakeGenOptions(); 20 | var timestamp = options.Timestamp ?? 
DateTimeOffset.UtcNow.ToUnixTimeMilliseconds(); 21 | if (timestamp < _lastSequenceExhaustion) 22 | { 23 | // Clock moved backwards relative to the last generation; refuse rather than risk duplicate ids. 24 | throw new SequenceExhaustionError(); 25 | } 26 | 27 | // Compare BEFORE updating _lastSequenceExhaustion: the original code assigned it first, which made this condition always true — the reset branch was unreachable, so _seq never reset on a new millisecond and grew until it spuriously exhausted. 28 | if (timestamp == _lastSequenceExhaustion) 29 | { 30 | _seq++; 31 | if (_seq > PikaConstants.SequenceMask) 32 | { 33 | throw new SequenceExhaustionError(); 34 | } 35 | } 36 | else 37 | { 38 | // New millisecond: restart the per-millisecond sequence. 39 | _seq = 0; 40 | _lastSequenceExhaustion = timestamp; 41 | } 42 | 43 | // Layout: [timestamp since epoch | 10-bit node id | 12-bit sequence]. 44 | var result = ((timestamp - (long) _epoch) << 22) | (long) (NodeId << 12) | (long) _seq; 45 | return (ulong) result; 46 | } 47 | 48 | public DeconstructedSnowflake Decode(ulong id) 49 | { 50 | return new DeconstructedSnowflake 51 | { 52 | Id = id, 53 | Timestamp = (id >> 22) + _epoch, 54 | NodeId = (uint) ((id >> 12) & 0x3FF), 55 | Seq = (uint) (id & 0xFFF), 56 | Epoch = _epoch, 57 | }; 58 | } 59 | 60 | private static ulong NormalizeEpoch(EpochResolvable epoch) 61 | { 62 | var timestamp = epoch.ToULong(); 63 | if (timestamp < 1420070400000) 64 | { 65 | throw new Exception("Epoch cannot be before 2015-01-01T00:00:00.000Z"); 66 | } 67 | 68 | return timestamp; 69 | } 70 | } -------------------------------------------------------------------------------- /impl/csharp/Pika/Snowflakes/SnowflakeGenOptions.cs: -------------------------------------------------------------------------------- 1 | namespace Pika.Snowflakes; 2 | 3 | public class SnowflakeGenOptions 4 | { 5 | public EpochResolvable? 
Timestamp { get; set; } 6 | } -------------------------------------------------------------------------------- /impl/csharp/Pika/UnregisteredPrefixError.cs: -------------------------------------------------------------------------------- 1 | namespace Pika; 2 | 3 | public class UnregisteredPrefixError : Exception 4 | { 5 | public UnregisteredPrefixError(string prefix) : base( 6 | $"Unregistered prefix ({prefix}) was used, but the prefix was not registered.") 7 | { 8 | } 9 | } -------------------------------------------------------------------------------- /impl/csharp/README.md: -------------------------------------------------------------------------------- 1 | # Pika 2 | 3 | C# implementation for Pika 4 | 5 | ## Installation 6 | 7 | ```bash 8 | dotnet add package Pika 9 | ``` 10 | 11 | ## Usage 12 | 13 | ```csharp 14 | var pika = new PikaGenerator(new[] 15 | { 16 | new PikaPrefix 17 | { 18 | Prefix = "user", 19 | Description = "User ID" 20 | }, 21 | new PikaPrefix 22 | { 23 | Prefix = "post", 24 | Description = "Post ID" 25 | }, 26 | new PikaPrefix 27 | { 28 | Prefix = "sk", 29 | Description = "Secret Key", 30 | Secure = true 31 | } 32 | }); 33 | 34 | var userId = pika.Generate("user"); 35 | // -> user_MjM4NDAxNDk2MTUzODYyMTQ1 36 | 37 | var postId = pika.Generate("post"); 38 | // -> post_MjM4NDAxNDk2MTUzODYyMTQ1 39 | 40 | var secretKey = pika.Generate("sk"); 41 | // -> sk_c19FMjdGRjMyMjhGNkE0MDdDRDFFMTZEMEY1Mzk1QUVGRl8yMzg0MDE0OTYxNTgwNTY0NTA 42 | ``` 43 | -------------------------------------------------------------------------------- /impl/csharp/global.json: -------------------------------------------------------------------------------- 1 | { 2 | "sdk": { 3 | "version": "7.0.0", 4 | "rollForward": "latestMinor", 5 | "allowPrerelease": false 6 | } 7 | } -------------------------------------------------------------------------------- /impl/ex/.credo.exs: -------------------------------------------------------------------------------- 1 | %{ 2 | configs: [ 3 | 
%{ 4 | name: "default", 5 | files: %{ 6 | included: [ 7 | "lib/", 8 | "src/", 9 | "test/", 10 | "web/" 11 | ], 12 | excluded: [~r"/_build/", ~r"/deps/", ~r"/node_modules/"] 13 | }, 14 | plugins: [], 15 | requires: [], 16 | strict: true, 17 | parse_timeout: 5000, 18 | color: false, 19 | checks: %{ 20 | enabled: [ 21 | # 22 | ## Consistency Checks 23 | # 24 | {Credo.Check.Consistency.ExceptionNames, []}, 25 | {Credo.Check.Consistency.LineEndings, []}, 26 | {Credo.Check.Consistency.ParameterPatternMatching, []}, 27 | {Credo.Check.Consistency.SpaceAroundOperators, []}, 28 | {Credo.Check.Consistency.SpaceInParentheses, []}, 29 | {Credo.Check.Consistency.TabsOrSpaces, []}, 30 | 31 | # 32 | ## Design Checks 33 | # 34 | # You can customize the priority of any check 35 | # Priority values are: `low, normal, high, higher` 36 | # 37 | {Credo.Check.Design.AliasUsage, 38 | [priority: :low, if_nested_deeper_than: 2, if_called_more_often_than: 0]}, 39 | {Credo.Check.Design.TagFIXME, []}, 40 | # You can also customize the exit_status of each check. 41 | # If you don't want TODO comments to cause `mix credo` to fail, just 42 | # set this value to 0 (zero). 
43 | # 44 | {Credo.Check.Design.TagTODO, [exit_status: 2]}, 45 | 46 | # 47 | ## Readability Checks 48 | # 49 | {Credo.Check.Readability.AliasOrder, []}, 50 | {Credo.Check.Readability.FunctionNames, []}, 51 | {Credo.Check.Readability.LargeNumbers, []}, 52 | {Credo.Check.Readability.MaxLineLength, [priority: :low, max_length: 120]}, 53 | {Credo.Check.Readability.ModuleAttributeNames, []}, 54 | {Credo.Check.Readability.ModuleDoc, []}, 55 | {Credo.Check.Readability.ModuleNames, []}, 56 | {Credo.Check.Readability.ParenthesesInCondition, []}, 57 | {Credo.Check.Readability.ParenthesesOnZeroArityDefs, []}, 58 | {Credo.Check.Readability.PipeIntoAnonymousFunctions, []}, 59 | {Credo.Check.Readability.PredicateFunctionNames, []}, 60 | {Credo.Check.Readability.PreferImplicitTry, []}, 61 | {Credo.Check.Readability.RedundantBlankLines, []}, 62 | {Credo.Check.Readability.Semicolons, []}, 63 | {Credo.Check.Readability.SpaceAfterCommas, []}, 64 | {Credo.Check.Readability.StringSigils, []}, 65 | {Credo.Check.Readability.TrailingBlankLine, []}, 66 | {Credo.Check.Readability.TrailingWhiteSpace, []}, 67 | {Credo.Check.Readability.UnnecessaryAliasExpansion, []}, 68 | {Credo.Check.Readability.VariableNames, []}, 69 | {Credo.Check.Readability.WithSingleClause, []}, 70 | 71 | # 72 | ## Refactoring Opportunities 73 | # 74 | {Credo.Check.Refactor.Apply, []}, 75 | {Credo.Check.Refactor.CondStatements, []}, 76 | {Credo.Check.Refactor.CyclomaticComplexity, []}, 77 | {Credo.Check.Refactor.FilterCount, []}, 78 | {Credo.Check.Refactor.FilterFilter, []}, 79 | {Credo.Check.Refactor.FunctionArity, []}, 80 | {Credo.Check.Refactor.LongQuoteBlocks, []}, 81 | {Credo.Check.Refactor.MapJoin, []}, 82 | {Credo.Check.Refactor.MatchInCondition, []}, 83 | {Credo.Check.Refactor.NegatedConditionsInUnless, []}, 84 | {Credo.Check.Refactor.NegatedConditionsWithElse, []}, 85 | {Credo.Check.Refactor.Nesting, []}, 86 | {Credo.Check.Refactor.RedundantWithClauseResult, []}, 87 | {Credo.Check.Refactor.RejectReject, []}, 88 
| {Credo.Check.Refactor.UnlessWithElse, []}, 89 | {Credo.Check.Refactor.WithClauses, []}, 90 | 91 | # 92 | ## Warnings 93 | # 94 | {Credo.Check.Warning.ApplicationConfigInModuleAttribute, []}, 95 | {Credo.Check.Warning.BoolOperationOnSameValues, []}, 96 | {Credo.Check.Warning.Dbg, []}, 97 | {Credo.Check.Warning.ExpensiveEmptyEnumCheck, []}, 98 | {Credo.Check.Warning.IExPry, []}, 99 | {Credo.Check.Warning.IoInspect, []}, 100 | {Credo.Check.Warning.MissedMetadataKeyInLoggerConfig, []}, 101 | {Credo.Check.Warning.OperationOnSameValues, []}, 102 | {Credo.Check.Warning.OperationWithConstantResult, []}, 103 | {Credo.Check.Warning.RaiseInsideRescue, []}, 104 | {Credo.Check.Warning.SpecWithStruct, []}, 105 | {Credo.Check.Warning.UnsafeExec, []}, 106 | {Credo.Check.Warning.UnusedEnumOperation, []}, 107 | {Credo.Check.Warning.UnusedFileOperation, []}, 108 | {Credo.Check.Warning.UnusedKeywordOperation, []}, 109 | {Credo.Check.Warning.UnusedListOperation, []}, 110 | {Credo.Check.Warning.UnusedPathOperation, []}, 111 | {Credo.Check.Warning.UnusedRegexOperation, []}, 112 | {Credo.Check.Warning.UnusedStringOperation, []}, 113 | {Credo.Check.Warning.UnusedTupleOperation, []}, 114 | {Credo.Check.Warning.WrongTestFileExtension, []} 115 | ], 116 | disabled: [ 117 | # 118 | # Checks scheduled for next check update (opt-in for now) 119 | {Credo.Check.Refactor.UtcNowTruncate, []}, 120 | 121 | # 122 | # Controversial and experimental checks (opt-in, just move the check to `:enabled` 123 | # and be sure to use `mix credo --strict` to see low priority checks) 124 | # 125 | {Credo.Check.Consistency.MultiAliasImportRequireUse, []}, 126 | {Credo.Check.Consistency.UnusedVariableNames, []}, 127 | {Credo.Check.Design.DuplicatedCode, []}, 128 | {Credo.Check.Design.SkipTestWithoutComment, []}, 129 | {Credo.Check.Readability.AliasAs, []}, 130 | {Credo.Check.Readability.BlockPipe, []}, 131 | {Credo.Check.Readability.ImplTrue, []}, 132 | {Credo.Check.Readability.MultiAlias, []}, 133 | 
{Credo.Check.Readability.NestedFunctionCalls, []}, 134 | {Credo.Check.Readability.OneArityFunctionInPipe, []}, 135 | {Credo.Check.Readability.OnePipePerLine, []}, 136 | {Credo.Check.Readability.SeparateAliasRequire, []}, 137 | {Credo.Check.Readability.SingleFunctionToBlockPipe, []}, 138 | {Credo.Check.Readability.SinglePipe, []}, 139 | {Credo.Check.Readability.Specs, []}, 140 | {Credo.Check.Readability.StrictModuleLayout, []}, 141 | {Credo.Check.Readability.WithCustomTaggedTuple, []}, 142 | {Credo.Check.Refactor.ABCSize, []}, 143 | {Credo.Check.Refactor.AppendSingleItem, []}, 144 | {Credo.Check.Refactor.DoubleBooleanNegation, []}, 145 | {Credo.Check.Refactor.FilterReject, []}, 146 | {Credo.Check.Refactor.IoPuts, []}, 147 | {Credo.Check.Refactor.MapMap, []}, 148 | {Credo.Check.Refactor.ModuleDependencies, []}, 149 | {Credo.Check.Refactor.NegatedIsNil, []}, 150 | {Credo.Check.Refactor.PassAsyncInTestCases, []}, 151 | {Credo.Check.Refactor.PipeChainStart, []}, 152 | {Credo.Check.Refactor.RejectFilter, []}, 153 | {Credo.Check.Refactor.VariableRebinding, []}, 154 | {Credo.Check.Warning.LazyLogging, []}, 155 | {Credo.Check.Warning.LeakyEnvironment, []}, 156 | {Credo.Check.Warning.MapGetUnsafePass, []}, 157 | {Credo.Check.Warning.MixEnv, []}, 158 | {Credo.Check.Warning.UnsafeToAtom, []} 159 | 160 | # {Credo.Check.Refactor.MapInto, []}, 161 | 162 | # 163 | # Custom checks can be created using `mix credo.gen.check`. 
164 | # 165 | ] 166 | } 167 | } 168 | ] 169 | } 170 | -------------------------------------------------------------------------------- /impl/ex/.formatter.exs: -------------------------------------------------------------------------------- 1 | # Used by "mix format" 2 | [ 3 | inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] 4 | ] 5 | -------------------------------------------------------------------------------- /impl/ex/.gitignore: -------------------------------------------------------------------------------- 1 | # The directory Mix will write compiled artifacts to. 2 | /_build/ 3 | 4 | # If you run "mix test --cover", coverage assets end up here. 5 | /cover/ 6 | 7 | # The directory Mix downloads your dependencies sources to. 8 | /deps/ 9 | 10 | # Where third-party dependencies like ExDoc output generated docs. 11 | /doc/ 12 | 13 | # Ignore .fetch files in case you like to edit your project deps locally. 14 | /.fetch 15 | 16 | # If the VM crashes, it generates a dump, let's ignore it too. 17 | erl_crash.dump 18 | 19 | # Also ignore archive artifacts (built via "mix archive.build"). 20 | *.ez 21 | 22 | # Ignore package tarball (built via "mix hex.build"). 23 | pika-*.tar 24 | 25 | # Temporary files, for example, from tests. 26 | /tmp/ 27 | -------------------------------------------------------------------------------- /impl/ex/README.md: -------------------------------------------------------------------------------- 1 | # pika 2 | 3 | > Elixir implementation of Pika 4 | 5 | Combine Stripe IDs with Snowflakes you get Pika! The last ID system you'll ever need! 
6 | Combining pragmatism with functionality 7 | 8 | ## Features 9 | 10 | - Written in pure Elixir 11 | - Zero dependencies 12 | 13 | ## Installation 14 | 15 | The package can be installed by adding `pika` to your list of dependencies in `mix.exs`: 16 | 17 | ```elixir 18 | def deps do 19 | [ 20 | {:pika, "~> 0.1"} 21 | ] 22 | end 23 | ``` 24 | 25 | In your `config.exs`: 26 | 27 | ```elixir 28 | config :pika, 29 | prefixes: [ 30 | %{prefix: "user", description: "User IDs"}, 31 | %{prefix: "server", description: "Server IDs", secure: true}, 32 | # ... 33 | ] 34 | ``` 35 | 36 | ## Example 37 | 38 | `Pika.Snowflake` should be started under a `Supervisor` or `Application` before you start using 39 | `Pika.gen/1` or `Pika.deconstruct/1` 40 | 41 | ```elixir 42 | defmodule MyApp.Application do 43 | use Application 44 | 45 | def start(_type, _args) do 46 | children = [Pika.Snowflake] 47 | 48 | Supervisor.start_link(children, strategy: :one_for_one) 49 | end 50 | end 51 | ``` 52 | 53 | Somewhere in your application: 54 | 55 | ```elixir 56 | # ... 
57 | Pika.gen("user") # or Pika.gen!("user") 58 | 59 | {:ok, "user_MjgyNDQ2NjY1OTk3MjEzNjk3"} 60 | ``` 61 | -------------------------------------------------------------------------------- /impl/ex/benchmarks/generation.exs: -------------------------------------------------------------------------------- 1 | defmodule Generation do 2 | def id(), do: Pika.gen("user") 3 | def id_secure(), do: Pika.gen("server") 4 | def snowflake(), do: Pika.Snowflake.generate() 5 | end 6 | 7 | Pika.Snowflake.start_link() 8 | 9 | Benchee.run( 10 | %{ 11 | "Generate IDs" => fn -> Generation.id() end, 12 | "Generate Secure IDs" => fn -> Generation.id_secure() end, 13 | "Generate Snowflakes" => fn -> Generation.snowflake() end 14 | }, 15 | time: 5 16 | ) 17 | -------------------------------------------------------------------------------- /impl/ex/config/config.exs: -------------------------------------------------------------------------------- 1 | import Config 2 | 3 | case config_env() do 4 | :docs -> 5 | :ok 6 | 7 | _ -> 8 | import_config "#{Mix.env()}.exs" 9 | end 10 | -------------------------------------------------------------------------------- /impl/ex/config/dev.exs: -------------------------------------------------------------------------------- 1 | import Config 2 | 3 | config :pika, 4 | prefixes: [ 5 | %{prefix: "user", description: "User IDs"}, 6 | %{prefix: "server", description: "Server IDs", secure: true} 7 | ] 8 | -------------------------------------------------------------------------------- /impl/ex/config/test.exs: -------------------------------------------------------------------------------- 1 | import Config 2 | 3 | config :pika, 4 | prefixes: [ 5 | %{prefix: "user", description: "User IDs"}, 6 | %{prefix: "server", description: "Server IDs", secure: true} 7 | ] 8 | -------------------------------------------------------------------------------- /impl/ex/lib/pika.ex: -------------------------------------------------------------------------------- 1 | defmodule 
Pika do 2 | alias Pika.Snowflake 3 | import Pika.Utils, only: [filter_prefixes: 2] 4 | 5 | @moduledoc nil 6 | 7 | @spec valid_prefix?(binary()) :: boolean() 8 | defp valid_prefix?(prefix) do 9 | # Checks if `prefix` is alphanumeric 10 | Regex.match?(~r/^[0-9a-z_]+$/, prefix) 11 | end 12 | 13 | @doc false 14 | defp _gen(prefix, snowflake, nil) do 15 | {:ok, "#{prefix}_#{Base.encode64(snowflake, padding: false)}"} 16 | end 17 | 18 | @doc false 19 | defp _gen(prefix, snowflake, false) do 20 | {:ok, "#{prefix}_#{Base.encode64(snowflake, padding: false)}"} 21 | end 22 | 23 | @doc false 24 | defp _gen(prefix, snowflake, true) do 25 | bytes = :rand.bytes(16) 26 | 27 | tail = 28 | "_s_#{Base.encode32(bytes, padding: false, case: :lower)}_#{snowflake}" 29 | 30 | {:ok, "#{prefix}_#{Base.encode64(tail, padding: false)}"} 31 | end 32 | 33 | @spec gen(binary()) :: {:error, binary()} | {:ok, binary()} 34 | @doc """ 35 | Generates an ID given a prefix (which should be configured). 36 | 37 | This function will return an `{:error, binary()}` if one of the follow conditions are met: 38 | 39 | 1. The prefix isn't valid 40 | 2. 
The prefix isn't configured 41 | """ 42 | def gen(prefix) do 43 | case valid_prefix?(prefix) do 44 | true -> 45 | prefixes = Application.get_env(:pika, :prefixes) 46 | 47 | case filter_prefixes(prefix, prefixes) do 48 | [prefix_record] -> 49 | snowflake = Snowflake.generate() |> Integer.to_string() 50 | 51 | _gen(prefix, snowflake, prefix_record[:secure]) 52 | 53 | _ -> 54 | {:error, "Prefix is undefined"} 55 | end 56 | 57 | _ -> 58 | {:error, "Prefix is invalid (must be Alphanumeric)"} 59 | end 60 | end 61 | 62 | @spec gen!(binary()) :: binary() 63 | def gen!(prefix) do 64 | {:ok, id} = gen(prefix) 65 | 66 | id 67 | end 68 | 69 | @doc """ 70 | Deconstructs a Pika ID and returns its metadata: 71 | 72 | - prefix 73 | - tail 74 | - snowflake 75 | - timestamp 76 | - prefix_record 77 | - epoch 78 | - node_id 79 | - seq 80 | """ 81 | def deconstruct(id) do 82 | prefixes = Application.get_env(:pika, :prefixes) 83 | 84 | # Prefixes may themselves contain underscores (valid_prefix?/1 allows them), so the 85 | # tail is everything after the LAST underscore. The previous two-element match on the 86 | # fragments raised MatchError for ids generated from e.g. an "api_key" prefix. 87 | fragments = String.split(id, "_") 88 | tail = List.last(fragments) 89 | prefix = fragments |> Enum.drop(-1) |> Enum.join("_") 90 | 91 | [prefix_record] = filter_prefixes(prefix, prefixes) 92 | decoded_tail = Base.decode64!(tail, padding: false) 93 | tail_fragments = decoded_tail |> String.split("_") 94 | snowflake = tail_fragments |> List.last() |> String.to_integer() 95 | 96 | decoded_snowflake = Snowflake.decode(snowflake) 97 | 98 | Map.merge(decoded_snowflake, %{ 99 | prefix: prefix, 100 | tail: tail, 101 | snowflake: snowflake, 102 | prefix_record: prefix_record 103 | }) 104 | end 105 | end 106 | -------------------------------------------------------------------------------- /impl/ex/lib/snowflake.ex: -------------------------------------------------------------------------------- 1 | defmodule Pika.Snowflake do 2 | import Bitwise 3 | alias Pika.Utils 4 | use GenServer 5 | 6 | @moduledoc """ 7 | `Pika.Snowflake` holds the state, generates Snowflakes, and decodes Snowflakes. 
8 | 9 | `Pika.Snowflake` should be started under a `Supervisor` or `Application` before you start using 10 | `Pika.gen/1` or `Pika.deconstruct/1` 11 | 12 | ```elixir 13 | defmodule MyApp.Application do 14 | use Application 15 | 16 | def start(_type, _args) do 17 | children = [Pika.Snowflake] 18 | 19 | Supervisor.start_link(children, strategy: :one_for_one) 20 | end 21 | end 22 | ``` 23 | 24 | or manually in `iex` 25 | 26 | ```elixir 27 | iex(1)> Pika.Snowflake.start_link() 28 | {:ok, #PID<0.190.0>} 29 | ``` 30 | 31 | ## Custom epoch 32 | 33 | You can start `Pika.Snowflake` with a custom epoch by passing it: 34 | 35 | ```elixir 36 | Pika.Snowflake.start_link(1_650_153_600_000) 37 | ``` 38 | """ 39 | 40 | def start_link([]), do: start_link() 41 | 42 | def start_link(epoch) when is_integer(epoch) do 43 | GenServer.start_link(__MODULE__, {Utils.compute_node_id(), epoch, 0, 0}, name: __MODULE__) 44 | end 45 | 46 | def start_link do 47 | # State: {node_id, epoch, seq, last_sequence_exhaustion} 48 | GenServer.start_link(__MODULE__, {Utils.compute_node_id(), 1_640_995_200_000, 0, 0}, 49 | name: __MODULE__ 50 | ) 51 | end 52 | 53 | def init(state) do 54 | {:ok, state} 55 | end 56 | 57 | @doc """ 58 | Generates a new Snowflake 59 | """ 60 | @spec generate() :: integer() 61 | def generate do 62 | GenServer.call(__MODULE__, {:generate, now_ts()}) 63 | end 64 | 65 | @doc """ 66 | Generates a new Snowflake with the given `timestamp` 67 | """ 68 | @spec generate(integer()) :: integer() 69 | def generate(timestamp) do 70 | GenServer.call(__MODULE__, {:generate, timestamp}) 71 | end 72 | 73 | @doc """ 74 | Decodes a Snowflake and returns: 75 | 76 | - timestamp 77 | - epoch 78 | - node_id 79 | - seq 80 | """ 81 | @spec decode(integer()) :: any() 82 | def decode(snowflake) when is_integer(snowflake) do 83 | GenServer.call(__MODULE__, {:decode, snowflake}) 84 | end 85 | 86 | def handle_call( 87 | {:decode, snowflake}, 88 | _from, 89 | state = {_node_id, epoch, _seq, 
_last_seq_exhaustion} 90 | ) do 91 | timestamp = (snowflake >>> 22) + epoch 92 | node_id = snowflake >>> 12 &&& 0b11_1111_1111 93 | seq = snowflake &&& 0b1111_1111_1111 94 | 95 | {:reply, %{timestamp: timestamp, epoch: epoch, node_id: node_id, seq: seq}, state} 96 | end 97 | 98 | def handle_call({:generate, timestamp}, _from, {node_id, epoch, seq, last_seq_exhaustion}) do 99 | if seq >= 4095 and timestamp == last_seq_exhaustion do 100 | block(timestamp) 101 | end 102 | 103 | snowflake = (timestamp - epoch) <<< 22 ||| node_id <<< 12 ||| seq 104 | 105 | seq = 106 | if seq >= 4095 do 107 | 0 108 | else 109 | seq + 1 110 | end 111 | 112 | if timestamp === last_seq_exhaustion do 113 | {:reply, snowflake, {node_id, epoch, seq, timestamp}} 114 | else 115 | {:reply, snowflake, {node_id, epoch, seq, now_ts()}} 116 | end 117 | end 118 | 119 | @doc false 120 | defp block(timestamp) do 121 | if now_ts() - timestamp < 1 do 122 | :timer.sleep(100) 123 | block(timestamp) 124 | end 125 | end 126 | 127 | @doc "Returns the current timestamp in milliseconds." 
128 | def now_ts do 129 | System.os_time(:millisecond) 130 | end 131 | end 132 | -------------------------------------------------------------------------------- /impl/ex/lib/utils.ex: -------------------------------------------------------------------------------- 1 | defmodule Pika.Utils do 2 | @moduledoc false 3 | 4 | defp validate_address(address) do 5 | case address do 6 | nil -> :error 7 | [0, 0, 0, 0, 0, 0] -> :error 8 | [_, _, _, _, _, _] = addr -> {:ok, addr} 9 | _ -> :error 10 | end 11 | end 12 | 13 | def get_mac_address do 14 | {:ok, addresses} = :inet.getifaddrs() 15 | 16 | {_if_name, if_mac} = Enum.reduce(addresses, [], fn ({if_name, if_data}, acc) -> 17 | case Keyword.get(if_data, :hwaddr) |> validate_address do 18 | {:ok, address} -> [{to_string(if_name), address} | acc] 19 | _ -> acc 20 | end 21 | end) 22 | |> List.first() 23 | 24 | if_mac 25 | |> Enum.map_join(":", fn i -> Integer.to_string(i, 16) |> String.pad_leading(2, "0") end) 26 | end 27 | 28 | @spec compute_node_id() :: integer() 29 | def compute_node_id do 30 | {id, _} = get_mac_address() |> String.replace(":", "") |> Integer.parse(16) 31 | 32 | rem(id, 1024) 33 | end 34 | 35 | def filter_prefixes(prefix, prefixes) do 36 | Enum.filter(prefixes, fn record -> record.prefix == prefix end) 37 | end 38 | end 39 | -------------------------------------------------------------------------------- /impl/ex/mix.exs: -------------------------------------------------------------------------------- 1 | defmodule Pika.MixProject do 2 | use Mix.Project 3 | 4 | @source_url "https://github.com/ArtieFuzzz/pika/tree/ex-impl/impl/ex" 5 | 6 | def project do 7 | [ 8 | app: :pika, 9 | version: "0.1.2", 10 | elixir: "~> 1.14", 11 | start_permanent: Mix.env() == :prod, 12 | package: package(), 13 | description: """ 14 | Elixir implementation of hop.io's Pika. Combine Stripe IDs and Snowflakes. 
15 | """, 16 | docs: docs(), 17 | deps: deps() 18 | ] 19 | end 20 | 21 | # Run "mix help compile.app" to learn about applications. 22 | def application do 23 | [] 24 | end 25 | 26 | def docs do 27 | [ 28 | source_url: @source_url, 29 | main: "readme", 30 | extras: ["./README.md"] 31 | ] 32 | end 33 | 34 | def package do 35 | [ 36 | files: ["lib", "mix.exs", "README.md"], 37 | licenses: ["ISC"], 38 | links: %{ 39 | "GitHub" => @source_url 40 | } 41 | ] 42 | end 43 | 44 | # Run "mix help deps" to learn about dependencies. 45 | defp deps do 46 | [ 47 | {:credo, "~> 1.7", only: [:dev, :test], runtime: false}, 48 | {:benchee, "~> 1.0", only: :dev}, 49 | {:ex_doc, ">= 0.0.0", only: :dev, runtime: false} 50 | # {:dep_from_hexpm, "~> 0.3.0"}, 51 | # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"} 52 | ] 53 | end 54 | end 55 | -------------------------------------------------------------------------------- /impl/ex/mix.lock: -------------------------------------------------------------------------------- 1 | %{ 2 | "benchee": {:hex, :benchee, "1.3.0", "f64e3b64ad3563fa9838146ddefb2d2f94cf5b473bdfd63f5ca4d0657bf96694", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}, {:statistex, "~> 1.0", [hex: :statistex, repo: "hexpm", optional: false]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "34f4294068c11b2bd2ebf2c59aac9c7da26ffa0068afdf3419f1b176e16c5f81"}, 3 | "bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", "dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"}, 4 | "credo": {:hex, :credo, "1.7.4", "68ca5cf89071511c12fd9919eb84e388d231121988f6932756596195ccf7fd35", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: 
false]}], "hexpm", "9cf776d062c78bbe0f0de1ecaee183f18f2c3ec591326107989b054b7dddefc2"}, 5 | "deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"}, 6 | "earmark_parser": {:hex, :earmark_parser, "1.4.39", "424642f8335b05bb9eb611aa1564c148a8ee35c9c8a8bba6e129d51a3e3c6769", [:mix], [], "hexpm", "06553a88d1f1846da9ef066b87b57c6f605552cfbe40d20bd8d59cc6bde41944"}, 7 | "ex_doc": {:hex, :ex_doc, "0.31.1", "8a2355ac42b1cc7b2379da9e40243f2670143721dd50748bf6c3b1184dae2089", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.1", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "3178c3a407c557d8343479e1ff117a96fd31bafe52a039079593fb0524ef61b0"}, 8 | "file_system": {:hex, :file_system, "1.0.0", "b689cc7dcee665f774de94b5a832e578bd7963c8e637ef940cd44327db7de2cd", [:mix], [], "hexpm", "6752092d66aec5a10e662aefeed8ddb9531d79db0bc145bb8c40325ca1d8536d"}, 9 | "jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"}, 10 | "makeup": {:hex, :makeup, "1.1.1", "fa0bc768698053b2b3869fa8a62616501ff9d11a562f3ce39580d60860c3a55e", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "5dc62fbdd0de44de194898b6710692490be74baa02d9d108bc29f007783b0b48"}, 11 | "makeup_elixir": {:hex, :makeup_elixir, "0.16.1", "cc9e3ca312f1cfeccc572b37a09980287e243648108384b97ff2b76e505c3555", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: 
false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"}, 12 | "makeup_erlang": {:hex, :makeup_erlang, "0.1.4", "29563475afa9b8a2add1b7a9c8fb68d06ca7737648f28398e04461f008b69521", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "f4ed47ecda66de70dd817698a703f8816daa91272e7e45812469498614ae8b29"}, 13 | "nimble_parsec": {:hex, :nimble_parsec, "1.4.0", "51f9b613ea62cfa97b25ccc2c1b4216e81df970acd8e16e8d1bdc58fef21370d", [:mix], [], "hexpm", "9c565862810fb383e9838c1dd2d7d2c437b3d13b267414ba6af33e50d2d1cf28"}, 14 | "statistex": {:hex, :statistex, "1.0.0", "f3dc93f3c0c6c92e5f291704cf62b99b553253d7969e9a5fa713e5481cd858a5", [:mix], [], "hexpm", "ff9d8bee7035028ab4742ff52fc80a2aa35cece833cf5319009b52f1b5a86c27"}, 15 | } 16 | -------------------------------------------------------------------------------- /impl/ex/test/pika_test.exs: -------------------------------------------------------------------------------- 1 | defmodule PikaTest do 2 | use ExUnit.Case 3 | doctest Pika 4 | 5 | setup do 6 | Pika.Snowflake.start_link() 7 | 8 | :ok 9 | end 10 | 11 | test "Generate an ID" do 12 | id = Pika.gen!("user") 13 | 14 | assert String.starts_with?(id, "user") 15 | end 16 | 17 | test "Generate a secure ID" do 18 | id = Pika.gen!("server") 19 | 20 | assert String.starts_with?(id, "server") 21 | end 22 | 23 | test "Fail to generate ID" do 24 | {status, _message} = Pika.gen("not_found") 25 | 26 | assert status == :error 27 | end 28 | 29 | test "Fail to validate ID" do 30 | {:error, message} = Pika.gen("!!!") 31 | 32 | assert message == "Prefix is invalid (must be Alphanumeric)" 33 | end 34 | 35 | test "Snowflake custom timestamp" do 36 | timestamp = 1_708_158_291_035 37 | snowflake = Pika.Snowflake.generate(timestamp) 38 | decoded = Pika.Snowflake.decode(snowflake) 39 | 40 | assert decoded.timestamp == timestamp 41 | end 42 
| 43 | test "Test 4096+ ids" do 44 | Enum.map(0..4095, fn s -> 45 | id = Pika.gen!("user") 46 | deconstructed = Pika.deconstruct(id) 47 | 48 | assert deconstructed.seq == s 49 | end) 50 | 51 | last_id = Pika.gen!("user") 52 | deconstructed = Pika.deconstruct(last_id) 53 | 54 | assert deconstructed.seq == 0 55 | end 56 | 57 | test "Validate node_id" do 58 | id = Pika.gen!("user") 59 | deconstructed = Pika.deconstruct(id) 60 | 61 | assert deconstructed.node_id == Pika.Utils.compute_node_id() 62 | end 63 | end 64 | -------------------------------------------------------------------------------- /impl/ex/test/test_helper.exs: -------------------------------------------------------------------------------- 1 | ExUnit.start() 2 | -------------------------------------------------------------------------------- /impl/js/.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | .pnpm-debug.log* 9 | 10 | # Diagnostic reports (https://nodejs.org/api/report.html) 11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 12 | 13 | # Runtime data 14 | pids 15 | *.pid 16 | *.seed 17 | *.pid.lock 18 | 19 | # Directory for instrumented libs generated by jscoverage/JSCover 20 | lib-cov 21 | 22 | # Coverage directory used by tools like istanbul 23 | coverage 24 | *.lcov 25 | 26 | # nyc test coverage 27 | .nyc_output 28 | 29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 30 | .grunt 31 | 32 | # Bower dependency directory (https://bower.io/) 33 | bower_components 34 | 35 | # node-waf configuration 36 | .lock-wscript 37 | 38 | # Compiled binary addons (https://nodejs.org/api/addons.html) 39 | build/Release 40 | 41 | # Dependency directories 42 | node_modules/ 43 | jspm_packages/ 44 | 45 | # Snowpack dependency directory (https://snowpack.dev/) 46 | web_modules/ 47 | 48 | # TypeScript cache 49 | *.tsbuildinfo 50 
| 51 | # Optional npm cache directory 52 | .npm 53 | 54 | # Optional eslint cache 55 | .eslintcache 56 | 57 | # Optional stylelint cache 58 | .stylelintcache 59 | 60 | # Microbundle cache 61 | .rpt2_cache/ 62 | .rts2_cache_cjs/ 63 | .rts2_cache_es/ 64 | .rts2_cache_umd/ 65 | 66 | # Optional REPL history 67 | .node_repl_history 68 | 69 | # Output of 'npm pack' 70 | *.tgz 71 | 72 | # Yarn Integrity file 73 | .yarn-integrity 74 | 75 | # dotenv environment variable files 76 | .env 77 | .env.development.local 78 | .env.test.local 79 | .env.production.local 80 | .env.local 81 | 82 | # parcel-bundler cache (https://parceljs.org/) 83 | .cache 84 | .parcel-cache 85 | 86 | # Next.js build output 87 | .next 88 | out 89 | 90 | # Nuxt.js build / generate output 91 | .nuxt 92 | dist 93 | 94 | # Gatsby files 95 | .cache/ 96 | # Comment in the public line in if your project uses Gatsby and not Next.js 97 | # https://nextjs.org/blog/next-9-1#public-directory-support 98 | # public 99 | 100 | # vuepress build output 101 | .vuepress/dist 102 | 103 | # vuepress v2.x temp and cache directory 104 | .temp 105 | .cache 106 | 107 | # Docusaurus cache and generated files 108 | .docusaurus 109 | 110 | # Serverless directories 111 | .serverless/ 112 | 113 | # FuseBox cache 114 | .fusebox/ 115 | 116 | # DynamoDB Local files 117 | .dynamodb/ 118 | 119 | # TernJS port file 120 | .tern-port 121 | 122 | # Stores VSCode versions used for testing VSCode extensions 123 | .vscode-test 124 | 125 | # yarn v2 126 | .yarn/cache 127 | .yarn/unplugged 128 | .yarn/build-state.yml 129 | .yarn/install-state.gz 130 | .pnp.* -------------------------------------------------------------------------------- /impl/js/.prettierignore: -------------------------------------------------------------------------------- 1 | .next 2 | dist 3 | build 4 | out 5 | node_modules 6 | .yarn 7 | .git 8 | -------------------------------------------------------------------------------- /impl/js/.prettierrc: 
-------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json.schemastore.org/prettierrc", 3 | "singleQuote": true, 4 | "semi": true, 5 | "printWidth": 120, 6 | "trailingComma": "all", 7 | "arrowParens": "avoid", 8 | "bracketSpacing": false, 9 | "useTabs": true, 10 | "quoteProps": "consistent" 11 | } 12 | -------------------------------------------------------------------------------- /impl/js/README.md: -------------------------------------------------------------------------------- 1 | # pika js 2 | 3 | Fully typed, 0 dependencies JS implementation of the full Pika specification. 4 | 5 | ## Install 6 | 7 | Yarn: 8 | 9 | ``` 10 | yarn add pika-id 11 | ``` 12 | 13 | npm: 14 | 15 | ``` 16 | npm i pika-id 17 | ``` 18 | 19 | ## Basic Usage 20 | 21 | ```ts 22 | import Pika from 'pika-id'; 23 | 24 | // Or in CJS 25 | const {Pika} = require('pika-id'); 26 | 27 | // Initialize Pika - do this once, then reuse the instance 28 | const pika = new Pika( 29 | // Define prefix types 30 | // You can specify either a string or an object per prefix 31 | // Make sure prefixes are lowercase 32 | [ 33 | 'user', 34 | { 35 | prefix: 'ch', 36 | description: 'Channels', 37 | }, 38 | { 39 | prefix: 'sk', 40 | description: 'Secret key', 41 | secure: true, // pika secure id 42 | }, 43 | ], 44 | { 45 | /** 46 | * Optional initialization parameters: 47 | * epoch: bigint | number - customize the epoch (millis) that IDs are derived from - by default, this is 1640995200000 (Jan 1 2022) 48 | * nodeId: bigint | number - see below 49 | * suppressPrefixWarnings: boolean - don't warn on undefined prefixes 50 | * disableLowercase: boolean - don't require prefixes to be lowercase 51 | **/ 52 | }, 53 | ); 54 | 55 | // Generate a pika id 56 | pika.gen('user'); 57 | // => user_Mzc5ODk1NTI4NzgxMTY4NjQ 58 | 59 | // Generate a secure id, as registered above 60 | pika.gen('sk'); 61 | // => 
sk_c19iMGI0NTM4ZjU3ZThjYTIyZThjNjNlMTgwOTg5MWMyM18zODA2NTE5MjcwNDc5NDYyNA 62 | ``` 63 | 64 | ## Node IDs 65 | 66 | By default, Node IDs are calculated by finding the MAC address of the first public network interface device, then calculating the modulo against 1024. 67 | 68 | This works well for smaller systems, but if you have a lot of nodes generating Snowflakes, then collision is possible. In this case, you should create an internal singleton service which keeps a rolling count of the assigned node IDs - from 1 to 1023. Then, services that generate Pikas should call this service to get assigned a node ID. 69 | 70 | You can then pass in the node ID when initializing Pika like this: 71 | 72 | ```ts 73 | const p = new Pika([], {nodeId: customNodeId}); 74 | ``` 75 | 76 | ## TypeScript 77 | 78 | Pika has strong definitions for TypeScript, including typing all prefix IDs. If you are using TypeScript, you can take advantage of the following utility types. 79 | 80 | ```ts 81 | import Pika, {InferPrefixes, InferIds} from 'pika-id'; 82 | const pika = new Pika(['nuts', 'bolts']); 83 | 84 | // Infer what an ID will look like 85 | type Id = InferIds; 86 | // ^? `nuts_${string}` | `bolts_${string}` 87 | 88 | // Infer what the prefixes are 89 | type Prefixes = InferPrefixes; 90 | // ^? 'nuts' | 'bolts' 91 | ``` 92 | 93 | ## Benchmarks 94 | 95 | See [bench/gen.js](https://github.com/hopinc/pika/blob/main/impl/js/bench/gen.js) for benchmark implementation 96 | 97 | The benchmark below was run on a 2021 MacBook Pro 14" with an m1 Pro chip and 16gb of memory. 
98 | 99 | ``` 100 | Pika#gen x 1,370,869 ops/sec ±0.19% (100 runs sampled) 101 | Snowflake#gen x 2,015,012 ops/sec ±1.88% (97 runs sampled) 102 | ``` 103 | -------------------------------------------------------------------------------- /impl/js/bench/gen.js: -------------------------------------------------------------------------------- 1 | const Benchmark = require('benchmark'); 2 | const {Pika} = require('../dist'); 3 | 4 | const suite = new Benchmark.Suite(); 5 | 6 | const p = new Pika(['test']); 7 | 8 | suite 9 | .add('Pika#gen', () => { 10 | p.gen('test'); 11 | }) 12 | .add('Snowflake#gen', () => { 13 | p.genSnowflake(); 14 | }) 15 | .on('cycle', function (event) { 16 | console.log(String(event.target)); 17 | }) 18 | .run({async: true}); 19 | -------------------------------------------------------------------------------- /impl/js/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pika-id", 3 | "version": "1.1.3", 4 | "description": "The pragmatic ID system", 5 | "main": "dist/index.js", 6 | "module": "dist/index.mjs", 7 | "types": "dist/index.d.ts", 8 | "files": [ 9 | "package.json", 10 | "dist" 11 | ], 12 | "scripts": { 13 | "build": "yarn tsup src/index.ts --dts --format cjs,esm", 14 | "release": "yarn build && yarn publish", 15 | "bench": "yarn build && node ./bench/gen.js" 16 | }, 17 | "repository": { 18 | "type": "git", 19 | "url": "git+https://github.com/hopinc/pika.git" 20 | }, 21 | "keywords": [ 22 | "id", 23 | "snowflake" 24 | ], 25 | "packageManager": "yarn@1.22.19", 26 | "author": "Phineas (https://phineas.io)", 27 | "license": "ISC", 28 | "bugs": { 29 | "url": "https://github.com/hopinc/pika/issues" 30 | }, 31 | "homepage": "https://github.com/hopinc/pika#readme", 32 | "devDependencies": { 33 | "@types/node": "^18.8.3", 34 | "benchmark": "^2.1.4", 35 | "prettier": "^2.7.1", 36 | "tsup": "^6.2.3", 37 | "tsx": "^3.10.1", 38 | "typescript": "^4.8.4" 39 | } 40 | } 41 | 
-------------------------------------------------------------------------------- /impl/js/src/index.ts: -------------------------------------------------------------------------------- 1 | export {Pika, Pika as default} from './pika'; 2 | export type {InferIds, InferPrefixes} from './pika'; 3 | -------------------------------------------------------------------------------- /impl/js/src/logger.ts: -------------------------------------------------------------------------------- 1 | const PREFIX = '[pika]'; 2 | 3 | export const warn = (...args: unknown[]) => { 4 | console.warn(`${PREFIX}`, ...args); 5 | }; 6 | 7 | export const error = (...args: unknown[]) => { 8 | console.error(`${PREFIX}`, ...args); 9 | }; 10 | -------------------------------------------------------------------------------- /impl/js/src/pika.ts: -------------------------------------------------------------------------------- 1 | import {randomBytes} from 'crypto'; 2 | import {networkInterfaces} from 'os'; 3 | import {error, warn} from './logger'; 4 | import {DeconstructedSnowflake, EpochResolvable, Snowflake} from './snowflake'; 5 | 6 | export interface PikaPrefixDefinition

{ 7 | prefix: P; 8 | description?: string; 9 | secure?: boolean; 10 | metadata?: Record; 11 | } 12 | 13 | export interface DecodedPika

extends Omit { 14 | prefix: P; 15 | 16 | /** 17 | * The tail after the prefix, which is base64 encoding of the snowflake. 18 | * 19 | * However, if the pika is cryptographically secure, then the base64 decoded string value will start with an `s_` prefix, 20 | * followed by a cryptographically random string, then followed by another underscore and the Snowflake ID. 21 | */ 22 | tail: string; 23 | 24 | /** 25 | * The snowfake that was generated for this ID 26 | */ 27 | snowflake: bigint; 28 | 29 | /** 30 | * The ID this Pika was generated from. 31 | */ 32 | nodeId: number; 33 | 34 | /** 35 | * A rolling number between 1 to 4096 to introduce entropy. 36 | * Allows for doing 4096 ids per ms per node. 37 | */ 38 | seq: number; 39 | 40 | /** 41 | * The version of the pika encoding. 42 | */ 43 | version: 1; 44 | 45 | /** 46 | * The definition for this prefix 47 | */ 48 | prefixRecord: PikaPrefixDefinition

; 49 | 50 | /** 51 | * @deprecated use `.prefixRecord` instead 52 | */ 53 | prefix_record: PikaPrefixDefinition

; 54 | } 55 | 56 | export interface PikaInitializationOptions { 57 | epoch?: EpochResolvable; 58 | nodeId?: number; 59 | suppressPrefixWarnings?: boolean; 60 | disableLowercase?: boolean; 61 | } 62 | 63 | export const VALID_PREFIX = /^[a-z0-9_]+$/i; 64 | export const DEFAULT_EPOCH = 1640995200000n; // Jan 1 2022 65 | 66 | export type PrefixInit = V | PikaPrefixDefinition; 67 | export type LowercasePrefixInit = Lowercase extends V ? PrefixInit : PrefixInit>; 68 | 69 | export class InvalidPrefixError extends TypeError { 70 | constructor(prefix: string) { 71 | super(`invalid prefix; prefixes must be alphanumeric (a-z0-9_) and may include underscores; received: ${prefix}`); 72 | } 73 | } 74 | 75 | export class Pika { 76 | public readonly prefixes: Record> = {}; 77 | readonly #snowflake: Snowflake; 78 | readonly #suppressPrefixWarnings: boolean; 79 | 80 | /** 81 | * The generated or passed in node ID for this Pika instance 82 | * @internal 83 | */ 84 | #nodeId: bigint; 85 | 86 | /** 87 | * @param prefixes a list of PikaPrefixRecords to initialize pika with 88 | * @param opts misc. options to initialize pika with 89 | */ 90 | constructor(prefixes: readonly LowercasePrefixInit[], {nodeId, ...opts}: PikaInitializationOptions = {}) { 91 | this.#nodeId = nodeId ? BigInt(nodeId) % 1024n : this.computeNodeId(); 92 | this.#snowflake = new Snowflake(opts.epoch || DEFAULT_EPOCH, this.#nodeId); 93 | this.#suppressPrefixWarnings = opts.suppressPrefixWarnings ?? false; 94 | 95 | this.prefixes = prefixes.reduce((prefixes, definition) => { 96 | const prefix = typeof definition === 'string' ? 
definition : definition.prefix; 97 | 98 | if (!VALID_PREFIX.test(prefix)) { 99 | throw new InvalidPrefixError(prefix); 100 | } 101 | 102 | if (typeof definition === 'string') { 103 | return { 104 | ...prefixes, 105 | [definition]: {prefix}, 106 | }; 107 | } 108 | 109 | return { 110 | ...prefixes, 111 | [prefix]: definition, 112 | }; 113 | }, {}); 114 | } 115 | 116 | /** 117 | * Validates something that might be an ID is valid with our prefix set 118 | * @param maybeId the ID to validate 119 | * @param expectPrefix the prefix to expect 120 | * @returns 121 | */ 122 | validate(maybeId: string, expectPrefix?: T): maybeId is `${T}_${string}` { 123 | if (typeof maybeId !== 'string') { 124 | return false; 125 | } 126 | 127 | const s = maybeId.split('_'); 128 | const tail = s[s.length - 1]; 129 | const prefix = s.slice(0, s.length - 1).join('_'); 130 | 131 | if (!tail) { 132 | return false; 133 | } 134 | 135 | if (expectPrefix && prefix !== expectPrefix) { 136 | return false; 137 | } 138 | 139 | if (expectPrefix) { 140 | return prefix === expectPrefix; 141 | } 142 | 143 | return prefix in this.prefixes; 144 | } 145 | 146 | gen(prefix: Prefix): `${Prefix}_${string}` { 147 | if (!VALID_PREFIX.test(prefix)) { 148 | throw new InvalidPrefixError(prefix); 149 | } 150 | 151 | if (!this.prefixes[prefix] && !this.#suppressPrefixWarnings) { 152 | warn( 153 | `Unregistered prefix (${prefix}) was used. This can cause unknown behavior - see https://github.com/hopinc/pika/tree/main/impl/js for details.`, 154 | ); 155 | } 156 | 157 | const snowflake = this.#snowflake.gen(); 158 | 159 | return `${prefix.toLowerCase()}_${Buffer.from( 160 | (this.prefixes[prefix]?.secure ? 
`s_${randomBytes(16).toString('hex')}_` : '') + snowflake, 161 | ).toString('base64url')}` as `${Prefix}_${string}`; 162 | } 163 | 164 | /** 165 | * Gen a Snowflake, if you really need one 166 | */ 167 | public genSnowflake() { 168 | return this.#snowflake.gen(); 169 | } 170 | 171 | public decode(id: string): DecodedPika { 172 | try { 173 | const s = id.split('_'); 174 | const tail = s[s.length - 1]; 175 | const prefix = s.slice(0, s.length - 1).join('_') as Prefixes; 176 | 177 | const decodedTail = Buffer.from(tail, 'base64').toString(); 178 | const sf = decodedTail.split('_').pop(); 179 | 180 | if (!sf) { 181 | throw Error('attempted to decode invalid pika; tail was corrupt'); 182 | } 183 | 184 | const {id: snowflake, ...v} = this.#snowflake.deconstruct(sf); 185 | 186 | return { 187 | prefix, 188 | tail, 189 | prefix_record: this.prefixes[prefix], 190 | prefixRecord: this.prefixes[prefix], 191 | snowflake, 192 | version: 1, 193 | ...v, 194 | }; 195 | } catch (e: unknown) { 196 | error('Failed to decode ID', id); 197 | throw e; 198 | } 199 | } 200 | 201 | /** 202 | * Derives this machine's node ID from the MAC address of the first 203 | * public network interface it finds 204 | * @returns The computed node ID (0-1023) 205 | */ 206 | private computeNodeId(): bigint { 207 | try { 208 | const interfaces = Object.values(networkInterfaces()); 209 | const firstValidInterface = interfaces.filter(iface => iface && iface[0].mac !== '00:00:00:00:00:00')[0]; 210 | 211 | if (!firstValidInterface) { 212 | throw new Error('no valid mac address found'); 213 | } 214 | 215 | const mac = firstValidInterface[0].mac; 216 | 217 | return BigInt(parseInt(mac.split(':').join(''), 16) % 1024); 218 | } catch (e) { 219 | warn('Failed to compute node ID, falling back to 0. Error:\n', e); 220 | return 0n; 221 | } 222 | } 223 | } 224 | 225 | export type InferPrefixes> = T extends Pika ? P : never; 226 | export type InferIds> = T extends Pika ? 
`${P}_${string}` : never; 227 | -------------------------------------------------------------------------------- /impl/js/src/snowflake.ts: -------------------------------------------------------------------------------- 1 | export type EpochResolvable = number | bigint | Date; 2 | 3 | export interface SnowflakeGenOptions { 4 | timestamp?: EpochResolvable; 5 | } 6 | 7 | export interface DeconstructedSnowflake { 8 | id: bigint; 9 | timestamp: bigint; 10 | nodeId: number; 11 | seq: number; 12 | epoch: bigint; 13 | } 14 | 15 | /** 16 | * A class for generating and deconstructing snowflakes. 17 | * 18 | * Pika has put it's own spin on Twitter snowflakes to simplify deployment 19 | * and setup. Instead of having a separate worker and process ID, we have 20 | * one node ID that takes up the 10 bits these fields would usually use. 21 | * 22 | * A node ID is computed by taking the MAC address of the first available 23 | * public interface on the device, then calculating the modulo against 24 | * 1024 (10b) 25 | * 26 | * If we have a snowflake `963584775274749952n` we can represent it as binary: 27 | * ``` 28 | * 64 22 12 0 29 | * 000011010101111101011111011110000011001010 0001000101 000000000000 30 | * number of ms since epoch node id sequence 31 | * ``` 32 | */ 33 | export class Snowflake { 34 | /** 35 | * Snowflakes generated are derived from this epoch 36 | * @internal 37 | */ 38 | #epoch: bigint; 39 | 40 | /** 41 | * Passed in node ID for this Snowflake instance 42 | * @internal 43 | */ 44 | #nodeId: bigint; 45 | 46 | /** 47 | * Current sequence number (0-4095) 48 | * @internal 49 | */ 50 | #seq = 0n; 51 | 52 | /** 53 | * Last timestamp of the last time the sequence was exhausted 54 | * @internal 55 | */ 56 | #lastSequenceExhaustion: number = 0; 57 | 58 | /** 59 | * @param epoch the base epoch to use 60 | * @param nodeId optionally pass a static node identifier (0-1023) 61 | */ 62 | constructor(epoch: EpochResolvable, nodeId: number | bigint) { 63 | this.#epoch = 
this.normalizeEpoch(epoch); 64 | this.#nodeId = BigInt(nodeId); 65 | } 66 | 67 | public get nodeId(): number { 68 | return Number(this.#nodeId); 69 | } 70 | 71 | public gen({timestamp = Date.now()}: SnowflakeGenOptions = {}): string { 72 | const nTimestamp = this.normalizeEpoch(timestamp); 73 | 74 | if (this.#seq === 4095n && timestamp === this.#lastSequenceExhaustion) { 75 | // purposely blocking 76 | while (Date.now() - timestamp < 1) { 77 | continue; 78 | } 79 | } 80 | 81 | this.#seq = this.#seq >= 4095n ? 0n : this.#seq + 1n; 82 | if (this.#seq === 4095n) this.#lastSequenceExhaustion = Date.now(); 83 | 84 | return ( 85 | ((nTimestamp - this.#epoch) << 22n) | // millis since epoch 86 | ((this.#nodeId & 0b1111111111n) << 12n) | 87 | this.#seq 88 | ).toString(); 89 | } 90 | 91 | public deconstruct(id: string | bigint): DeconstructedSnowflake { 92 | const bigIntId = BigInt(id); 93 | 94 | return { 95 | id: bigIntId, 96 | timestamp: (bigIntId >> 22n) + this.#epoch, 97 | nodeId: Number((bigIntId >> 12n) & 0b1111111111n), 98 | seq: Number(bigIntId & 0b111111111111n), 99 | epoch: this.#epoch, 100 | }; 101 | } 102 | 103 | private normalizeEpoch(epoch: EpochResolvable): bigint { 104 | return BigInt(epoch instanceof Date ? 
epoch.getTime() : epoch); 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /impl/js/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "moduleResolution": "node", 4 | "target": "ES2020", 5 | "lib": ["esnext"], 6 | "module": "CommonJS", 7 | "outDir": "./dist/", 8 | "strict": true, 9 | "pretty": true, 10 | "declaration": true, 11 | "skipLibCheck": true, 12 | "noEmitOnError": false, 13 | "stripInternal": true, 14 | "noUnusedLocals": true, 15 | "isolatedModules": true, 16 | "esModuleInterop": true, 17 | "noImplicitReturns": true, 18 | "downlevelIteration": true, 19 | "noUnusedParameters": true, 20 | "experimentalDecorators": true, 21 | "importsNotUsedAsValues": "error", 22 | "forceConsistentCasingInFileNames": true, 23 | "allowJs": false, 24 | "removeComments": false 25 | }, 26 | "exclude": ["node_modules", "dist"] 27 | } 28 | -------------------------------------------------------------------------------- /impl/js/yarn.lock: -------------------------------------------------------------------------------- 1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
2 | # yarn lockfile v1 3 | 4 | 5 | "@esbuild-kit/cjs-loader@^2.4.0": 6 | version "2.4.0" 7 | resolved "https://registry.yarnpkg.com/@esbuild-kit/cjs-loader/-/cjs-loader-2.4.0.tgz#643c4b2855a18f31cd983794536d4ff64d3b410d" 8 | integrity sha512-DBBCiHPgL2B/elUpvCDhNHXnlZQ9sfO2uyt1OJyAXKT41beQEFY4OxZ6gwS+ZesRCbZ6JV8M7GEyOPkjv8kdIw== 9 | dependencies: 10 | "@esbuild-kit/core-utils" "^3.0.0" 11 | get-tsconfig "^4.2.0" 12 | 13 | "@esbuild-kit/core-utils@^3.0.0": 14 | version "3.0.0" 15 | resolved "https://registry.yarnpkg.com/@esbuild-kit/core-utils/-/core-utils-3.0.0.tgz#e0f8463a32b4a9c9b456a7f9c31a5e038c8d2c19" 16 | integrity sha512-TXmwH9EFS3DC2sI2YJWJBgHGhlteK0Xyu1VabwetMULfm3oYhbrsWV5yaSr2NTWZIgDGVLHbRf0inxbjXqAcmQ== 17 | dependencies: 18 | esbuild "~0.15.10" 19 | source-map-support "^0.5.21" 20 | 21 | "@esbuild-kit/esm-loader@^2.5.0": 22 | version "2.5.0" 23 | resolved "https://registry.yarnpkg.com/@esbuild-kit/esm-loader/-/esm-loader-2.5.0.tgz#af208eb9e0449038e7f35957ec51b7e70135e116" 24 | integrity sha512-ySs0qOsiwj+hsgZM9/MniGdvfa9/WzqfFuIia8/5gSUPeIQIX2/tG91QakxPFOR35VFiwTB7wCiHtiS6dc6SkA== 25 | dependencies: 26 | "@esbuild-kit/core-utils" "^3.0.0" 27 | get-tsconfig "^4.2.0" 28 | 29 | "@esbuild/android-arm@0.15.10": 30 | version "0.15.10" 31 | resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.15.10.tgz#a5f9432eb221afc243c321058ef25fe899886892" 32 | integrity sha512-FNONeQPy/ox+5NBkcSbYJxoXj9GWu8gVGJTVmUyoOCKQFDTrHVKgNSzChdNt0I8Aj/iKcsDf2r9BFwv+FSNUXg== 33 | 34 | "@esbuild/linux-loong64@0.15.10": 35 | version "0.15.10" 36 | resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.15.10.tgz#78a42897c2cf8db9fd5f1811f7590393b77774c7" 37 | integrity sha512-w0Ou3Z83LOYEkwaui2M8VwIp+nLi/NA60lBLMvaJ+vXVMcsARYdEzLNE7RSm4+lSg4zq4d7fAVuzk7PNQ5JFgg== 38 | 39 | "@nodelib/fs.scandir@2.1.5": 40 | version "2.1.5" 41 | resolved 
"https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" 42 | integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== 43 | dependencies: 44 | "@nodelib/fs.stat" "2.0.5" 45 | run-parallel "^1.1.9" 46 | 47 | "@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": 48 | version "2.0.5" 49 | resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" 50 | integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== 51 | 52 | "@nodelib/fs.walk@^1.2.3": 53 | version "1.2.8" 54 | resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" 55 | integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== 56 | dependencies: 57 | "@nodelib/fs.scandir" "2.1.5" 58 | fastq "^1.6.0" 59 | 60 | "@types/node@^18.8.3": 61 | version "18.8.5" 62 | resolved "https://registry.yarnpkg.com/@types/node/-/node-18.8.5.tgz#6a31f820c1077c3f8ce44f9e203e68a176e8f59e" 63 | integrity sha512-Bq7G3AErwe5A/Zki5fdD3O6+0zDChhg671NfPjtIcbtzDNZTv4NPKMRFr7gtYPG7y+B8uTiNK4Ngd9T0FTar6Q== 64 | 65 | any-promise@^1.0.0: 66 | version "1.3.0" 67 | resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" 68 | integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A== 69 | 70 | anymatch@~3.1.2: 71 | version "3.1.2" 72 | resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" 73 | integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== 74 | dependencies: 75 | normalize-path "^3.0.0" 76 | picomatch "^2.0.4" 77 | 78 | array-union@^2.1.0: 79 | version "2.1.0" 80 | resolved 
"https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" 81 | integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== 82 | 83 | balanced-match@^1.0.0: 84 | version "1.0.2" 85 | resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" 86 | integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== 87 | 88 | benchmark@^2.1.4: 89 | version "2.1.4" 90 | resolved "https://registry.yarnpkg.com/benchmark/-/benchmark-2.1.4.tgz#09f3de31c916425d498cc2ee565a0ebf3c2a5629" 91 | integrity sha512-l9MlfN4M1K/H2fbhfMy3B7vJd6AGKJVQn2h6Sg/Yx+KckoUA7ewS5Vv6TjSq18ooE1kS9hhAlQRH3AkXIh/aOQ== 92 | dependencies: 93 | lodash "^4.17.4" 94 | platform "^1.3.3" 95 | 96 | binary-extensions@^2.0.0: 97 | version "2.2.0" 98 | resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" 99 | integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== 100 | 101 | brace-expansion@^1.1.7: 102 | version "1.1.11" 103 | resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" 104 | integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== 105 | dependencies: 106 | balanced-match "^1.0.0" 107 | concat-map "0.0.1" 108 | 109 | braces@^3.0.2, braces@~3.0.2: 110 | version "3.0.2" 111 | resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" 112 | integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== 113 | dependencies: 114 | fill-range "^7.0.1" 115 | 116 | buffer-from@^1.0.0: 117 | version "1.1.2" 118 | resolved 
"https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" 119 | integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== 120 | 121 | bundle-require@^3.1.0: 122 | version "3.1.0" 123 | resolved "https://registry.yarnpkg.com/bundle-require/-/bundle-require-3.1.0.tgz#e07256ff02c72cd3a665afa84ce930d111ae4252" 124 | integrity sha512-IIXtAO7fKcwPHNPt9kY/WNVJqy7NDy6YqJvv6ENH0TOZoJ+yjpEsn1w40WKZbR2ibfu5g1rfgJTvmFHpm5aOMA== 125 | dependencies: 126 | load-tsconfig "^0.2.0" 127 | 128 | cac@^6.7.12: 129 | version "6.7.14" 130 | resolved "https://registry.yarnpkg.com/cac/-/cac-6.7.14.tgz#804e1e6f506ee363cb0e3ccbb09cad5dd9870959" 131 | integrity sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ== 132 | 133 | chokidar@^3.5.1: 134 | version "3.5.3" 135 | resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" 136 | integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== 137 | dependencies: 138 | anymatch "~3.1.2" 139 | braces "~3.0.2" 140 | glob-parent "~5.1.2" 141 | is-binary-path "~2.1.0" 142 | is-glob "~4.0.1" 143 | normalize-path "~3.0.0" 144 | readdirp "~3.6.0" 145 | optionalDependencies: 146 | fsevents "~2.3.2" 147 | 148 | commander@^4.0.0: 149 | version "4.1.1" 150 | resolved "https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068" 151 | integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA== 152 | 153 | concat-map@0.0.1: 154 | version "0.0.1" 155 | resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" 156 | integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== 157 | 158 | cross-spawn@^7.0.3: 159 | version "7.0.3" 160 | 
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" 161 | integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== 162 | dependencies: 163 | path-key "^3.1.0" 164 | shebang-command "^2.0.0" 165 | which "^2.0.1" 166 | 167 | debug@^4.3.1: 168 | version "4.3.4" 169 | resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" 170 | integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== 171 | dependencies: 172 | ms "2.1.2" 173 | 174 | dir-glob@^3.0.1: 175 | version "3.0.1" 176 | resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" 177 | integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== 178 | dependencies: 179 | path-type "^4.0.0" 180 | 181 | esbuild-android-64@0.15.10: 182 | version "0.15.10" 183 | resolved "https://registry.yarnpkg.com/esbuild-android-64/-/esbuild-android-64-0.15.10.tgz#8a59a84acbf2eca96996cadc35642cf055c494f0" 184 | integrity sha512-UI7krF8OYO1N7JYTgLT9ML5j4+45ra3amLZKx7LO3lmLt1Ibn8t3aZbX5Pu4BjWiqDuJ3m/hsvhPhK/5Y/YpnA== 185 | 186 | esbuild-android-arm64@0.15.10: 187 | version "0.15.10" 188 | resolved "https://registry.yarnpkg.com/esbuild-android-arm64/-/esbuild-android-arm64-0.15.10.tgz#f453851dc1d8c5409a38cf7613a33852faf4915d" 189 | integrity sha512-EOt55D6xBk5O05AK8brXUbZmoFj4chM8u3riGflLa6ziEoVvNjRdD7Cnp82NHQGfSHgYR06XsPI8/sMuA/cUwg== 190 | 191 | esbuild-darwin-64@0.15.10: 192 | version "0.15.10" 193 | resolved "https://registry.yarnpkg.com/esbuild-darwin-64/-/esbuild-darwin-64-0.15.10.tgz#778bd29c8186ff47b176c8af58c08cf0fb8e6b86" 194 | integrity sha512-hbDJugTicqIm+WKZgp208d7FcXcaK8j2c0l+fqSJ3d2AzQAfjEYDRM3Z2oMeqSJ9uFxyj/muSACLdix7oTstRA== 195 | 196 | esbuild-darwin-arm64@0.15.10: 197 | version "0.15.10" 198 | resolved 
"https://registry.yarnpkg.com/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.15.10.tgz#b30bbefb46dc3c5d4708b0435e52f6456578d6df" 199 | integrity sha512-M1t5+Kj4IgSbYmunf2BB6EKLkWUq+XlqaFRiGOk8bmBapu9bCDrxjf4kUnWn59Dka3I27EiuHBKd1rSO4osLFQ== 200 | 201 | esbuild-freebsd-64@0.15.10: 202 | version "0.15.10" 203 | resolved "https://registry.yarnpkg.com/esbuild-freebsd-64/-/esbuild-freebsd-64-0.15.10.tgz#ab301c5f6ded5110dbdd611140bef1a7c2e99236" 204 | integrity sha512-KMBFMa7C8oc97nqDdoZwtDBX7gfpolkk6Bcmj6YFMrtCMVgoU/x2DI1p74DmYl7CSS6Ppa3xgemrLrr5IjIn0w== 205 | 206 | esbuild-freebsd-arm64@0.15.10: 207 | version "0.15.10" 208 | resolved "https://registry.yarnpkg.com/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.15.10.tgz#a5b09b867a6ff49110f52343b6f12265db63d43f" 209 | integrity sha512-m2KNbuCX13yQqLlbSojFMHpewbn8wW5uDS6DxRpmaZKzyq8Dbsku6hHvh2U+BcLwWY4mpgXzFUoENEf7IcioGg== 210 | 211 | esbuild-linux-32@0.15.10: 212 | version "0.15.10" 213 | resolved "https://registry.yarnpkg.com/esbuild-linux-32/-/esbuild-linux-32-0.15.10.tgz#5282fe9915641caf9c8070e4ba2c3e16d358f837" 214 | integrity sha512-guXrwSYFAvNkuQ39FNeV4sNkNms1bLlA5vF1H0cazZBOLdLFIny6BhT+TUbK/hdByMQhtWQ5jI9VAmPKbVPu1w== 215 | 216 | esbuild-linux-64@0.15.10: 217 | version "0.15.10" 218 | resolved "https://registry.yarnpkg.com/esbuild-linux-64/-/esbuild-linux-64-0.15.10.tgz#f3726e85a00149580cb19f8abfabcbb96f5d52bb" 219 | integrity sha512-jd8XfaSJeucMpD63YNMO1JCrdJhckHWcMv6O233bL4l6ogQKQOxBYSRP/XLWP+6kVTu0obXovuckJDcA0DKtQA== 220 | 221 | esbuild-linux-arm64@0.15.10: 222 | version "0.15.10" 223 | resolved "https://registry.yarnpkg.com/esbuild-linux-arm64/-/esbuild-linux-arm64-0.15.10.tgz#2f0056e9d5286edb0185b56655caa8c574d8dbe7" 224 | integrity sha512-GByBi4fgkvZFTHFDYNftu1DQ1GzR23jws0oWyCfhnI7eMOe+wgwWrc78dbNk709Ivdr/evefm2PJiUBMiusS1A== 225 | 226 | esbuild-linux-arm@0.15.10: 227 | version "0.15.10" 228 | resolved 
"https://registry.yarnpkg.com/esbuild-linux-arm/-/esbuild-linux-arm-0.15.10.tgz#40a9270da3c8ffa32cf72e24a79883e323dff08d" 229 | integrity sha512-6N8vThLL/Lysy9y4Ex8XoLQAlbZKUyExCWyayGi2KgTBelKpPgj6RZnUaKri0dHNPGgReJriKVU6+KDGQwn10A== 230 | 231 | esbuild-linux-mips64le@0.15.10: 232 | version "0.15.10" 233 | resolved "https://registry.yarnpkg.com/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.15.10.tgz#90ce1c4ee0202edb4ac69807dea77f7e5804abc4" 234 | integrity sha512-BxP+LbaGVGIdQNJUNF7qpYjEGWb0YyHVSKqYKrn+pTwH/SiHUxFyJYSP3pqkku61olQiSBnSmWZ+YUpj78Tw7Q== 235 | 236 | esbuild-linux-ppc64le@0.15.10: 237 | version "0.15.10" 238 | resolved "https://registry.yarnpkg.com/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.15.10.tgz#782837ae7bd5b279178106c9dd801755a21fabdf" 239 | integrity sha512-LoSQCd6498PmninNgqd/BR7z3Bsk/mabImBWuQ4wQgmQEeanzWd5BQU2aNi9mBURCLgyheuZS6Xhrw5luw3OkQ== 240 | 241 | esbuild-linux-riscv64@0.15.10: 242 | version "0.15.10" 243 | resolved "https://registry.yarnpkg.com/esbuild-linux-riscv64/-/esbuild-linux-riscv64-0.15.10.tgz#d7420d806ece5174f24f4634303146f915ab4207" 244 | integrity sha512-Lrl9Cr2YROvPV4wmZ1/g48httE8z/5SCiXIyebiB5N8VT7pX3t6meI7TQVHw/wQpqP/AF4SksDuFImPTM7Z32Q== 245 | 246 | esbuild-linux-s390x@0.15.10: 247 | version "0.15.10" 248 | resolved "https://registry.yarnpkg.com/esbuild-linux-s390x/-/esbuild-linux-s390x-0.15.10.tgz#21fdf0cb3494a7fb520a71934e4dffce67fe47be" 249 | integrity sha512-ReP+6q3eLVVP2lpRrvl5EodKX7EZ1bS1/z5j6hsluAlZP5aHhk6ghT6Cq3IANvvDdscMMCB4QEbI+AjtvoOFpA== 250 | 251 | esbuild-netbsd-64@0.15.10: 252 | version "0.15.10" 253 | resolved "https://registry.yarnpkg.com/esbuild-netbsd-64/-/esbuild-netbsd-64-0.15.10.tgz#6c06b3107e3df53de381e6299184d4597db0440f" 254 | integrity sha512-iGDYtJCMCqldMskQ4eIV+QSS/CuT7xyy9i2/FjpKvxAuCzrESZXiA1L64YNj6/afuzfBe9i8m/uDkFHy257hTw== 255 | 256 | esbuild-openbsd-64@0.15.10: 257 | version "0.15.10" 258 | resolved 
"https://registry.yarnpkg.com/esbuild-openbsd-64/-/esbuild-openbsd-64-0.15.10.tgz#4daef5f5d8e74bbda53b65160029445d582570cf" 259 | integrity sha512-ftMMIwHWrnrYnvuJQRJs/Smlcb28F9ICGde/P3FUTCgDDM0N7WA0o9uOR38f5Xe2/OhNCgkjNeb7QeaE3cyWkQ== 260 | 261 | esbuild-sunos-64@0.15.10: 262 | version "0.15.10" 263 | resolved "https://registry.yarnpkg.com/esbuild-sunos-64/-/esbuild-sunos-64-0.15.10.tgz#5fe7bef267a02f322fd249a8214d0274937388a7" 264 | integrity sha512-mf7hBL9Uo2gcy2r3rUFMjVpTaGpFJJE5QTDDqUFf1632FxteYANffDZmKbqX0PfeQ2XjUDE604IcE7OJeoHiyg== 265 | 266 | esbuild-windows-32@0.15.10: 267 | version "0.15.10" 268 | resolved "https://registry.yarnpkg.com/esbuild-windows-32/-/esbuild-windows-32-0.15.10.tgz#48e3dde25ab0135579a288b30ab6ddef6d1f0b28" 269 | integrity sha512-ttFVo+Cg8b5+qHmZHbEc8Vl17kCleHhLzgT8X04y8zudEApo0PxPg9Mz8Z2cKH1bCYlve1XL8LkyXGFjtUYeGg== 270 | 271 | esbuild-windows-64@0.15.10: 272 | version "0.15.10" 273 | resolved "https://registry.yarnpkg.com/esbuild-windows-64/-/esbuild-windows-64-0.15.10.tgz#387a9515bef3fee502d277a5d0a2db49a4ecda05" 274 | integrity sha512-2H0gdsyHi5x+8lbng3hLbxDWR7mKHWh5BXZGKVG830KUmXOOWFE2YKJ4tHRkejRduOGDrBvHBriYsGtmTv3ntA== 275 | 276 | esbuild-windows-arm64@0.15.10: 277 | version "0.15.10" 278 | resolved "https://registry.yarnpkg.com/esbuild-windows-arm64/-/esbuild-windows-arm64-0.15.10.tgz#5a6fcf2fa49e895949bf5495cf088ab1b43ae879" 279 | integrity sha512-S+th4F+F8VLsHLR0zrUcG+Et4hx0RKgK1eyHc08kztmLOES8BWwMiaGdoW9hiXuzznXQ0I/Fg904MNbr11Nktw== 280 | 281 | esbuild@^0.15.1, esbuild@~0.15.10: 282 | version "0.15.10" 283 | resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.15.10.tgz#85c2f8446e9b1fe04fae68daceacba033eedbd42" 284 | integrity sha512-N7wBhfJ/E5fzn/SpNgX+oW2RLRjwaL8Y0ezqNqhjD6w0H2p0rDuEz2FKZqpqLnO8DCaWumKe8dsC/ljvVSSxng== 285 | optionalDependencies: 286 | "@esbuild/android-arm" "0.15.10" 287 | "@esbuild/linux-loong64" "0.15.10" 288 | esbuild-android-64 "0.15.10" 289 | esbuild-android-arm64 "0.15.10" 290 | 
esbuild-darwin-64 "0.15.10" 291 | esbuild-darwin-arm64 "0.15.10" 292 | esbuild-freebsd-64 "0.15.10" 293 | esbuild-freebsd-arm64 "0.15.10" 294 | esbuild-linux-32 "0.15.10" 295 | esbuild-linux-64 "0.15.10" 296 | esbuild-linux-arm "0.15.10" 297 | esbuild-linux-arm64 "0.15.10" 298 | esbuild-linux-mips64le "0.15.10" 299 | esbuild-linux-ppc64le "0.15.10" 300 | esbuild-linux-riscv64 "0.15.10" 301 | esbuild-linux-s390x "0.15.10" 302 | esbuild-netbsd-64 "0.15.10" 303 | esbuild-openbsd-64 "0.15.10" 304 | esbuild-sunos-64 "0.15.10" 305 | esbuild-windows-32 "0.15.10" 306 | esbuild-windows-64 "0.15.10" 307 | esbuild-windows-arm64 "0.15.10" 308 | 309 | execa@^5.0.0: 310 | version "5.1.1" 311 | resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" 312 | integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== 313 | dependencies: 314 | cross-spawn "^7.0.3" 315 | get-stream "^6.0.0" 316 | human-signals "^2.1.0" 317 | is-stream "^2.0.0" 318 | merge-stream "^2.0.0" 319 | npm-run-path "^4.0.1" 320 | onetime "^5.1.2" 321 | signal-exit "^3.0.3" 322 | strip-final-newline "^2.0.0" 323 | 324 | fast-glob@^3.2.9: 325 | version "3.2.12" 326 | resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" 327 | integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== 328 | dependencies: 329 | "@nodelib/fs.stat" "^2.0.2" 330 | "@nodelib/fs.walk" "^1.2.3" 331 | glob-parent "^5.1.2" 332 | merge2 "^1.3.0" 333 | micromatch "^4.0.4" 334 | 335 | fastq@^1.6.0: 336 | version "1.13.0" 337 | resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" 338 | integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== 339 | dependencies: 340 | reusify "^1.0.4" 341 | 342 | fill-range@^7.0.1: 343 | version "7.0.1" 344 | 
resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" 345 | integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== 346 | dependencies: 347 | to-regex-range "^5.0.1" 348 | 349 | fs.realpath@^1.0.0: 350 | version "1.0.0" 351 | resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" 352 | integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== 353 | 354 | fsevents@~2.3.2: 355 | version "2.3.2" 356 | resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" 357 | integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== 358 | 359 | get-stream@^6.0.0: 360 | version "6.0.1" 361 | resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" 362 | integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== 363 | 364 | get-tsconfig@^4.2.0: 365 | version "4.2.0" 366 | resolved "https://registry.yarnpkg.com/get-tsconfig/-/get-tsconfig-4.2.0.tgz#ff368dd7104dab47bf923404eb93838245c66543" 367 | integrity sha512-X8u8fREiYOE6S8hLbq99PeykTDoLVnxvF4DjWKJmz9xy2nNRdUcV8ZN9tniJFeKyTU3qnC9lL8n4Chd6LmVKHg== 368 | 369 | glob-parent@^5.1.2, glob-parent@~5.1.2: 370 | version "5.1.2" 371 | resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" 372 | integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== 373 | dependencies: 374 | is-glob "^4.0.1" 375 | 376 | glob@7.1.6: 377 | version "7.1.6" 378 | resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" 379 | integrity 
sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== 380 | dependencies: 381 | fs.realpath "^1.0.0" 382 | inflight "^1.0.4" 383 | inherits "2" 384 | minimatch "^3.0.4" 385 | once "^1.3.0" 386 | path-is-absolute "^1.0.0" 387 | 388 | globby@^11.0.3: 389 | version "11.1.0" 390 | resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" 391 | integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== 392 | dependencies: 393 | array-union "^2.1.0" 394 | dir-glob "^3.0.1" 395 | fast-glob "^3.2.9" 396 | ignore "^5.2.0" 397 | merge2 "^1.4.1" 398 | slash "^3.0.0" 399 | 400 | human-signals@^2.1.0: 401 | version "2.1.0" 402 | resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" 403 | integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== 404 | 405 | ignore@^5.2.0: 406 | version "5.2.0" 407 | resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" 408 | integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== 409 | 410 | inflight@^1.0.4: 411 | version "1.0.6" 412 | resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" 413 | integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== 414 | dependencies: 415 | once "^1.3.0" 416 | wrappy "1" 417 | 418 | inherits@2: 419 | version "2.0.4" 420 | resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" 421 | integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== 422 | 423 | is-binary-path@~2.1.0: 424 | version "2.1.0" 425 | resolved 
"https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" 426 | integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== 427 | dependencies: 428 | binary-extensions "^2.0.0" 429 | 430 | is-extglob@^2.1.1: 431 | version "2.1.1" 432 | resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" 433 | integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== 434 | 435 | is-glob@^4.0.1, is-glob@~4.0.1: 436 | version "4.0.3" 437 | resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" 438 | integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== 439 | dependencies: 440 | is-extglob "^2.1.1" 441 | 442 | is-number@^7.0.0: 443 | version "7.0.0" 444 | resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" 445 | integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== 446 | 447 | is-stream@^2.0.0: 448 | version "2.0.1" 449 | resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" 450 | integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== 451 | 452 | isexe@^2.0.0: 453 | version "2.0.0" 454 | resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" 455 | integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== 456 | 457 | joycon@^3.0.1: 458 | version "3.1.1" 459 | resolved "https://registry.yarnpkg.com/joycon/-/joycon-3.1.1.tgz#bce8596d6ae808f8b68168f5fc69280996894f03" 460 | integrity 
sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw== 461 | 462 | lilconfig@^2.0.5: 463 | version "2.0.6" 464 | resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.0.6.tgz#32a384558bd58af3d4c6e077dd1ad1d397bc69d4" 465 | integrity sha512-9JROoBW7pobfsx+Sq2JsASvCo6Pfo6WWoUW79HuB1BCoBXD4PLWJPqDF6fNj67pqBYTbAHkE57M1kS/+L1neOg== 466 | 467 | lines-and-columns@^1.1.6: 468 | version "1.2.4" 469 | resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" 470 | integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== 471 | 472 | load-tsconfig@^0.2.0: 473 | version "0.2.3" 474 | resolved "https://registry.yarnpkg.com/load-tsconfig/-/load-tsconfig-0.2.3.tgz#08af3e7744943caab0c75f8af7f1703639c3ef1f" 475 | integrity sha512-iyT2MXws+dc2Wi6o3grCFtGXpeMvHmJqS27sMPGtV2eUu4PeFnG+33I8BlFK1t1NWMjOpcx9bridn5yxLDX2gQ== 476 | 477 | lodash.sortby@^4.7.0: 478 | version "4.7.0" 479 | resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" 480 | integrity sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA== 481 | 482 | lodash@^4.17.4: 483 | version "4.17.21" 484 | resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" 485 | integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== 486 | 487 | merge-stream@^2.0.0: 488 | version "2.0.0" 489 | resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" 490 | integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== 491 | 492 | merge2@^1.3.0, merge2@^1.4.1: 493 | version "1.4.1" 494 | resolved 
"https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" 495 | integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== 496 | 497 | micromatch@^4.0.4: 498 | version "4.0.5" 499 | resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" 500 | integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== 501 | dependencies: 502 | braces "^3.0.2" 503 | picomatch "^2.3.1" 504 | 505 | mimic-fn@^2.1.0: 506 | version "2.1.0" 507 | resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" 508 | integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== 509 | 510 | minimatch@^3.0.4: 511 | version "3.1.2" 512 | resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" 513 | integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== 514 | dependencies: 515 | brace-expansion "^1.1.7" 516 | 517 | ms@2.1.2: 518 | version "2.1.2" 519 | resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" 520 | integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== 521 | 522 | mz@^2.7.0: 523 | version "2.7.0" 524 | resolved "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32" 525 | integrity sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q== 526 | dependencies: 527 | any-promise "^1.0.0" 528 | object-assign "^4.0.1" 529 | thenify-all "^1.0.0" 530 | 531 | normalize-path@^3.0.0, normalize-path@~3.0.0: 532 | version "3.0.0" 533 | resolved 
"https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" 534 | integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== 535 | 536 | npm-run-path@^4.0.1: 537 | version "4.0.1" 538 | resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" 539 | integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== 540 | dependencies: 541 | path-key "^3.0.0" 542 | 543 | object-assign@^4.0.1: 544 | version "4.1.1" 545 | resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" 546 | integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== 547 | 548 | once@^1.3.0: 549 | version "1.4.0" 550 | resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" 551 | integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== 552 | dependencies: 553 | wrappy "1" 554 | 555 | onetime@^5.1.2: 556 | version "5.1.2" 557 | resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" 558 | integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== 559 | dependencies: 560 | mimic-fn "^2.1.0" 561 | 562 | path-is-absolute@^1.0.0: 563 | version "1.0.1" 564 | resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" 565 | integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== 566 | 567 | path-key@^3.0.0, path-key@^3.1.0: 568 | version "3.1.1" 569 | resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" 570 | integrity 
sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== 571 | 572 | path-type@^4.0.0: 573 | version "4.0.0" 574 | resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" 575 | integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== 576 | 577 | picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1: 578 | version "2.3.1" 579 | resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" 580 | integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== 581 | 582 | pirates@^4.0.1: 583 | version "4.0.5" 584 | resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" 585 | integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== 586 | 587 | platform@^1.3.3: 588 | version "1.3.6" 589 | resolved "https://registry.yarnpkg.com/platform/-/platform-1.3.6.tgz#48b4ce983164b209c2d45a107adb31f473a6e7a7" 590 | integrity sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg== 591 | 592 | postcss-load-config@^3.0.1: 593 | version "3.1.4" 594 | resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-3.1.4.tgz#1ab2571faf84bb078877e1d07905eabe9ebda855" 595 | integrity sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg== 596 | dependencies: 597 | lilconfig "^2.0.5" 598 | yaml "^1.10.2" 599 | 600 | prettier@^2.7.1: 601 | version "2.7.1" 602 | resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.7.1.tgz#e235806850d057f97bb08368a4f7d899f7760c64" 603 | integrity sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g== 604 | 605 | punycode@^2.1.0: 606 | version "2.1.1" 607 | resolved 
"https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" 608 | integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== 609 | 610 | queue-microtask@^1.2.2: 611 | version "1.2.3" 612 | resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" 613 | integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== 614 | 615 | readdirp@~3.6.0: 616 | version "3.6.0" 617 | resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" 618 | integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== 619 | dependencies: 620 | picomatch "^2.2.1" 621 | 622 | resolve-from@^5.0.0: 623 | version "5.0.0" 624 | resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" 625 | integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== 626 | 627 | reusify@^1.0.4: 628 | version "1.0.4" 629 | resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" 630 | integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== 631 | 632 | rollup@^2.74.1: 633 | version "2.79.1" 634 | resolved "https://registry.yarnpkg.com/rollup/-/rollup-2.79.1.tgz#bedee8faef7c9f93a2647ac0108748f497f081c7" 635 | integrity sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw== 636 | optionalDependencies: 637 | fsevents "~2.3.2" 638 | 639 | run-parallel@^1.1.9: 640 | version "1.2.0" 641 | resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" 642 | integrity 
sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== 643 | dependencies: 644 | queue-microtask "^1.2.2" 645 | 646 | shebang-command@^2.0.0: 647 | version "2.0.0" 648 | resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" 649 | integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== 650 | dependencies: 651 | shebang-regex "^3.0.0" 652 | 653 | shebang-regex@^3.0.0: 654 | version "3.0.0" 655 | resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" 656 | integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== 657 | 658 | signal-exit@^3.0.3: 659 | version "3.0.7" 660 | resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" 661 | integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== 662 | 663 | slash@^3.0.0: 664 | version "3.0.0" 665 | resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" 666 | integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== 667 | 668 | source-map-support@^0.5.21: 669 | version "0.5.21" 670 | resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" 671 | integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== 672 | dependencies: 673 | buffer-from "^1.0.0" 674 | source-map "^0.6.0" 675 | 676 | source-map@0.8.0-beta.0: 677 | version "0.8.0-beta.0" 678 | resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.8.0-beta.0.tgz#d4c1bb42c3f7ee925f005927ba10709e0d1d1f11" 679 | integrity 
sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA== 680 | dependencies: 681 | whatwg-url "^7.0.0" 682 | 683 | source-map@^0.6.0: 684 | version "0.6.1" 685 | resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" 686 | integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== 687 | 688 | strip-final-newline@^2.0.0: 689 | version "2.0.0" 690 | resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" 691 | integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== 692 | 693 | sucrase@^3.20.3: 694 | version "3.28.0" 695 | resolved "https://registry.yarnpkg.com/sucrase/-/sucrase-3.28.0.tgz#7fd8b3118d2155fcdf291088ab77fa6eefd63c4c" 696 | integrity sha512-TK9600YInjuiIhVM3729rH4ZKPOsGeyXUwY+Ugu9eilNbdTFyHr6XcAGYbRVZPDgWj6tgI7bx95aaJjHnbffag== 697 | dependencies: 698 | commander "^4.0.0" 699 | glob "7.1.6" 700 | lines-and-columns "^1.1.6" 701 | mz "^2.7.0" 702 | pirates "^4.0.1" 703 | ts-interface-checker "^0.1.9" 704 | 705 | thenify-all@^1.0.0: 706 | version "1.6.0" 707 | resolved "https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726" 708 | integrity sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA== 709 | dependencies: 710 | thenify ">= 3.1.0 < 4" 711 | 712 | "thenify@>= 3.1.0 < 4": 713 | version "3.3.1" 714 | resolved "https://registry.yarnpkg.com/thenify/-/thenify-3.3.1.tgz#8932e686a4066038a016dd9e2ca46add9838a95f" 715 | integrity sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw== 716 | dependencies: 717 | any-promise "^1.0.0" 718 | 719 | to-regex-range@^5.0.1: 720 | version "5.0.1" 721 | resolved 
"https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" 722 | integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== 723 | dependencies: 724 | is-number "^7.0.0" 725 | 726 | tr46@^1.0.1: 727 | version "1.0.1" 728 | resolved "https://registry.yarnpkg.com/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" 729 | integrity sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA== 730 | dependencies: 731 | punycode "^2.1.0" 732 | 733 | tree-kill@^1.2.2: 734 | version "1.2.2" 735 | resolved "https://registry.yarnpkg.com/tree-kill/-/tree-kill-1.2.2.tgz#4ca09a9092c88b73a7cdc5e8a01b507b0790a0cc" 736 | integrity sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A== 737 | 738 | ts-interface-checker@^0.1.9: 739 | version "0.1.13" 740 | resolved "https://registry.yarnpkg.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz#784fd3d679722bc103b1b4b8030bcddb5db2a699" 741 | integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA== 742 | 743 | tsup@^6.2.3: 744 | version "6.2.3" 745 | resolved "https://registry.yarnpkg.com/tsup/-/tsup-6.2.3.tgz#87f57b2e53d49f1c1ab89aba21fed96aaab0ec9f" 746 | integrity sha512-J5Pu2Dx0E1wlpIEsVFv9ryzP1pZ1OYsJ2cBHZ7GrKteytNdzaSz5hmLX7/nAxtypq+jVkVvA79d7S83ETgHQ5w== 747 | dependencies: 748 | bundle-require "^3.1.0" 749 | cac "^6.7.12" 750 | chokidar "^3.5.1" 751 | debug "^4.3.1" 752 | esbuild "^0.15.1" 753 | execa "^5.0.0" 754 | globby "^11.0.3" 755 | joycon "^3.0.1" 756 | postcss-load-config "^3.0.1" 757 | resolve-from "^5.0.0" 758 | rollup "^2.74.1" 759 | source-map "0.8.0-beta.0" 760 | sucrase "^3.20.3" 761 | tree-kill "^1.2.2" 762 | 763 | tsx@^3.10.1: 764 | version "3.10.1" 765 | resolved "https://registry.yarnpkg.com/tsx/-/tsx-3.10.1.tgz#4ffb1229077f648bbf46c0ed2b098f4cd8dd6d6d" 766 | integrity 
sha512-Gh6xoW4xrdnLs6hYZydVHIQtrgmbZ/DbnJoLsYoI8MxhKAIyu8R7RyF0D5qg9UKi74Nmr4iSlijdz7Q43IGLyQ== 767 | dependencies: 768 | "@esbuild-kit/cjs-loader" "^2.4.0" 769 | "@esbuild-kit/core-utils" "^3.0.0" 770 | "@esbuild-kit/esm-loader" "^2.5.0" 771 | optionalDependencies: 772 | fsevents "~2.3.2" 773 | 774 | typescript@^4.8.4: 775 | version "4.8.4" 776 | resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.8.4.tgz#c464abca159669597be5f96b8943500b238e60e6" 777 | integrity sha512-QCh+85mCy+h0IGff8r5XWzOVSbBO+KfeYrMQh7NJ58QujwcE22u+NUSmUxqF+un70P9GXKxa2HCNiTTMJknyjQ== 778 | 779 | webidl-conversions@^4.0.2: 780 | version "4.0.2" 781 | resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" 782 | integrity sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== 783 | 784 | whatwg-url@^7.0.0: 785 | version "7.1.0" 786 | resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-7.1.0.tgz#c2c492f1eca612988efd3d2266be1b9fc6170d06" 787 | integrity sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg== 788 | dependencies: 789 | lodash.sortby "^4.7.0" 790 | tr46 "^1.0.1" 791 | webidl-conversions "^4.0.2" 792 | 793 | which@^2.0.1: 794 | version "2.0.2" 795 | resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" 796 | integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== 797 | dependencies: 798 | isexe "^2.0.0" 799 | 800 | wrappy@1: 801 | version "1.0.2" 802 | resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" 803 | integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== 804 | 805 | yaml@^1.10.2: 806 | version "1.10.2" 807 | resolved 
"https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" 808 | integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== 809 | -------------------------------------------------------------------------------- /impl/rs/.format.toml: -------------------------------------------------------------------------------- 1 | format_code_in_doc_comments = true 2 | hex_literal_case = "Upper" 3 | imports_granularity = "Module" 4 | newline_style = "Unix" 5 | normalize_comments = true 6 | normalize_doc_attributes = true 7 | reorder_impl_items = true 8 | group_imports = "StdExternalCrate" 9 | use_field_init_shorthand = true 10 | use_try_shorthand = true 11 | wrap_comments = true 12 | -------------------------------------------------------------------------------- /impl/rs/.gitignore: -------------------------------------------------------------------------------- 1 | target/ -------------------------------------------------------------------------------- /impl/rs/Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 
3 | version = 3 4 | 5 | [[package]] 6 | name = "autocfg" 7 | version = "1.1.0" 8 | source = "registry+https://github.com/rust-lang/crates.io-index" 9 | checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" 10 | 11 | [[package]] 12 | name = "bitflags" 13 | version = "1.3.2" 14 | source = "registry+https://github.com/rust-lang/crates.io-index" 15 | checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" 16 | 17 | [[package]] 18 | name = "cc" 19 | version = "1.0.73" 20 | source = "registry+https://github.com/rust-lang/crates.io-index" 21 | checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" 22 | 23 | [[package]] 24 | name = "cfg-if" 25 | version = "1.0.0" 26 | source = "registry+https://github.com/rust-lang/crates.io-index" 27 | checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" 28 | 29 | [[package]] 30 | name = "getrandom" 31 | version = "0.2.7" 32 | source = "registry+https://github.com/rust-lang/crates.io-index" 33 | checksum = "4eb1a864a501629691edf6c15a593b7a51eebaa1e8468e9ddc623de7c9b58ec6" 34 | dependencies = [ 35 | "cfg-if", 36 | "libc", 37 | "wasi", 38 | ] 39 | 40 | [[package]] 41 | name = "hex" 42 | version = "0.4.3" 43 | source = "registry+https://github.com/rust-lang/crates.io-index" 44 | checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" 45 | 46 | [[package]] 47 | name = "libc" 48 | version = "0.2.127" 49 | source = "registry+https://github.com/rust-lang/crates.io-index" 50 | checksum = "505e71a4706fa491e9b1b55f51b95d4037d0821ee40131190475f692b35b009b" 51 | 52 | [[package]] 53 | name = "mac_address" 54 | version = "1.1.3" 55 | source = "registry+https://github.com/rust-lang/crates.io-index" 56 | checksum = "df1d1bc1084549d60725ccc53a2bfa07f67fe4689fda07b05a36531f2988104a" 57 | dependencies = [ 58 | "nix", 59 | "winapi", 60 | ] 61 | 62 | [[package]] 63 | name = "memoffset" 64 | version = "0.6.5" 65 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 66 | checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" 67 | dependencies = [ 68 | "autocfg", 69 | ] 70 | 71 | [[package]] 72 | name = "nix" 73 | version = "0.23.1" 74 | source = "registry+https://github.com/rust-lang/crates.io-index" 75 | checksum = "9f866317acbd3a240710c63f065ffb1e4fd466259045ccb504130b7f668f35c6" 76 | dependencies = [ 77 | "bitflags", 78 | "cc", 79 | "cfg-if", 80 | "libc", 81 | "memoffset", 82 | ] 83 | 84 | [[package]] 85 | name = "pika" 86 | version = "0.1.3" 87 | dependencies = [ 88 | "hex", 89 | "mac_address", 90 | "rand", 91 | ] 92 | 93 | [[package]] 94 | name = "ppv-lite86" 95 | version = "0.2.16" 96 | source = "registry+https://github.com/rust-lang/crates.io-index" 97 | checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" 98 | 99 | [[package]] 100 | name = "rand" 101 | version = "0.8.5" 102 | source = "registry+https://github.com/rust-lang/crates.io-index" 103 | checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" 104 | dependencies = [ 105 | "libc", 106 | "rand_chacha", 107 | "rand_core", 108 | ] 109 | 110 | [[package]] 111 | name = "rand_chacha" 112 | version = "0.3.1" 113 | source = "registry+https://github.com/rust-lang/crates.io-index" 114 | checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" 115 | dependencies = [ 116 | "ppv-lite86", 117 | "rand_core", 118 | ] 119 | 120 | [[package]] 121 | name = "rand_core" 122 | version = "0.6.3" 123 | source = "registry+https://github.com/rust-lang/crates.io-index" 124 | checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" 125 | dependencies = [ 126 | "getrandom", 127 | ] 128 | 129 | [[package]] 130 | name = "wasi" 131 | version = "0.11.0+wasi-snapshot-preview1" 132 | source = "registry+https://github.com/rust-lang/crates.io-index" 133 | checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" 
134 | 135 | [[package]] 136 | name = "winapi" 137 | version = "0.3.9" 138 | source = "registry+https://github.com/rust-lang/crates.io-index" 139 | checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" 140 | dependencies = [ 141 | "winapi-i686-pc-windows-gnu", 142 | "winapi-x86_64-pc-windows-gnu", 143 | ] 144 | 145 | [[package]] 146 | name = "winapi-i686-pc-windows-gnu" 147 | version = "0.4.0" 148 | source = "registry+https://github.com/rust-lang/crates.io-index" 149 | checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" 150 | 151 | [[package]] 152 | name = "winapi-x86_64-pc-windows-gnu" 153 | version = "0.4.0" 154 | source = "registry+https://github.com/rust-lang/crates.io-index" 155 | checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" 156 | -------------------------------------------------------------------------------- /impl/rs/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "pika" 3 | version = "0.1.3" 4 | edition = "2021" 5 | readme="./README.md" 6 | description="Pika ID implementation in Rust" 7 | license = "MPL-2.0" 8 | 9 | [dependencies] 10 | hex = "0.4.3" 11 | mac_address = "1.1.3" 12 | rand = "0.8.5" 13 | -------------------------------------------------------------------------------- /impl/rs/README.md: -------------------------------------------------------------------------------- 1 | # pika rs 2 | 3 | Rust implementation for Pika 4 | 5 | ## Install 6 | 7 | ```toml 8 | [dependencies] 9 | pika = "0.1" 10 | ``` 11 | 12 | ## Basic Usage 13 | 14 | ```rs 15 | let prefixes = [ 16 | PrefixRecord { 17 | prefix: "user".to_string(), 18 | description: Some("User ID".to_string()), 19 | secure: false, 20 | }, 21 | PrefixRecord { 22 | prefix: "sk".to_string(), 23 | description: Some("Secret Key".to_string()), 24 | secure: true, 25 | } 26 | ]; 27 | 28 | let mut pika = Pika::new( 29 | prefixes.to_vec(), 30 | InitOptions { 31 
| epoch: Some(1_650_153_600_000), 32 | node_id: None, 33 | disable_lowercase: Some(true), 34 | }, 35 | ); 36 | 37 | pika.gen("user").unwrap(); 38 | // => user_Mzc5ODk1NTI4NzgxMTY4NjQ 39 | 40 | pika.gen("sk").unwrap() 41 | // => sk_c19iMGI0NTM4ZjU3ZThjYTIyZThjNjNlMTgwOTg5MWMyM18zODA2NTE5MjcwNDc5NDYyNA 42 | ``` 43 | 44 | ## Node IDs 45 | 46 | By default, Node IDs are calculated by finding the MAC address of the first public network interface device, then calculating the modulo against 1024. 47 | 48 | This works well for smaller systems, but if you have a lot of nodes generating Snowflakes, then collision is possible. In this case, you should create an internal singleton service which keeps a rolling count of the assigned node IDs - from 1 to 1023. Then, services that generate Pikas should call this service to get assigned a node ID. 49 | 50 | You can then pass in the node ID when initializing Pika like this: 51 | 52 | ```rs 53 | let mut pika = Pika::new( 54 | prefixes.to_vec(), 55 | InitOptions { 56 | epoch: Some(1_650_153_600_000), 57 | node_id: custom_node_id, 58 | disable_lowercase: Some(true), 59 | }, 60 | ); 61 | ``` 62 | -------------------------------------------------------------------------------- /impl/rs/src/base64.rs: -------------------------------------------------------------------------------- 1 | const BASE64_CHARS: [u8; 64] = [ 2 | b'A', b'B', b'C', b'D', b'E', b'F', b'G', b'H', b'I', b'J', b'K', b'L', b'M', b'N', b'O', b'P', 3 | b'Q', b'R', b'S', b'T', b'U', b'V', b'W', b'X', b'Y', b'Z', b'a', b'b', b'c', b'd', b'e', b'f', 4 | b'g', b'h', b'i', b'j', b'k', b'l', b'm', b'n', b'o', b'p', b'q', b'r', b's', b't', b'u', b'v', 5 | b'w', b'x', b'y', b'z', b'0', b'1', b'2', b'3', b'4', b'5', b'6', b'7', b'8', b'9', b'+', b'/' 6 | ]; 7 | 8 | // Base64 encode a slice of bytes 9 | pub fn base64_encode(input: String) -> String { 10 | let mut result = String::new(); 11 | let mut buffer: u32 = 0; 12 | let mut bits_left = 0; 13 | 14 | for byte in 
input.into_bytes() { 15 | buffer = (buffer << 8) | u32::from(byte); 16 | bits_left += 8; 17 | 18 | while bits_left >= 6 { 19 | bits_left -= 6; 20 | let index = ((buffer >> bits_left) & 0b11_1111) as usize; 21 | result.push(BASE64_CHARS[index] as char); 22 | } 23 | } 24 | 25 | if bits_left > 0 { 26 | buffer <<= 6 - bits_left; 27 | let index = (buffer & 0b11_1111) as usize; 28 | result.push(BASE64_CHARS[index] as char); 29 | } 30 | 31 | while result.len() % 4 != 0 { 32 | result.push('='); 33 | } 34 | 35 | result 36 | } 37 | 38 | // Base64 decode a string into a vector of bytes 39 | #[allow(clippy::cast_possible_truncation)] 40 | pub fn base64_decode(encoded: &str) -> Option> { 41 | let mut decoded = Vec::new(); 42 | let mut padding = 0; 43 | let mut buffer = 0; 44 | let mut bits = 0; 45 | 46 | for c in encoded.chars() { 47 | let value = match BASE64_CHARS.iter().position(|&x| x == c as u8) { 48 | Some(v) => v as u32, 49 | None if c == '=' => { 50 | padding += 1; 51 | continue; 52 | } 53 | None => return None, 54 | }; 55 | 56 | buffer = (buffer << 6) | value; 57 | bits += 6; 58 | 59 | if bits >= 8 { 60 | bits -= 8; 61 | decoded.push((buffer >> bits) as u8); 62 | buffer &= (1 << bits) - 1; 63 | } 64 | } 65 | 66 | if bits >= 6 || padding > 2 || (padding > 0 && bits > 0) { 67 | return None; 68 | } 69 | 70 | Some(decoded) 71 | } -------------------------------------------------------------------------------- /impl/rs/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![allow( 2 | clippy::missing_panics_doc, 3 | clippy::missing_errors_doc, 4 | clippy::must_use_candidate, 5 | clippy::module_name_repetitions 6 | )] 7 | 8 | pub mod pika; 9 | pub mod snowflake; 10 | mod base64; 11 | mod utils; 12 | -------------------------------------------------------------------------------- /impl/rs/src/pika.rs: -------------------------------------------------------------------------------- 1 | use std::io::Error; 2 | 3 | use 
rand::rngs::OsRng; 4 | use rand::RngCore; 5 | 6 | use crate::base64::{base64_decode, base64_encode}; 7 | use crate::snowflake::{self, Snowflake}; 8 | 9 | #[derive(Clone, Debug)] 10 | pub struct PrefixRecord { 11 | pub prefix: String, 12 | pub description: Option, 13 | pub secure: bool, 14 | } 15 | 16 | #[derive(Debug)] 17 | pub struct DecodedPika { 18 | pub prefix: String, 19 | pub tail: String, 20 | pub snowflake: u64, 21 | pub node_id: u32, 22 | pub timestamp: u64, 23 | pub epoch: u64, 24 | pub seq: u64, 25 | pub version: i8, 26 | pub prefix_record: PrefixRecord, 27 | } 28 | 29 | #[derive(Debug, Clone)] 30 | pub struct Pika { 31 | pub prefixes: Vec, 32 | pub epoch: u64, 33 | pub node_id: u32, 34 | pub disable_lowercase: Option, 35 | snowflake: Snowflake, 36 | } 37 | 38 | #[derive(Default)] // default implementation was identical to std::default::Default 39 | pub struct InitOptions { 40 | pub epoch: Option, 41 | pub node_id: Option, 42 | pub disable_lowercase: Option, 43 | } 44 | 45 | pub const DEFAULT_EPOCH: u64 = 1_640_995_200_000; 46 | 47 | impl Pika { 48 | pub fn new(prefixes: Vec, options: &InitOptions) -> Self { 49 | let epoch = options.epoch.unwrap_or(DEFAULT_EPOCH); 50 | let node_id = options.node_id.unwrap_or_else(Self::compute_node_id); 51 | 52 | Self { 53 | prefixes, 54 | epoch, 55 | node_id, 56 | disable_lowercase: options.disable_lowercase, 57 | snowflake: snowflake::Snowflake::new_with_nodeid(epoch, node_id), 58 | } 59 | } 60 | 61 | #[allow(clippy::cast_possible_truncation)] 62 | fn compute_node_id() -> u32 { 63 | let res = mac_address::get_mac_address().unwrap(); 64 | let first_mac = res.unwrap().to_string(); 65 | 66 | let first_mac = u64::from_str_radix(&first_mac.replace(':', ""), 16).unwrap(); 67 | 68 | // should lower the chance of collisions 69 | (first_mac % 1024) as u32 70 | } 71 | 72 | pub fn deconstruct(&self, id: &str) -> DecodedPika { 73 | let s = id.split('_').collect::>(); 74 | 75 | let prefix = s[..s.len() - 1].join("_"); 76 | 77 | let 
tail = s[s.len() - 1].to_string(); 78 | 79 | let prefix_record = self.prefixes.iter().find(|x| x.prefix == prefix); 80 | 81 | let decoded_tail = base64_decode(&tail).unwrap(); 82 | 83 | let decoded_tail = &String::from_utf8_lossy(&decoded_tail).to_string(); 84 | let sf = decoded_tail.split('_').collect::>(); 85 | let sf = sf[sf.len() - 1].to_string(); 86 | 87 | let snowflake = self.snowflake.decode(&sf); 88 | 89 | DecodedPika { 90 | prefix, 91 | tail, 92 | snowflake: sf.parse::().unwrap(), 93 | node_id: self.node_id, 94 | timestamp: snowflake.timestamp, 95 | epoch: self.epoch, 96 | seq: snowflake.seq, 97 | version: 1, 98 | prefix_record: prefix_record.unwrap().clone(), 99 | } 100 | } 101 | 102 | pub fn gen(&mut self, prefix: &str) -> Result { 103 | let valid_prefix = prefix 104 | .chars() 105 | .all(|c| c.is_ascii_alphanumeric() || c == '_') 106 | && prefix.len() <= 32 107 | && !prefix.is_empty(); 108 | 109 | assert!(valid_prefix, "Invalid prefix: {prefix}"); 110 | 111 | let prefix_record = self.prefixes.iter().find(|x| x.prefix == prefix); 112 | 113 | assert!(prefix_record.is_some(), "Prefix not found: {prefix}"); 114 | 115 | let snowflake = self.snowflake.gen(); 116 | 117 | let id = if prefix_record.unwrap().secure { 118 | let mut bytes = [0u8; 16]; 119 | OsRng.fill_bytes(&mut bytes); 120 | 121 | let hex_string = hex::encode(bytes); 122 | 123 | format!( 124 | "{}_{}", 125 | prefix, 126 | base64_encode(format!("_s_{}_{}", hex_string, snowflake.as_str())) 127 | ) 128 | } else { 129 | format!("{}_{}", prefix, base64_encode(snowflake).replace('=', "")) 130 | }; 131 | 132 | Ok(id) 133 | } 134 | } 135 | 136 | #[cfg(test)] 137 | mod tests { 138 | use super::{InitOptions, Pika, PrefixRecord}; 139 | 140 | #[test] 141 | fn init_pika() { 142 | let prefixes = [ 143 | PrefixRecord { 144 | prefix: "test".to_string(), 145 | description: Some("test".to_string()), 146 | secure: false, 147 | }, 148 | PrefixRecord { 149 | prefix: "s_test".to_string(), 150 | description: 
Some("test".to_string()), 151 | secure: true, 152 | }, 153 | ]; 154 | 155 | let mut pika = Pika::new( 156 | prefixes.to_vec(), 157 | &InitOptions { 158 | epoch: Some(1_650_153_600_000), 159 | node_id: None, 160 | disable_lowercase: Some(true), 161 | }, 162 | ); 163 | 164 | let id = pika.gen("test").unwrap(); 165 | let deconstructed = pika.deconstruct(&id); 166 | 167 | let s_id = pika.gen("s_test").unwrap(); 168 | 169 | let s_deconstructed = pika.deconstruct(&s_id); 170 | 171 | assert_eq!(deconstructed.node_id, Pika::compute_node_id()); 172 | assert_eq!(deconstructed.seq, 0); 173 | assert_eq!(deconstructed.version, 1); 174 | assert_eq!(deconstructed.epoch, 1_650_153_600_000); 175 | 176 | assert_eq!(s_deconstructed.node_id, Pika::compute_node_id()); 177 | assert_eq!(s_deconstructed.seq, 1); 178 | assert_eq!(s_deconstructed.version, 1); 179 | assert_eq!(s_deconstructed.epoch, 1_650_153_600_000); 180 | } 181 | 182 | #[test] 183 | fn init_pika_with_nodeid() { 184 | let prefixes = [PrefixRecord { 185 | prefix: "test".to_string(), 186 | description: Some("test".to_string()), 187 | secure: false, 188 | }]; 189 | 190 | let mut pika = Pika::new( 191 | prefixes.to_vec(), 192 | &InitOptions { 193 | epoch: Some(1_650_153_600_000), 194 | node_id: Some(622), 195 | disable_lowercase: Some(true), 196 | }, 197 | ); 198 | 199 | let id = pika.gen("test").unwrap(); 200 | let deconstructed = pika.deconstruct(&id); 201 | 202 | assert_eq!(deconstructed.node_id, 622); 203 | assert_eq!(deconstructed.seq, 0); 204 | assert_eq!(deconstructed.version, 1); 205 | assert_eq!(deconstructed.epoch, 1_650_153_600_000); 206 | } 207 | } 208 | -------------------------------------------------------------------------------- /impl/rs/src/snowflake.rs: -------------------------------------------------------------------------------- 1 | use crate::utils::now_timestamp; 2 | 3 | #[derive(Clone, Debug)] 4 | pub struct Snowflake { 5 | epoch: u64, 6 | node_id: u32, 7 | seq: u32, 8 | last_sequence_exhaustion: u64, 
// --- impl/rs/src/snowflake.rs ---
// (concatenated with utils.rs below so this chunk is self-contained; in the
// split-file layout this module does `use crate::utils::now_timestamp;`)

use std::time::{SystemTime, UNIX_EPOCH};

/// Stateful snowflake generator.
///
/// Layout of a generated 64-bit ID:
/// `ms-since-epoch (42 bits) | node_id (10 bits) | seq (12 bits)`.
#[derive(Clone, Debug)]
pub struct Snowflake {
    epoch: u64,
    node_id: u32,
    seq: u32,
    // Millisecond in which the 12-bit sequence last wrapped; generation
    // stalls until the clock passes it so IDs cannot repeat.
    last_sequence_exhaustion: u64,
}

/// Components of a decoded snowflake.
#[derive(Clone, Debug)]
pub struct DecodedSnowflake {
    pub id: u64,
    pub timestamp: u64,
    pub node_id: u32,
    pub seq: u64,
    pub epoch: u64,
}

impl Snowflake {
    /// Create a generator for `epoch` (ms) with an explicit node id.
    /// Only the low 10 bits of `node_id` survive encoding.
    pub fn new_with_nodeid(epoch: u64, node_id: u32) -> Self {
        Self {
            epoch,
            node_id,
            seq: 0,
            last_sequence_exhaustion: 0,
        }
    }

    /// Generate a snowflake at the current wall-clock time.
    #[inline]
    pub fn gen(&mut self) -> String {
        self.gen_with_ts(now_timestamp())
    }

    /// Generate a snowflake for `timestamp` (ms since the Unix epoch).
    ///
    /// If the 4096-per-millisecond sequence is exhausted, blocks until the
    /// next millisecond.
    pub fn gen_with_ts(&mut self, timestamp: u64) -> String {
        let mut timestamp = timestamp;

        if self.seq >= 4095 && timestamp == self.last_sequence_exhaustion {
            // Sequence exhausted within this millisecond: spin until the
            // clock advances, then re-read it. (Previously the stale
            // `timestamp` was still encoded below, re-issuing the already
            // used seq 4095 for that millisecond and producing duplicates.)
            while now_timestamp() - timestamp < 1 {
                std::hint::spin_loop();
            }
            timestamp = now_timestamp();
        }

        let sf = ((timestamp - self.epoch) << 22)
            | (u64::from(self.node_id) << 12)
            | u64::from(self.seq);

        self.seq = if self.seq >= 4095 { 0 } else { self.seq + 1 };

        // Remember when the sequence hit its ceiling so the next call in the
        // same millisecond knows to wait.
        if self.seq == 4095 {
            self.last_sequence_exhaustion = timestamp;
        }

        sf.to_string()
    }

    /// Decode a snowflake string back into its components.
    ///
    /// # Panics
    /// Panics if `sf` is not a decimal `u64`.
    pub fn decode(&self, sf: &str) -> DecodedSnowflake {
        let sf = sf.parse::<u64>().unwrap();

        let timestamp = (sf >> 22) + self.epoch;
        let node_id = (sf >> 12) & 0b11_1111_1111; // 10-bit node id
        let seq = sf & 0b1111_1111_1111; // 12-bit sequence

        DecodedSnowflake {
            id: sf,
            timestamp,
            node_id: node_id as u32,
            seq,
            epoch: self.epoch,
        }
    }
}

#[cfg(test)]
mod test {
    #[test]
    fn generate_snowflake() {
        // node_id 1023 is the largest value that fits the 10-bit field.
        let mut sf = super::Snowflake::new_with_nodeid(650_153_600_000, 1023);
        let snowflake = sf.gen();

        let deconstruct = sf.decode(snowflake.as_str());

        assert_eq!(deconstruct.epoch, 650_153_600_000);
        assert_eq!(deconstruct.node_id, 1023);
    }

    #[test]
    fn generate_snowflakes() {
        let mut sf = super::Snowflake::new_with_nodeid(650_153_600_000, 1023);

        // After seq 4095 the sequence wraps back to 0.
        let snowflakes: Vec<String> = (0..4096).map(|_| sf.gen()).collect();
        let last_snowflake = sf.gen();

        for (sequence, snowflake) in snowflakes.iter().enumerate() {
            let deconstruct = sf.decode(snowflake.as_str());

            assert_eq!(deconstruct.seq, sequence as u64);
        }

        let deconstruct = sf.decode(last_snowflake.as_str());
        assert_eq!(deconstruct.seq, 0);
    }
}

// --- impl/rs/src/utils.rs ---

/// Current wall-clock time in whole milliseconds since the Unix epoch.
///
/// # Panics
/// Panics if the system clock is set before the Unix epoch.
#[allow(clippy::cast_possible_truncation)]
pub fn now_timestamp() -> u64 {
    SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_millis() as u64
}