├── .gitmodules ├── .gitattributes ├── .gitignore ├── .github ├── FUNDING.yml ├── dependabot.yml └── workflows │ ├── release.yml │ └── main.yml ├── LICENSE ├── src ├── database │ ├── Json.zig │ ├── Id.zig │ └── Image.zig ├── StringsIntern.zig ├── list │ └── Entry.zig ├── StringIntern.zig ├── List.zig ├── ArgParser.zig ├── aniz.zig └── Database.zig └── README.md /.gitmodules: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.zig text eol=lf 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .zig-cache 2 | zig-out 3 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: [Hejsil] 2 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: github-actions 4 | directory: "/" 5 | schedule: 6 | interval: weekly 7 | time: "11:00" 8 | open-pull-requests-limit: 10 9 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | on: 3 | release: 4 | types: [published] 5 | 6 | permissions: 7 | contents: write 8 | 9 | jobs: 10 | release: 11 | strategy: 12 | matrix: 13 | target: [x86_64-linux-musl] 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v5 17 | - uses: mlugg/setup-zig@v2 18 | with: 19 | version: master 20 | cache-key: ${{ 
matrix.target }} 21 | 22 | - run: | 23 | zig build -Doptimize=ReleaseSafe -Dstrip -Dcpu=baseline -Dtarget=${{ matrix.target }} 24 | mv zig-out/bin/aniz zig-out/bin/aniz-${{ matrix.target }} 25 | 26 | - env: 27 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 28 | run: | 29 | gh release upload ${{ github.event.release.tag_name }} zig-out/bin/aniz-${{ matrix.target }} 30 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: 3 | push: 4 | pull_request: 5 | schedule: 6 | - cron: '0 0 * * 0' 7 | 8 | jobs: 9 | test: 10 | strategy: 11 | matrix: 12 | optimize: [Debug, ReleaseSmall, ReleaseSafe, ReleaseFast] 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v5 16 | with: 17 | submodules: recursive 18 | - uses: mlugg/setup-zig@v2 19 | with: 20 | version: master 21 | cache-key: ${{ matrix.optimize }} 22 | - run: zig build test -Doptimize=${{ matrix.optimize }} 23 | build: 24 | runs-on: ubuntu-latest 25 | steps: 26 | - uses: actions/checkout@v5 27 | with: 28 | submodules: recursive 29 | - uses: mlugg/setup-zig@v2 30 | with: 31 | version: master 32 | - run: zig build install 33 | lint: 34 | runs-on: ubuntu-latest 35 | steps: 36 | - uses: actions/checkout@v5 37 | - uses: mlugg/setup-zig@v2 38 | with: 39 | version: master 40 | - run: zig fmt --check . 
41 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) Jimmi Holst Christensen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /src/database/Json.zig: -------------------------------------------------------------------------------- 1 | //! The anime-offline-database schema converted to Zig code. Used for parsing the JSON database. 2 | //! 
See https://github.com/manami-project/anime-offline-database/blob/master/anime-offline-database.schema.json 3 | 4 | @"$schema": []const u8 = "", 5 | license: License, 6 | repository: []const u8, 7 | scoreRange: struct { 8 | minInclusive: f32 = 0.0, 9 | maxInclusive: f32 = 0.0, 10 | } = .{}, 11 | lastUpdate: []const u8, 12 | data: []const Anime, 13 | 14 | pub const License = struct { 15 | name: []const u8, 16 | url: []const u8, 17 | }; 18 | 19 | pub const Anime = struct { 20 | sources: []const []const u8, 21 | title: []const u8, 22 | type: Type, 23 | episodes: u16, 24 | status: Status, 25 | animeSeason: SeasonAndYear, 26 | picture: []const u8, 27 | thumbnail: []const u8, 28 | synonyms: []const []const u8, 29 | relatedAnime: []const []const u8, 30 | tags: []const []const u8, 31 | duration: Duration = .{}, 32 | score: Score = .{}, 33 | 34 | const SeasonAndYear = struct { 35 | season: Season, 36 | year: u16 = 0, 37 | }; 38 | 39 | const Duration = struct { 40 | value: u16 = 0, 41 | unit: Unit = .SECONDS, 42 | 43 | const Unit = enum { 44 | SECONDS, 45 | }; 46 | }; 47 | 48 | const Score = struct { 49 | arithmeticGeometricMean: f32 = 0.0, 50 | arithmeticMean: f32 = 0.0, 51 | median: f32 = 0.0, 52 | }; 53 | }; 54 | 55 | pub const Type = enum { 56 | TV, 57 | MOVIE, 58 | OVA, 59 | ONA, 60 | SPECIAL, 61 | UNKNOWN, 62 | }; 63 | 64 | pub const Status = enum { 65 | FINISHED, 66 | ONGOING, 67 | UPCOMING, 68 | UNKNOWN, 69 | }; 70 | 71 | pub const Season = enum { 72 | SPRING, 73 | SUMMER, 74 | FALL, 75 | WINTER, 76 | UNDEFINED, 77 | }; 78 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # aniz 2 | 3 | aniz is a program for keeping a local list of anime you have watched. 4 | 5 | 6 | ## Example 7 | 8 | ```sh 9 | $ # First, an anime database needs to be downloaded. 10 | $ # Run this once a week to have the latest database at all time. 
11 | $ aniz database download 12 | 13 | $ # Search for an anime in the database 14 | $ aniz database -s 'Attack on titan' | head -n1 15 | tv 2013 spring 25 Shingeki no Kyojin https://anidb.net/anime/9541 https://cdn.myanimelist.net/images/anime/10/47347.jpg 16 | 17 | $ # Add one or more animes to your list 18 | $ aniz list plan-to-watch \ 19 | https://anidb.net/anime/9541 \ 20 | https://anilist.co/anime/11981 \ 21 | https://kitsu.io/anime/7929 22 | 23 | $ # Show your list 24 | $ aniz list 25 | 2023-04-19 p 0 0 Shingeki no Kyojin https://anidb.net/anime/9541 26 | 2023-04-19 p 0 0 RWBY https://kitsu.io/anime/7929 27 | 2023-04-19 p 0 0 Mahou Shoujo Madoka★Magica Movie 3: Hangyaku no Monogatari https://anilist.co/anime/11981 28 | 29 | $ # Watch an episode 30 | $ aniz list watch-episode https://kitsu.io/anime/7929 31 | $ aniz list 32 | 2023-04-19 w 1 0 RWBY https://kitsu.io/anime/7929 33 | 2023-04-19 p 0 0 Shingeki no Kyojin https://anidb.net/anime/9541 34 | 2023-04-19 p 0 0 Mahou Shoujo Madoka★Magica Movie 3: Hangyaku no Monogatari https://anilist.co/anime/11981 35 | 36 | $ # Complete show 37 | $ aniz list complete https://anidb.net/anime/9541 38 | $ aniz list 39 | 2023-04-19 w 1 0 RWBY https://kitsu.io/anime/7929 40 | 2023-04-19 p 0 0 Mahou Shoujo Madoka★Magica Movie 3: Hangyaku no Monogatari https://anilist.co/anime/11981 41 | 2023-04-19 c 25 1 Shingeki no Kyojin https://anidb.net/anime/9541 42 | 43 | ``` 44 | 45 | 46 | -------------------------------------------------------------------------------- /src/database/Id.zig: -------------------------------------------------------------------------------- 1 | site: Site, 2 | id: u32, 3 | 4 | pub fn fromUrl(url: []const u8) !Id { 5 | for (std.meta.tags(Site)) |site| { 6 | for (site.urls()) |site_url| { 7 | if (std.mem.startsWith(u8, url, site_url)) { 8 | const id = try std.fmt.parseUnsigned(u32, url[site_url.len..], 10); 9 | return Id{ .site = site, .id = id }; 10 | } 11 | } 12 | } 13 | 14 | return error.InvalidUrl; 15 | } 
16 | 17 | pub fn format( 18 | id: Id, 19 | comptime f: []const u8, 20 | options: std.fmt.FormatOptions, 21 | writer: anytype, 22 | ) !void { 23 | _ = f; 24 | _ = options; 25 | return writer.print("{s}{d}", .{ id.site.url(), id.id }); 26 | } 27 | 28 | pub const Site = enum(u8) { 29 | anidb, 30 | anilist, 31 | animecountdown, 32 | anisearch, 33 | kitsu, 34 | livechart, 35 | myanimelist, 36 | simkl, 37 | 38 | pub const all = std.meta.tags(Site); 39 | 40 | pub fn url(site: Site) []const u8 { 41 | return site.urls()[0]; 42 | } 43 | 44 | /// The first url in the slice returned is the current url for that site. Anything after are 45 | /// old or alternative urls for the site. 46 | pub fn urls(site: Site) []const []const u8 { 47 | return switch (site) { 48 | .anidb => &.{"https://anidb.net/anime/"}, 49 | .anilist => &.{"https://anilist.co/anime/"}, 50 | .animecountdown => &.{"https://animecountdown.com/"}, 51 | .anisearch => &.{"https://anisearch.com/anime/"}, 52 | .livechart => &.{"https://livechart.me/anime/"}, 53 | .myanimelist => &.{"https://myanimelist.net/anime/"}, 54 | .simkl => &.{"https://simkl.com/anime/"}, 55 | 56 | .kitsu => &.{ 57 | "https://kitsu.app/anime/", 58 | "https://kitsu.io/anime/", 59 | }, 60 | }; 61 | } 62 | }; 63 | 64 | pub const Optional = enum(u32) { 65 | none = std.math.maxInt(u32), 66 | _, 67 | 68 | pub fn unwrap(id: Optional) ?u32 { 69 | if (id == .none) 70 | return null; 71 | return @intFromEnum(id); 72 | } 73 | }; 74 | 75 | const Id = @This(); 76 | 77 | const std = @import("std"); 78 | -------------------------------------------------------------------------------- /src/StringsIntern.zig: -------------------------------------------------------------------------------- 1 | //! Datastructure for interning slices of already interned strings. 
2 | 3 | dedupe: std.ArrayHashMapUnmanaged(Span, void, SpanContext, true) = .{}, 4 | data: std.ArrayListUnmanaged(StringIntern.Index) = .{}, 5 | 6 | pub fn deinit(intern: *StringsIntern, allocator: std.mem.Allocator) void { 7 | intern.dedupe.deinit(allocator); 8 | intern.data.deinit(allocator); 9 | } 10 | 11 | pub fn put(intern: *StringsIntern, allocator: std.mem.Allocator, strings: []const StringIntern.Index) !Span { 12 | if (strings.len == 0) 13 | return Span{ .index = 0, .len = 0 }; 14 | 15 | try intern.data.ensureUnusedCapacity(allocator, strings.len); 16 | 17 | const key_ctx = SliceToSpanContext{ .intern = intern }; 18 | const ctx = SpanContext{ .intern = intern }; 19 | const entry = try intern.dedupe.getOrPutContextAdapted(allocator, strings, key_ctx, ctx); 20 | if (entry.found_existing) 21 | return entry.key_ptr.*; 22 | 23 | const index = std.math.cast(u32, intern.data.items.len) orelse return error.OutOfMemory; 24 | const len = std.math.cast(u32, strings.len) orelse return error.OutOfMemory; 25 | const res = Span{ .index = index, .len = len }; 26 | entry.key_ptr.* = res; 27 | 28 | intern.data.appendSliceAssumeCapacity(strings); 29 | 30 | return res; 31 | } 32 | 33 | pub const Span = extern struct { 34 | index: u32, 35 | len: u32, 36 | 37 | pub fn slice(span: @This(), data: []const StringIntern.Index) []const StringIntern.Index { 38 | return data[span.index..][0..span.len]; 39 | } 40 | }; 41 | 42 | const SliceToSpanContext = struct { 43 | intern: *StringsIntern, 44 | 45 | pub fn eql(ctx: SliceToSpanContext, a: []const StringIntern.Index, b: Span, b_index: usize) bool { 46 | _ = b_index; 47 | const b_slice = b.slice(ctx.intern.data.items); 48 | return std.mem.eql(StringIntern.Index, a, b_slice); 49 | } 50 | 51 | pub fn hash(ctx: SliceToSpanContext, s: []const StringIntern.Index) u32 { 52 | _ = ctx; 53 | return hashStringIndexs(s); 54 | } 55 | }; 56 | 57 | const SpanContext = struct { 58 | intern: *StringsIntern, 59 | 60 | pub fn eql(ctx: SpanContext, a: Span, 
b: Span, b_index: usize) bool { 61 | _ = ctx; 62 | _ = b_index; 63 | return std.meta.eql(a, b); 64 | } 65 | 66 | pub fn hash(ctx: SpanContext, key: Span) u32 { 67 | const slice = key.slice(ctx.intern.data.items); 68 | return hashStringIndexs(slice); 69 | } 70 | }; 71 | 72 | fn hashStringIndexs(slice: []const StringIntern.Index) u32 { 73 | const bytes = std.mem.sliceAsBytes(slice); 74 | return @as(u32, @truncate(std.hash.Wyhash.hash(0, bytes))); 75 | } 76 | 77 | test { 78 | _ = StringIntern; 79 | } 80 | 81 | const StringsIntern = @This(); 82 | 83 | const StringIntern = @import("StringIntern.zig"); 84 | 85 | const std = @import("std"); 86 | -------------------------------------------------------------------------------- /src/database/Image.zig: -------------------------------------------------------------------------------- 1 | //! Image urls in the anime-offline-database all follow a similar pattern. They all start with a 2 | //! base url and then have a path that is unique to the image. To save on memory, we only store 3 | //! the path of the image url and then an enum that represents the base url. 4 | 5 | base: Base, 6 | path: []const u8, 7 | 8 | pub fn fromUrl(str: []const u8) !Image { 9 | const base = try Base.fromUrl(str); 10 | return Image{ .base = base, .path = str[base.url().len..] 
}; 11 | } 12 | 13 | pub fn format( 14 | image: Image, 15 | comptime f: []const u8, 16 | options: std.fmt.FormatOptions, 17 | writer: anytype, 18 | ) !void { 19 | _ = f; 20 | _ = options; 21 | return writer.print("{s}{s}", .{ image.base.url(), image.path }); 22 | } 23 | 24 | pub const Base = enum(u8) { 25 | anidb, 26 | anilist, 27 | animeplanet1, 28 | animeplanet2, 29 | anisearch1, 30 | anisearch2, 31 | anisearch3, 32 | kitsu1, 33 | kitsu2, 34 | kitsu3, 35 | livechart, 36 | myanimelist1, 37 | myanimelist2, 38 | no_pic1, 39 | no_pic2, 40 | notifymoe1, 41 | notifymoe2, 42 | simkl, 43 | 44 | pub fn fromUrl(str: []const u8) !Image.Base { 45 | for (std.meta.tags(Image.Base)) |base| { 46 | const base_url = base.url(); 47 | if (std.mem.startsWith(u8, str, base_url)) 48 | return base; 49 | } 50 | 51 | return error.InvalidUrl; 52 | } 53 | 54 | pub fn url(base: Image.Base) []const u8 { 55 | return switch (base) { 56 | .anidb => "https://cdn.anidb.net/images/main/", 57 | .anilist => "https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/", 58 | .animeplanet1 => "https://cdn.anime-planet.com/images/anime/default/", 59 | .animeplanet2 => "https://cdn.anime-planet.com/anime/primary/", 60 | .anisearch1 => "https://cdn.anisearch.com/images/anime/cover/full/", 61 | .anisearch2 => "https://cdn.anisearch.com/images/anime/cover/", 62 | .anisearch3 => "https://www.anisearch.com/images/anime/cover/", 63 | .kitsu1 => "https://media.kitsu.io/anime/poster_images/", 64 | .kitsu2 => "https://media.kitsu.io/anime/", 65 | .kitsu3 => "https://media.kitsu.app/anime/", 66 | .livechart => "https://u.livechart.me/anime/", 67 | .myanimelist1 => "https://cdn.myanimelist.net/images/anime/", 68 | .myanimelist2 => "https://cdn.myanimelist.net/images/", 69 | .no_pic1 => "https://raw.githubusercontent.com/manami-project/anime-offline-database/master/pics/no_pic.png", 70 | .no_pic2 => "https://raw.githubusercontent.com/manami-project/anime-offline-database/master/pics/no_pic_thumbnail.png", 71 | 
.notifymoe1 => "https://media.notify.moe/images/anime/large/", 72 | .notifymoe2 => "https://media.notify.moe/images/anime/small/", 73 | .simkl => "https://simkl.in/posters/", 74 | }; 75 | } 76 | }; 77 | 78 | const Image = @This(); 79 | 80 | const std = @import("std"); 81 | -------------------------------------------------------------------------------- /src/list/Entry.zig: -------------------------------------------------------------------------------- 1 | date: datetime.datetime.Date, 2 | status: Status, 3 | episodes: u16, 4 | watched: u16, 5 | title: StringIntern.Index, 6 | id: Id, 7 | 8 | pub const Status = enum(u3) { 9 | complete, 10 | dropped, 11 | on_hold, 12 | plan_to_watch, 13 | watching, 14 | 15 | pub fn fromString(str: []const u8) ?Status { 16 | const map = comptime std.StaticStringMap(Status).initComptime(.{ 17 | .{ "c", .complete }, 18 | .{ "d", .dropped }, 19 | .{ "o", .on_hold }, 20 | .{ "p", .plan_to_watch }, 21 | .{ "w", .watching }, 22 | }); 23 | return map.get(str); 24 | } 25 | 26 | pub fn toString(s: Status) []const u8 { 27 | return switch (s) { 28 | .complete => "c", 29 | .dropped => "d", 30 | .on_hold => "o", 31 | .plan_to_watch => "p", 32 | .watching => "w", 33 | }; 34 | } 35 | }; 36 | 37 | pub fn lessThan(a: Entry, b: Entry, strings: [:0]const u8) bool { 38 | switch (a.date.cmp(b.date)) { 39 | .lt => return true, 40 | .gt => return false, 41 | .eq => {}, 42 | } 43 | switch (std.math.order(@intFromEnum(a.status), @intFromEnum(b.status))) { 44 | .lt => return true, 45 | .gt => return false, 46 | .eq => {}, 47 | } 48 | switch (std.math.order(a.episodes, b.episodes)) { 49 | .lt => return true, 50 | .gt => return false, 51 | .eq => {}, 52 | } 53 | switch (std.math.order(a.watched, b.watched)) { 54 | .lt => return true, 55 | .gt => return false, 56 | .eq => {}, 57 | } 58 | switch (std.mem.order(u8, a.title.slice(strings), b.title.slice(strings))) { 59 | .lt => return true, 60 | .gt => return false, 61 | .eq => {}, 62 | } 63 | switch 
(std.math.order(@intFromEnum(a.id.site), @intFromEnum(b.id.site))) { 64 | .lt => return true, 65 | .gt => return false, 66 | .eq => {}, 67 | } 68 | switch (std.math.order(a.id.id, b.id.id)) { 69 | .lt => return true, 70 | .gt => return false, 71 | .eq => {}, 72 | } 73 | return false; 74 | } 75 | 76 | pub fn serializeToTsv(entry: Entry, strings: [:0]const u8, writer: anytype) !void { 77 | try writer.print("{d:4>2}-{d:0>2}-{d:0>2}\t{s}\t{}\t{}\t{s}\t{s}{d}", .{ 78 | entry.date.year, 79 | entry.date.month, 80 | entry.date.day, 81 | entry.status.toString(), 82 | entry.episodes, 83 | entry.watched, 84 | entry.title.slice(strings), 85 | entry.id.site.url(), 86 | entry.id.id, 87 | }); 88 | } 89 | 90 | test { 91 | _ = Database; 92 | _ = StringIntern; 93 | } 94 | 95 | const Entry = @This(); 96 | 97 | const Id = Database.Id; 98 | 99 | const Database = @import("../Database.zig"); 100 | const StringIntern = @import("../StringIntern.zig"); 101 | 102 | const datetime = @import("datetime"); 103 | const std = @import("std"); 104 | -------------------------------------------------------------------------------- /src/StringIntern.zig: -------------------------------------------------------------------------------- 1 | //! Datastructure for interning strings. Interned strings are stored in the `StringIntern` and can be accessed by their 2 | //! index. Strings are stored null terminated. The index 0 is reserved for the empty string. 3 | 4 | dedupe: std.ArrayHashMapUnmanaged(Index, void, IndexContext, true), 5 | data: std.ArrayListUnmanaged(u8), 6 | 7 | pub fn init(allocator: std.mem.Allocator) !StringIntern { 8 | var data = std.ArrayListUnmanaged(u8){}; 9 | errdefer data.deinit(allocator); 10 | 11 | // Ensure there is at least one null byte at the start of the data. We can point empty strings here. 
12 | try data.append(allocator, 0); 13 | 14 | return .{ .data = data, .dedupe = .{} }; 15 | } 16 | 17 | pub fn deinit(intern: *StringIntern, allocator: std.mem.Allocator) void { 18 | intern.dedupe.deinit(allocator); 19 | intern.data.deinit(allocator); 20 | } 21 | 22 | pub fn sliceZ(intern: StringIntern) [:0]const u8 { 23 | return intern.data.items[0 .. intern.data.items.len - 1 :0]; 24 | } 25 | 26 | pub fn put(intern: *StringIntern, allocator: std.mem.Allocator, string: []const u8) !Index { 27 | if (string.len == 0) 28 | return @enumFromInt(0); 29 | 30 | try intern.data.ensureUnusedCapacity(allocator, string.len + 1); 31 | 32 | const key_ctx = StringToIndexContext{ .intern = intern }; 33 | const ctx = IndexContext{ .intern = intern }; 34 | const entry = try intern.dedupe.getOrPutContextAdapted(allocator, string, key_ctx, ctx); 35 | if (entry.found_existing) 36 | return entry.key_ptr.*; 37 | 38 | const index = std.math.cast(u32, intern.data.items.len) orelse return error.OutOfMemory; 39 | const res: Index = @enumFromInt(index); 40 | entry.key_ptr.* = res; 41 | 42 | intern.data.appendSliceAssumeCapacity(string); 43 | intern.data.appendAssumeCapacity(0); 44 | 45 | return res; 46 | } 47 | 48 | pub const Index = enum(u32) { 49 | empty = 0, 50 | _, 51 | 52 | pub fn slice(index: @This(), data: [:0]const u8) [:0]const u8 { 53 | return std.mem.sliceTo(index.ptr(data), 0); 54 | } 55 | 56 | pub fn ptr(index: @This(), data: [:0]const u8) [*:0]const u8 { 57 | return data[@intFromEnum(index)..].ptr; 58 | } 59 | }; 60 | 61 | const StringToIndexContext = struct { 62 | intern: *StringIntern, 63 | 64 | pub fn eql(ctx: StringToIndexContext, a: []const u8, b: Index, b_index: usize) bool { 65 | _ = b_index; 66 | const b_srt = b.slice(ctx.intern.data.items[0 .. 
ctx.intern.data.items.len - 1 :0]); 67 | return std.mem.eql(u8, a, b_srt); 68 | } 69 | 70 | pub fn hash(ctx: StringToIndexContext, s: []const u8) u32 { 71 | _ = ctx; 72 | return std.array_hash_map.hashString(s); 73 | } 74 | }; 75 | 76 | const IndexContext = struct { 77 | intern: *StringIntern, 78 | 79 | pub fn eql(ctx: IndexContext, a: Index, b: Index, b_index: usize) bool { 80 | _ = ctx; 81 | _ = b_index; 82 | return a == b; 83 | } 84 | 85 | pub fn hash(ctx: IndexContext, key: Index) u32 { 86 | const str = key.slice(ctx.intern.sliceZ()); 87 | return std.array_hash_map.hashString(str); 88 | } 89 | }; 90 | 91 | const StringIntern = @This(); 92 | 93 | const std = @import("std"); 94 | -------------------------------------------------------------------------------- /src/List.zig: -------------------------------------------------------------------------------- 1 | //! The list of animes the user is tracking. Stored in a TSV file. 2 | 3 | intern: StringIntern, 4 | entries: std.ArrayListUnmanaged(Entry), 5 | 6 | pub fn init(allocator: std.mem.Allocator) !List { 7 | return .{ 8 | .intern = try StringIntern.init(allocator), 9 | .entries = .{}, 10 | }; 11 | } 12 | 13 | pub fn deinit(list: *List, allocator: std.mem.Allocator) void { 14 | list.intern.deinit(allocator); 15 | list.entries.deinit(allocator); 16 | } 17 | 18 | pub fn deserializeFromTsv(allocator: std.mem.Allocator, csv: []const u8) !List { 19 | var list = try List.init(allocator); 20 | errdefer list.deinit(allocator); 21 | 22 | var lines = std.mem.tokenizeScalar(u8, csv, '\n'); 23 | while (lines.next()) |line| { 24 | var fields = std.mem.splitScalar(u8, line, '\t'); 25 | const date_str = fields.first(); 26 | const status_str = fields.next() orelse return error.Invalid; 27 | const episodes_str = fields.next() orelse return error.Invalid; 28 | const watched_str = fields.next() orelse return error.Invalid; 29 | const title = fields.next() orelse return error.Invalid; 30 | const id_str = fields.next() orelse return 
error.Invalid; 31 | if (fields.next() != null) 32 | return error.Invalid; 33 | 34 | try list.entries.append(allocator, .{ 35 | .date = try datetime.datetime.Date.parseIso(date_str), 36 | .status = Entry.Status.fromString(status_str) orelse return error.Invalid, 37 | .episodes = try std.fmt.parseUnsigned(u16, episodes_str, 0), 38 | .watched = try std.fmt.parseUnsigned(u16, watched_str, 0), 39 | .title = try list.intern.put(allocator, title), 40 | .id = try Id.fromUrl(id_str), 41 | }); 42 | } 43 | 44 | return list; 45 | } 46 | 47 | pub fn serializeToTsv(list: List, writer: anytype) !void { 48 | for (list.entries.items) |entry| { 49 | try entry.serializeToTsv(list.intern.sliceZ(), writer); 50 | try writer.writeAll("\n"); 51 | } 52 | } 53 | 54 | pub fn addEntry(list: *List, allocator: std.mem.Allocator, id: Id, title: []const u8) !*Entry { 55 | const entry = list.find(id) orelse blk: { 56 | const entry = try list.entries.addOne(allocator); 57 | entry.* = .{ 58 | .date = datetime.datetime.Date.now(), 59 | .status = .watching, 60 | .episodes = 0, 61 | .watched = 0, 62 | .title = .empty, 63 | .id = id, 64 | }; 65 | break :blk entry; 66 | }; 67 | 68 | // Always update the entry to have newest link id and title. 69 | entry.id = id; 70 | entry.title = try list.intern.put(allocator, title); 71 | 72 | return entry; 73 | } 74 | 75 | pub fn find(list: List, id: Id) ?*Entry { 76 | for (list.entries.items) |*entry| { 77 | if (entry.id.id == id.id and entry.id.site == id.site) 78 | return entry; 79 | } 80 | 81 | return null; 82 | } 83 | 84 | pub fn sort(list: List) void { 85 | std.mem.sort(Entry, list.entries.items, list.intern.sliceZ(), struct { 86 | fn lessThan(strings: [:0]const u8, a: Entry, b: Entry) bool { 87 | // Sort from biggest datetime to smallest. 
Most people would expect that the 88 | // newest entry they've seen is then one that ends up at the top of their list 89 | return b.lessThan(a, strings); 90 | } 91 | }.lessThan); 92 | } 93 | 94 | fn testTransform(input: []const u8, expected_output: []const u8) !void { 95 | var list = try deserializeFromTsv(std.testing.allocator, input); 96 | defer list.deinit(std.testing.allocator); 97 | 98 | var actual_output = std.ArrayList(u8).init(std.testing.allocator); 99 | defer actual_output.deinit(); 100 | try list.serializeToTsv(actual_output.writer()); 101 | 102 | try std.testing.expectEqualStrings(expected_output, actual_output.items); 103 | } 104 | 105 | fn testCanonical(input: []const u8) !void { 106 | return testTransform(input, input); 107 | } 108 | 109 | test "tsv" { 110 | try testCanonical( 111 | "2000-10-10\tw\t12\t10\tMahou Shoujo Madoka★Magica\thttps://anidb.net/anime/8069\n", 112 | ); 113 | } 114 | 115 | test { 116 | _ = Entry; 117 | _ = Database; 118 | _ = StringIntern; 119 | } 120 | 121 | const List = @This(); 122 | 123 | const Id = Database.Id; 124 | 125 | pub const Entry = @import("list/Entry.zig"); 126 | 127 | const Database = @import("Database.zig"); 128 | const StringIntern = @import("StringIntern.zig"); 129 | 130 | const datetime = @import("datetime"); 131 | const std = @import("std"); 132 | -------------------------------------------------------------------------------- /src/ArgParser.zig: -------------------------------------------------------------------------------- 1 | args: []const []const u8, 2 | index: usize = 0, 3 | 4 | consumed: bool = false, 5 | 6 | pub fn next(parser: *ArgParser) bool { 7 | parser.consumed = parser.index >= parser.args.len; 8 | return !parser.consumed; 9 | } 10 | 11 | pub fn flag(parser: *ArgParser, names: []const []const u8) bool { 12 | if (parser.consumed) 13 | return false; 14 | 15 | for (names) |name| { 16 | if (!std.mem.eql(u8, parser.args[parser.index], name)) 17 | continue; 18 | 19 | parser.consumed = true; 20 | 
parser.index += 1; 21 | return true; 22 | } 23 | 24 | return false; 25 | } 26 | 27 | pub fn option(parser: *ArgParser, names: []const []const u8) ?[]const u8 { 28 | if (parser.consumed) 29 | return null; 30 | 31 | const arg = parser.args[parser.index]; 32 | for (names) |name| { 33 | if (!std.mem.startsWith(u8, arg, name)) 34 | continue; 35 | if (!std.mem.startsWith(u8, arg[name.len..], "=")) 36 | continue; 37 | 38 | parser.consumed = true; 39 | parser.index += 1; 40 | return arg[name.len + 1 ..]; 41 | } 42 | 43 | if (parser.index + 1 < parser.args.len) { 44 | if (parser.flag(names)) 45 | return parser.eat(); 46 | } 47 | 48 | return null; 49 | } 50 | 51 | pub fn positional(parser: *ArgParser) ?[]const u8 { 52 | if (parser.consumed) 53 | return null; 54 | 55 | return parser.eat(); 56 | } 57 | 58 | fn eat(parser: *ArgParser) []const u8 { 59 | defer parser.index += 1; 60 | return parser.args[parser.index]; 61 | } 62 | 63 | test flag { 64 | var parser = ArgParser{ .args = &.{ 65 | "-a", "--beta", "command", 66 | } }; 67 | 68 | try std.testing.expect(parser.flag(&.{ "-a", "--alpha" })); 69 | try std.testing.expect(!parser.flag(&.{ "-b", "--beta" })); 70 | try std.testing.expect(!parser.flag(&.{"command"})); 71 | 72 | try std.testing.expect(parser.next()); 73 | try std.testing.expect(!parser.flag(&.{ "-a", "--alpha" })); 74 | try std.testing.expect(parser.flag(&.{ "-b", "--beta" })); 75 | try std.testing.expect(!parser.flag(&.{"command"})); 76 | 77 | try std.testing.expect(parser.next()); 78 | try std.testing.expect(!parser.flag(&.{ "-a", "--alpha" })); 79 | try std.testing.expect(!parser.flag(&.{ "-b", "--beta" })); 80 | try std.testing.expect(parser.flag(&.{"command"})); 81 | 82 | try std.testing.expect(!parser.next()); 83 | try std.testing.expect(!parser.flag(&.{ "-a", "--alpha" })); 84 | try std.testing.expect(!parser.flag(&.{ "-b", "--beta" })); 85 | try std.testing.expect(!parser.flag(&.{"command"})); 86 | } 87 | 88 | fn expectEqualOptionalString(m_expect: ?[]const 
u8, m_actual: ?[]const u8) !void { 89 | if (m_expect) |expect| { 90 | try std.testing.expect(m_actual != null); 91 | try std.testing.expectEqualStrings(expect, m_actual.?); 92 | } else { 93 | try std.testing.expect(m_actual == null); 94 | } 95 | } 96 | 97 | test option { 98 | var parser = ArgParser{ .args = &.{ 99 | "-a", 100 | "a_value", 101 | "--beta=b_value", 102 | "command", 103 | "command_value", 104 | } }; 105 | 106 | try expectEqualOptionalString("a_value", parser.option(&.{ "-a", "--alpha" })); 107 | try expectEqualOptionalString(null, parser.option(&.{ "-b", "--beta" })); 108 | try expectEqualOptionalString(null, parser.option(&.{"command"})); 109 | 110 | try std.testing.expect(parser.next()); 111 | try expectEqualOptionalString(null, parser.option(&.{ "-a", "--alpha" })); 112 | try expectEqualOptionalString("b_value", parser.option(&.{ "-b", "--beta" })); 113 | try expectEqualOptionalString(null, parser.option(&.{"command"})); 114 | 115 | try std.testing.expect(parser.next()); 116 | try expectEqualOptionalString(null, parser.option(&.{ "-a", "--alpha" })); 117 | try expectEqualOptionalString(null, parser.option(&.{ "-b", "--beta" })); 118 | try expectEqualOptionalString("command_value", parser.option(&.{"command"})); 119 | 120 | try std.testing.expect(!parser.next()); 121 | try expectEqualOptionalString(null, parser.option(&.{ "-a", "--alpha" })); 122 | try expectEqualOptionalString(null, parser.option(&.{ "-b", "--beta" })); 123 | try expectEqualOptionalString(null, parser.option(&.{"command"})); 124 | } 125 | 126 | test positional { 127 | var parser = ArgParser{ .args = &.{ 128 | "-a", 129 | "--beta", 130 | "command", 131 | } }; 132 | 133 | try expectEqualOptionalString("-a", parser.positional()); 134 | try std.testing.expect(parser.next()); 135 | try expectEqualOptionalString("--beta", parser.positional()); 136 | try std.testing.expect(parser.next()); 137 | try expectEqualOptionalString("command", parser.positional()); 138 | try 
std.testing.expect(!parser.next()); 139 | try expectEqualOptionalString(null, parser.positional()); 140 | } 141 | 142 | test "all" { 143 | var parser = ArgParser{ .args = &.{ 144 | "-a", 145 | "--beta", 146 | "b_value", 147 | "-c=c_value", 148 | "command", 149 | } }; 150 | 151 | try std.testing.expect(parser.flag(&.{ "-a", "--alpha" })); 152 | try expectEqualOptionalString(null, parser.option(&.{ "-b", "--beta" })); 153 | try expectEqualOptionalString(null, parser.option(&.{ "-c", "--center" })); 154 | try expectEqualOptionalString(null, parser.positional()); 155 | 156 | try std.testing.expect(parser.next()); 157 | try std.testing.expect(!parser.flag(&.{ "-a", "--alpha" })); 158 | try expectEqualOptionalString("b_value", parser.option(&.{ "-b", "--beta" })); 159 | try expectEqualOptionalString(null, parser.option(&.{ "-c", "--center" })); 160 | try expectEqualOptionalString(null, parser.positional()); 161 | 162 | try std.testing.expect(parser.next()); 163 | try std.testing.expect(!parser.flag(&.{ "-a", "--alpha" })); 164 | try expectEqualOptionalString(null, parser.option(&.{ "-b", "--beta" })); 165 | try expectEqualOptionalString("c_value", parser.option(&.{ "-c", "--center" })); 166 | try expectEqualOptionalString(null, parser.positional()); 167 | 168 | try std.testing.expect(parser.next()); 169 | try std.testing.expect(!parser.flag(&.{ "-a", "--alpha" })); 170 | try expectEqualOptionalString(null, parser.option(&.{ "-b", "--beta" })); 171 | try expectEqualOptionalString(null, parser.option(&.{ "-c", "--center" })); 172 | try expectEqualOptionalString("command", parser.positional()); 173 | 174 | try std.testing.expect(!parser.next()); 175 | try std.testing.expect(!parser.flag(&.{ "-a", "--alpha" })); 176 | try expectEqualOptionalString(null, parser.option(&.{ "-b", "--beta" })); 177 | try expectEqualOptionalString(null, parser.option(&.{ "-c", "--center" })); 178 | try expectEqualOptionalString(null, parser.positional()); 179 | } 180 | 181 | const ArgParser = 
@This(); 182 | 183 | const std = @import("std"); 184 | -------------------------------------------------------------------------------- /src/aniz.zig: -------------------------------------------------------------------------------- 1 | pub fn main() !void { 2 | var gpa_state = std.heap.GeneralPurposeAllocator(.{}){}; 3 | const gpa = gpa_state.allocator(); 4 | defer _ = gpa_state.deinit(); 5 | 6 | const args = try std.process.argsAlloc(gpa); 7 | defer std.process.argsFree(gpa, args); 8 | 9 | var stdout_buffered = std.io.bufferedWriter(std.io.getStdOut().writer()); 10 | const stdout = stdout_buffered.writer(); 11 | 12 | try mainFull(.{ 13 | .allocator = gpa, 14 | .args = args[1..], 15 | .stdout = stdout.any(), 16 | }); 17 | 18 | return stdout_buffered.flush(); 19 | } 20 | 21 | pub fn mainFull(options: struct { 22 | allocator: std.mem.Allocator, 23 | args: []const []const u8, 24 | stdout: std.io.AnyWriter, 25 | }) !void { 26 | var program = Program{ 27 | .allocator = options.allocator, 28 | .args = .{ .args = options.args }, 29 | .stdout = options.stdout, 30 | }; 31 | 32 | return program.mainCommand(); 33 | } 34 | 35 | const Program = @This(); 36 | 37 | allocator: std.mem.Allocator, 38 | args: ArgParser, 39 | stdout: std.io.AnyWriter, 40 | 41 | const main_usage = 42 | \\Usage: aniz [command] [args] 43 | \\ 44 | \\Commands: 45 | \\ database 46 | \\ list 47 | \\ help Display this message 48 | \\ 49 | ; 50 | 51 | pub fn mainCommand(program: *Program) !void { 52 | while (program.args.next()) { 53 | if (program.args.flag(&.{"database"})) 54 | return program.databaseSubCommand(); 55 | if (program.args.flag(&.{"list"})) 56 | return program.listSubCommand(); 57 | if (program.args.flag(&.{ "-h", "--help", "help" })) 58 | return program.stdout.writeAll(main_usage); 59 | if (program.args.positional()) |_| { 60 | try std.io.getStdErr().writeAll(main_usage); 61 | return error.InvalidArgument; 62 | } 63 | } 64 | 65 | try std.io.getStdErr().writeAll(main_usage); 66 | return 
error.InvalidArgument;
}

const database_sub_usage =
    \\Usage:
    \\  aniz database [command] [args]
    \\  aniz database [options] [ids]...
    \\
    \\Commands:
    \\  download      Download newest version of the database
    \\  help          Display this message
    \\  [ids]...
    \\
;

// Dispatches `aniz database <subcommand>`. With no recognized subcommand it
// falls through to `databaseCommand`, which prints/filters the database.
fn databaseSubCommand(program: *Program) !void {
    if (program.args.next()) {
        if (program.args.flag(&.{"download"}))
            return program.databaseDownloadCommand();
        if (program.args.flag(&.{ "-h", "--help", "help" }))
            return program.stdout.writeAll(database_sub_usage);
    }

    return program.databaseCommand();
}

// Prints database entries as DSV to stdout. With no `--search` and no ids,
// every entry is printed; otherwise entries are filtered first.
fn databaseCommand(program: *Program) !void {
    var m_search: ?[]const u8 = null;
    var ids = std.AutoArrayHashMap(Database.Id, void).init(program.allocator);
    defer ids.deinit();

    while (program.args.next()) {
        if (program.args.option(&.{ "-s", "--search" })) |search|
            m_search = search;
        if (program.args.flag(&.{ "-h", "--help", "help" }))
            return program.stdout.writeAll(database_sub_usage);
        if (program.args.positional()) |url| {
            const id = try Database.Id.fromUrl(url);
            try ids.put(id, {});
        }
    }

    var db = loadDatabase(program.allocator) catch |err| switch (err) {
        error.FileNotFound => {
            // `std.log.err` already prefixes the message with "error: ".
            std.log.err("Database doesn't exist. " ++
                "Use `aniz database download` to get it", .{});
            return error.DatabaseDoesntExist;
        },
        else => |e| return e,
    };
    defer db.deinit(program.allocator);

    if (ids.count() == 0 and m_search == null) {
        // Fast path if no ids or search is provided.
        for (db.entries) |entry| {
            try entry.serializeToDsv(db.strings, program.stdout);
            try program.stdout.writeAll("\n");
        }
        return;
    }

    var entries_to_print = std.ArrayList(Database.Entry).init(program.allocator);
    defer entries_to_print.deinit();

    try db.filterEntries(&entries_to_print, .{
        .search = m_search,
        .ids = if (ids.count() == 0) null else ids.keys(),
    });

    for (entries_to_print.items) |entry| {
        try entry.serializeToDsv(db.strings, program.stdout);
        try program.stdout.writeAll("\n");
    }
}

const database_download_usage =
    \\Usage: aniz database download
    \\
;

// Downloads the anime-offline-database JSON, then converts it to the binary
// format used by `loadDatabase` for fast startup.
fn databaseDownloadCommand(program: *Program) !void {
    while (program.args.next()) {
        if (program.args.flag(&.{ "-h", "--help", "help" }))
            return program.stdout.writeAll(database_download_usage);
        if (program.args.positional()) |_| {
            try std.io.getStdErr().writeAll(database_download_usage);
            return error.InvalidArgument;
        }
    }

    var http_client = std.http.Client{ .allocator = program.allocator };
    defer http_client.deinit();

    var data_dir = try openFolder(.cache, .{});
    defer data_dir.close();

    // `.read = true` so the file can be read back below after downloading.
    const database_json_file = try data_dir.createFile(database_json_name, .{ .read = true });
    defer database_json_file.close();

    const database_json_url = "https://raw.githubusercontent.com/manami-project/anime-offline-database/master/anime-offline-database-minified.json";
    try download(&http_client, database_json_url, database_json_file.writer());

    try database_json_file.seekTo(0);
    const database_json = try database_json_file.readToEndAlloc(program.allocator, std.math.maxInt(usize));
    defer program.allocator.free(database_json);

    var db = try Database.deserializeFromJson(program.allocator, database_json);
    defer
db.deinit(program.allocator); 174 | 175 | const database_bin_file = try data_dir.createFile(database_bin_name, .{}); 176 | defer database_bin_file.close(); 177 | 178 | var buffered_writer = std.io.bufferedWriter(database_bin_file.writer()); 179 | try db.serializeToBinary(buffered_writer.writer()); 180 | try buffered_writer.flush(); 181 | } 182 | 183 | const list_sub_usage = 184 | \\Usage: 185 | \\ aniz list [command] [args] 186 | \\ aniz list [options] [ids]... 187 | \\ 188 | \\Commands: 189 | \\ complete 190 | \\ drop 191 | \\ on-hold 192 | \\ plan-to-watch 193 | \\ remove 194 | \\ update 195 | \\ watch-episode 196 | \\ watching 197 | \\ help Display this message 198 | \\ [ids]... 199 | \\ 200 | ; 201 | 202 | fn listSubCommand(program: *Program) !void { 203 | if (program.args.next()) { 204 | if (program.args.flag(&.{"complete"})) 205 | return program.manipulateListCommand(completeAction); 206 | if (program.args.flag(&.{"drop"})) 207 | return program.manipulateListCommand(dropAction); 208 | if (program.args.flag(&.{"on-hold"})) 209 | return program.manipulateListCommand(onHoldAction); 210 | if (program.args.flag(&.{"plan-to-watch"})) 211 | return program.manipulateListCommand(planToWatchAction); 212 | if (program.args.flag(&.{"remove"})) 213 | return program.manipulateListCommand(removeAction); 214 | if (program.args.flag(&.{"update"})) 215 | return program.manipulateListCommand(updateAction); 216 | if (program.args.flag(&.{"watch-episode"})) 217 | return program.manipulateListCommand(watchEpisodeAction); 218 | if (program.args.flag(&.{"watching"})) 219 | return program.manipulateListCommand(watchingAction); 220 | if (program.args.flag(&.{ "-h", "--help", "help" })) 221 | return program.stdout.writeAll(list_sub_usage); 222 | } 223 | 224 | return program.listCommand(); 225 | } 226 | 227 | fn listCommand(program: *Program) !void { 228 | var m_search: ?[]const u8 = null; 229 | var ids = std.AutoArrayHashMap(Database.Id, void).init(program.allocator); 230 | defer 
ids.deinit(); 231 | 232 | while (program.args.next()) { 233 | if (program.args.option(&.{ "-s", "--search" })) |search| 234 | m_search = search; 235 | if (program.args.flag(&.{ "-h", "--help", "help" })) 236 | return program.stdout.writeAll(list_sub_usage); 237 | if (program.args.positional()) |url| { 238 | const id = try Database.Id.fromUrl(url); 239 | try ids.put(id, {}); 240 | } 241 | } 242 | 243 | var list = try loadList(program.allocator); 244 | defer list.deinit(program.allocator); 245 | 246 | if (ids.count() == 0 and m_search == null) { 247 | // Fast path if no ids or search is provided. 248 | for (list.entries.items) |entry| { 249 | try entry.serializeToTsv(list.intern.sliceZ(), program.stdout); 250 | try program.stdout.writeAll("\n"); 251 | } 252 | return; 253 | } 254 | 255 | var db = loadDatabase(program.allocator) catch |err| switch (err) { 256 | error.FileNotFound => { 257 | std.log.err("error: Database doesn't exists. " ++ 258 | "Use `aniz database download` to get it", .{}); 259 | return error.DatabaseDoesntExist; 260 | }, 261 | else => |e| return e, 262 | }; 263 | defer db.deinit(program.allocator); 264 | 265 | var entries_to_print = std.ArrayList(Database.Entry).init(program.allocator); 266 | defer entries_to_print.deinit(); 267 | 268 | try db.filterEntries(&entries_to_print, .{ 269 | .search = m_search, 270 | .ids = if (ids.count() == 0) null else ids.keys(), 271 | }); 272 | 273 | for (entries_to_print.items) |entry| { 274 | for (entry.ids.all()) |m_id| { 275 | const id = m_id orelse continue; 276 | const list_entry = list.find(id) orelse continue; 277 | try list_entry.serializeToTsv(list.intern.sliceZ(), program.stdout); 278 | try program.stdout.writeAll("\n"); 279 | } 280 | } 281 | } 282 | 283 | const manipuate_list_usage = 284 | \\Usage: aniz list [ids]... 285 | \\ 286 | \\Commands: 287 | \\ help Display this message 288 | \\ [ids]... 
289 | \\ 290 | ; 291 | 292 | fn manipulateListCommand(program: *Program, action: Action) !void { 293 | var ids = std.AutoArrayHashMap(Database.Id, void).init(program.allocator); 294 | defer ids.deinit(); 295 | 296 | while (program.args.next()) { 297 | if (program.args.flag(&.{ "-h", "--help", "help" })) 298 | return program.stdout.writeAll(manipuate_list_usage); 299 | if (program.args.positional()) |url| { 300 | const id = try Database.Id.fromUrl(url); 301 | try ids.put(id, {}); 302 | } 303 | } 304 | 305 | var db = loadDatabase(program.allocator) catch |err| switch (err) { 306 | error.FileNotFound => { 307 | std.log.err("error: Database doesn't exists. " ++ 308 | "Use `aniz database download` to get it", .{}); 309 | return error.DatabaseDoesntExist; 310 | }, 311 | else => |e| return e, 312 | }; 313 | defer db.deinit(program.allocator); 314 | 315 | var list = try loadList(program.allocator); 316 | defer list.deinit(program.allocator); 317 | 318 | for (ids.keys()) |id| { 319 | const database_entry = db.findWithId(id) orelse { 320 | std.log.err("Anime '{}' was not found in the database", .{id}); 321 | return error.NoSuchAnime; 322 | }; 323 | 324 | const title = database_entry.title.slice(db.strings); 325 | const entry = try list.addEntry(program.allocator, id, title); 326 | 327 | action(&list, entry, database_entry.*); 328 | } 329 | 330 | try saveList(list); 331 | } 332 | 333 | const Action = *const fn (*List, *List.Entry, Database.Entry) void; 334 | 335 | fn completeAction(_: *List, list_entry: *List.Entry, database_entry: Database.Entry) void { 336 | list_entry.date = datetime.datetime.Date.now(); 337 | list_entry.status = .complete; 338 | list_entry.watched += 1; 339 | list_entry.episodes = database_entry.episodes; 340 | } 341 | 342 | fn onHoldAction(_: *List, list_entry: *List.Entry, _: Database.Entry) void { 343 | list_entry.date = datetime.datetime.Date.now(); 344 | list_entry.status = .on_hold; 345 | } 346 | 347 | fn dropAction(_: *List, list_entry: 
*List.Entry, _: Database.Entry) void { 348 | list_entry.date = datetime.datetime.Date.now(); 349 | list_entry.status = .dropped; 350 | } 351 | 352 | fn planToWatchAction(_: *List, list_entry: *List.Entry, _: Database.Entry) void { 353 | list_entry.date = datetime.datetime.Date.now(); 354 | list_entry.status = .plan_to_watch; 355 | } 356 | 357 | fn watchingAction(_: *List, list_entry: *List.Entry, _: Database.Entry) void { 358 | list_entry.date = datetime.datetime.Date.now(); 359 | list_entry.status = .watching; 360 | } 361 | 362 | fn watchEpisodeAction(_: *List, list_entry: *List.Entry, database_entry: Database.Entry) void { 363 | if (list_entry.episodes < database_entry.episodes) { 364 | list_entry.date = datetime.datetime.Date.now(); 365 | list_entry.episodes += 1; 366 | list_entry.status = .watching; 367 | if (list_entry.episodes == database_entry.episodes) { 368 | list_entry.status = .complete; 369 | list_entry.watched += 1; 370 | } 371 | } 372 | } 373 | 374 | fn removeAction(list: *List, list_entry: *List.Entry, _: Database.Entry) void { 375 | const index = (@intFromPtr(list_entry) - @intFromPtr(list.entries.items.ptr)) / 376 | @sizeOf(List.Entry); 377 | _ = list.entries.swapRemove(index); 378 | } 379 | 380 | fn updateAction(_: *List, list_entry: *List.Entry, database_entry: Database.Entry) void { 381 | switch (list_entry.status) { 382 | .complete => list_entry.episodes = database_entry.episodes, 383 | .dropped, .on_hold, .plan_to_watch, .watching => list_entry.watched = 0, 384 | } 385 | } 386 | 387 | const program_name = "aniz"; 388 | const list_name = "list"; 389 | const database_json_name = "database.json"; 390 | const database_bin_name = "database.bin"; 391 | 392 | fn download(client: *std.http.Client, uri_str: []const u8, writer: anytype) !void { 393 | const uri = try std.Uri.parse(uri_str); 394 | var header_buffer: [1024 * 4]u8 = undefined; 395 | var request = try client.open(.GET, uri, .{ 396 | .server_header_buffer = &header_buffer, 397 | .keep_alive = 
false,
    });
    defer request.deinit();

    try request.send();
    try request.finish();
    try request.wait();

    if (request.response.status != .ok)
        return error.HttpServerRepliedWithUnsuccessfulResponse;

    return pipe(request.reader(), writer);
}

// Copies everything from `reader` to `writer` in 4 KiB chunks until EOF.
fn pipe(reader: anytype, writer: anytype) !void {
    var buf: [1024 * 4]u8 = undefined;
    while (true) {
        const len = try reader.read(&buf);
        if (len == 0)
            break;

        try writer.writeAll(buf[0..len]);
    }
}

// Loads the binary database from the cache folder. Returns
// `error.FileNotFound` when it has not been downloaded yet (callers turn
// this into a "run `aniz database download`" hint).
fn loadDatabase(allocator: std.mem.Allocator) !Database {
    var data_dir = try openFolder(.cache, .{});
    defer data_dir.close();

    const file = try data_dir.openFile(database_bin_name, .{});
    defer file.close();

    var res = try Database.deserializeFromBinary(allocator, file.reader());
    errdefer res.deinit(allocator);

    // Trailing bytes after deserialization mean the file does not match the
    // expected binary layout; reject it rather than silently ignore them.
    if ((try file.getPos()) != (try file.getEndPos()))
        return error.FileNotFullyRead;

    return res;
}

// Loads the user's anime list (TSV) from the data folder. A missing file is
// treated as an empty list, not an error.
fn loadList(allocator: std.mem.Allocator) !List {
    var data_dir = try openFolder(.data, .{});
    defer data_dir.close();

    const data = data_dir.readFileAlloc(
        allocator,
        list_name,
        std.math.maxInt(usize),
    ) catch |err| switch (err) {
        error.FileNotFound => "",
        else => |e| return e,
    };
    defer allocator.free(data);

    return List.deserializeFromTsv(allocator, data);
}

// Sorts the list and writes it back via an atomic temp-file rename, so a
// crash mid-write cannot corrupt the existing list file.
fn saveList(list: List) !void {
    var data_dir = try openFolder(.data, .{});
    defer data_dir.close();

    var file = try data_dir.atomicFile(list_name, .{});
    defer file.deinit();

    list.sort();

    var buffered_file = std.io.bufferedWriter(file.file.writer());
    try list.serializeToTsv(buffered_file.writer());

    try buffered_file.flush();
    try file.finish();
}

// Opens `<known-folder>/aniz`, creating the subdirectory if needed. The
// known-folder path itself is resolved into a stack buffer, so no heap
// allocation is performed here.
fn openFolder(folder: folders.KnownFolder, flags: std.fs.Dir.OpenDirOptions) !std.fs.Dir {
    var buf: [std.fs.max_path_bytes]u8 = undefined;
    var fba = std.heap.FixedBufferAllocator.init(&buf);
    // NOTE(review): `error.NoCacheDir` is also returned when `folder` is
    // `.data`, so the error name is misleading in that case.
    var dir = (try folders.open(fba.allocator(), folder, flags)) orelse
        return error.NoCacheDir;
    defer dir.close();

    return dir.makeOpenPath(program_name, flags);
}

// Reference all submodules so `zig build test` runs their tests too.
test {
    _ = ArgParser;
    _ = Database;
    _ = List;
}

const ArgParser = @import("ArgParser.zig");
const Database = @import("Database.zig");
const List = @import("List.zig");

const datetime = @import("datetime");
const folders = @import("folders");
const std = @import("std");
-------------------------------------------------------------------------------- /src/Database.zig: --------------------------------------------------------------------------------
//! An efficient representation of the anime-offline-database. Can be serialized to and from JSON and binary. The
//! binary format is used to store the database locally to avoid parsing the JSON file every time the program is run.
3 | 4 | license_name: StringIntern.Index, 5 | license_url: StringIntern.Index, 6 | repository: StringIntern.Index, 7 | last_update: StringIntern.Index, 8 | 9 | entries: []const Entry, 10 | synonyms: []const StringIntern.Index, 11 | tags: []const StringIntern.Index, 12 | related_sites: []const Id.Site, 13 | related_ids: []const u32, 14 | strings: [:0]const u8, 15 | 16 | pub const Entry = extern struct { 17 | ids: Ids, 18 | 19 | title: StringIntern.Index, 20 | picture_path: StringIntern.Index, 21 | thumbnail_path: StringIntern.Index, 22 | 23 | synonyms_span: Span, 24 | related_span: Span, 25 | tags_span: Span, 26 | 27 | year: u16, 28 | episodes: u16, 29 | 30 | picture_base: Image.Base, 31 | thumbnail_base: Image.Base, 32 | 33 | pack: packed struct(u16) { 34 | kind: Kind, 35 | season: Season, 36 | status: Status, 37 | pad: u8 = 0, 38 | }, 39 | 40 | pub const Season = enum(u3) { 41 | spring = @intFromEnum(Json.Season.SPRING), 42 | summer = @intFromEnum(Json.Season.SUMMER), 43 | fall = @intFromEnum(Json.Season.FALL), 44 | winter = @intFromEnum(Json.Season.WINTER), 45 | undef = @intFromEnum(Json.Season.UNDEFINED), 46 | }; 47 | 48 | pub const Kind = enum(u3) { 49 | tv = @intFromEnum(Json.Type.TV), 50 | movie = @intFromEnum(Json.Type.MOVIE), 51 | ova = @intFromEnum(Json.Type.OVA), 52 | ona = @intFromEnum(Json.Type.ONA), 53 | special = @intFromEnum(Json.Type.SPECIAL), 54 | unknown = @intFromEnum(Json.Type.UNKNOWN), 55 | }; 56 | 57 | pub const Status = enum(u2) { 58 | finished = @intFromEnum(Json.Status.FINISHED), 59 | ongoing = @intFromEnum(Json.Status.ONGOING), 60 | upcoming = @intFromEnum(Json.Status.UPCOMING), 61 | unknown = @intFromEnum(Json.Status.UNKNOWN), 62 | }; 63 | 64 | pub fn picture(entry: Entry, data: [:0]const u8) Image { 65 | return .{ .base = entry.picture_base, .path = entry.picture_path.slice(data) }; 66 | } 67 | 68 | pub fn thumbnail(entry: Entry, data: [:0]const u8) Image { 69 | return .{ .base = entry.thumbnail_base, .path = 
entry.thumbnail_path.slice(data) }; 70 | } 71 | 72 | pub fn synonyms(entry: Entry, database: Database) []const StringIntern.Index { 73 | return database.synonyms[entry.synonyms_span.index..][0..entry.synonyms_span.len]; 74 | } 75 | 76 | pub fn tags(entry: Entry, database: Database) []const StringIntern.Index { 77 | return database.tags[entry.tags_span.index..][0..entry.tags_span.len]; 78 | } 79 | 80 | pub fn related(entry: Entry, database: Database) struct { []const Id.Site, []const u32 } { 81 | return .{ 82 | database.related_sites[entry.related_span.index..][0..entry.related_span.len], 83 | database.related_ids[entry.related_span.index..][0..entry.related_span.len], 84 | }; 85 | } 86 | 87 | pub fn fuzzyScore(entry: Entry, database: Database, pattern: []const u8) usize { 88 | var score = fuzzyScoreString(pattern, entry.title.ptr(database.strings)); 89 | for (entry.synonyms(database)) |synonym| 90 | score = @min(score, fuzzyScoreString(pattern, synonym.ptr(database.strings))); 91 | 92 | return score; 93 | } 94 | 95 | pub fn serializeToDsv(entry: Entry, strings: [:0]const u8, writer: anytype) !void { 96 | try writer.print("{s}\t{}\t{s}\t{}\t{s}\t{}\t{}", .{ 97 | @tagName(entry.pack.kind), 98 | entry.year, 99 | @tagName(entry.pack.season), 100 | entry.episodes, 101 | entry.title.slice(strings), 102 | entry.ids.primary(), 103 | entry.picture(strings), 104 | }); 105 | } 106 | }; 107 | 108 | // Lower score is better 109 | fn fuzzyScoreString(pattern: []const u8, str: [*:0]const u8) usize { 110 | var score: usize = 0; 111 | var last_match: usize = 0; 112 | var i: usize = 0; 113 | for (pattern) |c| { 114 | while (str[i] != 0) { 115 | defer i += 1; 116 | 117 | if (std.ascii.toLower(c) != std.ascii.toLower(str[i])) 118 | continue; 119 | 120 | score += @intFromBool(c != str[i]); 121 | score += i -| (last_match + 1); 122 | last_match = i; 123 | break; 124 | } else return std.math.maxInt(usize); 125 | } 126 | 127 | // Find length by going to end of string 128 | while (str[i] 
!= 0) : (i += 1) {} 129 | 130 | const len = i; 131 | score += ((len - pattern.len) * 2) * @intFromBool(pattern.len != 0); 132 | return score; 133 | } 134 | 135 | test fuzzyScoreString { 136 | try std.testing.expectEqual(@as(usize, std.math.maxInt(usize)), fuzzyScoreString("abc", "ab")); 137 | try std.testing.expectEqual(@as(usize, 0), fuzzyScoreString("", "abc")); 138 | try std.testing.expectEqual(@as(usize, 0), fuzzyScoreString("abc", "abc")); 139 | try std.testing.expectEqual(@as(usize, 1), fuzzyScoreString("abc", "Abc")); 140 | try std.testing.expectEqual(@as(usize, 2), fuzzyScoreString("abc", "ABc")); 141 | try std.testing.expectEqual(@as(usize, 3), fuzzyScoreString("abc", "ABC")); 142 | try std.testing.expectEqual(@as(usize, 3), fuzzyScoreString("abc", "abdc")); 143 | try std.testing.expectEqual(@as(usize, 0), fuzzyScoreString("attack on titan", "attack on titan")); 144 | try std.testing.expectEqual(@as(usize, 3), fuzzyScoreString("attack on titan", "Attack On Titan")); 145 | try std.testing.expectEqual(@as(usize, 0), fuzzyScoreString("Clannad", "Clannad")); 146 | try std.testing.expectEqual(@as(usize, 125), fuzzyScoreString("Clannad", "BJ Special: Hyakumannen Chikyuu no Tabi Bander Book")); 147 | } 148 | 149 | pub const Ids = extern struct { 150 | anidb: Id.Optional = .none, 151 | anilist: Id.Optional = .none, 152 | animecountdown: Id.Optional = .none, 153 | anisearch: Id.Optional = .none, 154 | kitsu: Id.Optional = .none, 155 | livechart: Id.Optional = .none, 156 | myanimelist: Id.Optional = .none, 157 | simkl: Id.Optional = .none, 158 | 159 | pub fn primary(ids: Ids) Id { 160 | return ids.primaryChecked() orelse unreachable; 161 | } 162 | 163 | pub fn primaryChecked(ids: Ids) ?Id { 164 | for (ids.all()) |id| { 165 | if (id != null) 166 | return id; 167 | } 168 | return null; 169 | } 170 | 171 | pub fn set(ids: *Ids, id: Id) void { 172 | switch (id.site) { 173 | inline else => |site| @field(ids, @tagName(site)) = @enumFromInt(id.id), 174 | } 175 | } 176 | 
177 | pub fn has(ids: Ids, id: Id) bool { 178 | const opt_id: Id.Optional = @enumFromInt(id.id); 179 | switch (id.site) { 180 | inline else => |site| return @field(ids, @tagName(site)) == opt_id, 181 | } 182 | } 183 | 184 | pub fn all(ids: Ids) [Id.Site.all.len]?Id { 185 | var res: [Id.Site.all.len]?Id = undefined; 186 | inline for (&res, Id.Site.all) |*ptr, site| { 187 | if (@field(ids, @tagName(site)).unwrap()) |id| { 188 | ptr.* = .{ .site = site, .id = id }; 189 | } else { 190 | ptr.* = null; 191 | } 192 | } 193 | 194 | return res; 195 | } 196 | }; 197 | 198 | pub fn deserializeFromJson(allocator: std.mem.Allocator, json: []const u8) !Database { 199 | var arena_state = std.heap.ArenaAllocator.init(allocator); 200 | defer arena_state.deinit(); 201 | 202 | const arena = arena_state.allocator(); 203 | 204 | const parsed = try std.json.parseFromSlice( 205 | Json, 206 | arena, 207 | json, 208 | .{}, 209 | ); 210 | 211 | var database_entries = std.ArrayList(Json.Anime).init(arena); 212 | try database_entries.ensureUnusedCapacity(parsed.value.data.len); 213 | 214 | // Create a list of only valid database entries 215 | for (parsed.value.data) |entry| { 216 | for (entry.sources) |source| { 217 | _ = Id.fromUrl(source) catch continue; 218 | database_entries.appendAssumeCapacity(entry); 219 | break; 220 | } 221 | } 222 | 223 | var intern = try StringIntern.init(allocator); 224 | defer intern.deinit(allocator); 225 | 226 | // Many entries have the same tags, so we intern them aswell to save memory 227 | var tags_intern = StringsIntern{}; 228 | defer tags_intern.deinit(allocator); 229 | 230 | var tags_tmp = std.ArrayList(StringIntern.Index).init(allocator); 231 | defer tags_tmp.deinit(); 232 | 233 | var entries = std.ArrayList(Entry).init(allocator); 234 | errdefer entries.deinit(); 235 | 236 | var synonyms = std.ArrayList(StringIntern.Index).init(allocator); 237 | errdefer synonyms.deinit(); 238 | 239 | var related_sites = std.ArrayList(Id.Site).init(allocator); 240 | 
errdefer related_sites.deinit(); 241 | 242 | var related_ids = std.ArrayList(u32).init(allocator); 243 | errdefer related_ids.deinit(); 244 | 245 | const license_name = try intern.put(allocator, parsed.value.license.name); 246 | const license_url = try intern.put(allocator, parsed.value.license.url); 247 | const repository = try intern.put(allocator, parsed.value.repository); 248 | const last_update = try intern.put(allocator, parsed.value.lastUpdate); 249 | 250 | for (parsed.value.data) |entry| { 251 | var ids: Ids = .{}; 252 | 253 | for (entry.sources) |source| { 254 | const id = Id.fromUrl(source) catch continue; // TODO: Error handling 255 | ids.set(id); 256 | } 257 | if (ids.primaryChecked() == null) 258 | continue; // TODO: Error handling 259 | 260 | const picture = try Image.fromUrl(entry.picture); 261 | const thumbnail = try Image.fromUrl(entry.thumbnail); 262 | const title = try intern.put(allocator, entry.title); 263 | const picture_path = try intern.put(allocator, picture.path); 264 | const thumbnail_path = try intern.put(allocator, thumbnail.path); 265 | 266 | const synonyms_index = std.math.cast(u32, synonyms.items.len) orelse return error.OutOfMemory; 267 | const synonyms_len = std.math.cast(u32, entry.synonyms.len) orelse return error.OutOfMemory; 268 | 269 | try synonyms.ensureUnusedCapacity(synonyms_len); 270 | for (entry.synonyms) |synonym| { 271 | const index = try intern.put(allocator, synonym); 272 | synonyms.appendAssumeCapacity(index); 273 | } 274 | 275 | try tags_tmp.ensureTotalCapacity(entry.tags.len); 276 | tags_tmp.shrinkRetainingCapacity(0); 277 | 278 | for (entry.tags) |tag| { 279 | const index = try intern.put(allocator, tag); 280 | tags_tmp.appendAssumeCapacity(index); 281 | } 282 | 283 | const tags_span = try tags_intern.put(allocator, tags_tmp.items); 284 | 285 | std.debug.assert(related_ids.items.len == related_sites.items.len); 286 | const related_index = std.math.cast(u32, related_ids.items.len) orelse return error.OutOfMemory; 
287 | 288 | try related_sites.ensureUnusedCapacity(entry.relatedAnime.len); 289 | try related_ids.ensureUnusedCapacity(entry.relatedAnime.len); 290 | for (entry.relatedAnime) |related| { 291 | const id = Id.fromUrl(related) catch continue; // TODO: Error handling 292 | related_sites.appendAssumeCapacity(id.site); 293 | related_ids.appendAssumeCapacity(id.id); 294 | } 295 | 296 | const related_len = std.math.cast(u32, related_ids.items.len - related_index) orelse return error.OutOfMemory; 297 | 298 | try entries.append(.{ 299 | .ids = ids, 300 | .title = title, 301 | .picture_path = picture_path, 302 | .thumbnail_path = thumbnail_path, 303 | .synonyms_span = .{ .index = synonyms_index, .len = synonyms_len }, 304 | .related_span = .{ .index = related_index, .len = related_len }, 305 | .tags_span = tags_span, 306 | .year = entry.animeSeason.year, 307 | .episodes = entry.episodes, 308 | .picture_base = picture.base, 309 | .thumbnail_base = thumbnail.base, 310 | .pack = .{ 311 | .kind = @enumFromInt(@intFromEnum(entry.type)), 312 | .season = @enumFromInt(@intFromEnum(entry.animeSeason.season)), 313 | .status = @enumFromInt(@intFromEnum(entry.status)), 314 | }, 315 | }); 316 | } 317 | 318 | const entries_slice = try entries.toOwnedSlice(); 319 | errdefer allocator.free(entries_slice); 320 | 321 | const synonyms_slice = try synonyms.toOwnedSlice(); 322 | errdefer allocator.free(synonyms_slice); 323 | 324 | const tags_slice = try tags_intern.data.toOwnedSlice(allocator); 325 | errdefer allocator.free(tags_slice); 326 | 327 | const related_sites_slice = try related_sites.toOwnedSlice(); 328 | errdefer allocator.free(related_sites_slice); 329 | 330 | const related_ids_slice = try related_ids.toOwnedSlice(); 331 | errdefer allocator.free(related_ids_slice); 332 | 333 | const strings_slice = try intern.data.toOwnedSliceSentinel(allocator, 0); 334 | errdefer allocator.free(strings_slice); 335 | 336 | return Database{ 337 | .license_name = license_name, 338 | .license_url = 
license_url, 339 | .repository = repository, 340 | .last_update = last_update, 341 | .entries = entries_slice, 342 | .synonyms = synonyms_slice, 343 | .tags = tags_slice, 344 | .related_sites = related_sites_slice, 345 | .related_ids = related_ids_slice, 346 | .strings = strings_slice, 347 | }; 348 | } 349 | 350 | pub fn serializeToJson(database: Database, writer: anytype, options: std.json.StringifyOptions) !void { 351 | var buf: [1024 * 4]u8 = undefined; 352 | var out = std.json.writeStream(writer, options); 353 | 354 | try out.beginObject(); 355 | 356 | try out.objectField("license"); 357 | try out.write(.{ 358 | .name = database.license_name.slice(database.strings), 359 | .url = database.license_url.slice(database.strings), 360 | }); 361 | try out.objectField("repository"); 362 | try out.write(database.repository.slice(database.strings)); 363 | try out.objectField("lastUpdate"); 364 | try out.write(database.last_update.slice(database.strings)); 365 | 366 | try out.objectField("data"); 367 | try out.beginArray(); 368 | for (database.entries) |entry| { 369 | try out.beginObject(); 370 | 371 | try out.objectField("sources"); 372 | try out.beginArray(); 373 | for (entry.ids.all()) |m_id| { 374 | if (m_id) |id| 375 | try out.write(try std.fmt.bufPrint(&buf, "{}", .{id})); 376 | } 377 | try out.endArray(); 378 | 379 | try out.objectField("title"); 380 | try out.write(entry.title.slice(database.strings)); 381 | 382 | try out.objectField("type"); 383 | try out.write(@as(Json.Type, @enumFromInt(@intFromEnum(entry.pack.kind)))); 384 | 385 | try out.objectField("episodes"); 386 | try out.write(entry.episodes); 387 | 388 | try out.objectField("status"); 389 | try out.write(@as(Json.Status, @enumFromInt(@intFromEnum(entry.pack.status)))); 390 | 391 | try out.objectField("animeSeason"); 392 | try out.write(.{ 393 | .season = @as(Json.Season, @enumFromInt(@intFromEnum(entry.pack.season))), 394 | .year = entry.year, 395 | }); 396 | 397 | try out.objectField("picture"); 398 | 
try out.write(try std.fmt.bufPrint(&buf, "{}", .{entry.picture(database.strings)}));

        try out.objectField("thumbnail");
        try out.write(try std.fmt.bufPrint(&buf, "{}", .{entry.thumbnail(database.strings)}));

        try out.objectField("synonyms");
        try out.beginArray();
        for (entry.synonyms(database)) |synonym|
            try out.write(synonym.slice(database.strings));
        try out.endArray();

        try out.objectField("relatedAnime");
        try out.beginArray();
        const related_sites, const related_ids = entry.related(database);
        for (related_sites, related_ids) |related_site, related_id| {
            const related = Id{ .site = related_site, .id = related_id };
            try out.write(try std.fmt.bufPrint(&buf, "{}", .{related}));
        }
        try out.endArray();

        try out.objectField("tags");
        try out.beginArray();
        for (entry.tags(database)) |tag|
            try out.write(tag.slice(database.strings));
        try out.endArray();

        try out.endObject();
    }
    try out.endArray();

    try out.endObject();
}

const binary_magic: [4]u8 = "ANIZ".*;
// Declared as u32 to match `BinaryHeader.version`, which it initializes and
// is compared against in `deserializeFromBinary` (was u16; the value is
// identical after comptime coercion, this is a consistency fix).
const binary_version: u32 = 2;

// Fixed-size header preceding the raw slices in the binary database file.
// The counts are element counts, except `strings` which is a byte count and
// includes the trailing 0 sentinel.
const BinaryHeader = extern struct {
    magic: [4]u8 = binary_magic,
    version: u32 = binary_version,
    license_name: StringIntern.Index,
    license_url: StringIntern.Index,
    repository: StringIntern.Index,
    last_update: StringIntern.Index,
    entries: u32,
    synonyms: u32,
    tags: u32,
    related_sites: u32,
    related_ids: u32,
    strings: u32,
};

// Reads a database previously written by `serializeToBinary`. Returns
// `error.InvalidData` on a magic/version mismatch or a missing string
// sentinel. NOTE(review): `readStruct`/raw byte slices make the format
// dependent on native layout and endianness — the file is only portable
// between identical targets; confirm this is intended.
pub fn deserializeFromBinary(allocator: std.mem.Allocator, reader: anytype) !Database {
    const header = try reader.readStruct(BinaryHeader);
    if (!std.mem.eql(u8, &header.magic, &binary_magic))
        return error.InvalidData;
    if (header.version != binary_version)
        return error.InvalidData;

    const entries = try allocator.alloc(Entry, header.entries);
    errdefer allocator.free(entries);

    const synonyms = try allocator.alloc(StringIntern.Index, header.synonyms);
    errdefer allocator.free(synonyms);

    const tags = try allocator.alloc(StringIntern.Index, header.tags);
    errdefer allocator.free(tags);

    const related_sites = try allocator.alloc(Id.Site, header.related_sites);
    errdefer allocator.free(related_sites);

    const related_ids = try allocator.alloc(u32, header.related_ids);
    errdefer allocator.free(related_ids);

    const strings = try allocator.alloc(u8, header.strings);
    errdefer allocator.free(strings);

    try reader.readNoEof(std.mem.sliceAsBytes(entries));
    try reader.readNoEof(std.mem.sliceAsBytes(synonyms));
    try reader.readNoEof(std.mem.sliceAsBytes(tags));
    try reader.readNoEof(std.mem.sliceAsBytes(related_sites));
    try reader.readNoEof(std.mem.sliceAsBytes(related_ids));
    try reader.readNoEof(strings);

    // The string data must end in a 0 byte so it can be exposed as a
    // sentinel-terminated slice below.
    if (strings.len == 0)
        return error.InvalidData;
    if (strings[strings.len - 1] != 0)
        return error.InvalidData;

    return .{
        .license_name = header.license_name,
        .license_url = header.license_url,
        .repository = header.repository,
        .last_update = header.last_update,
        .entries = entries,
        .synonyms = synonyms,
        .tags = tags,
        .related_sites = related_sites,
        .related_ids = related_ids,
        .strings = strings[0 .. strings.len - 1 :0],
    };
}

pub fn serializeToBinary(database: Database, writer: anytype) !void {
    try writer.writeStruct(BinaryHeader{
        .license_name = database.license_name,
        .license_url = database.license_url,
        .repository = database.repository,
        .last_update = database.last_update,
        .entries = std.math.cast(u32, database.entries.len) orelse return error.OutOfMemory,
        .synonyms = std.math.cast(u32, database.synonyms.len) orelse return error.OutOfMemory,
        .tags = std.math.cast(u32, database.tags.len) orelse return error.OutOfMemory,
        .related_sites = std.math.cast(u32, database.related_sites.len) orelse return error.OutOfMemory,
        .related_ids = std.math.cast(u32, database.related_ids.len) orelse return error.OutOfMemory,
        .strings = std.math.cast(u32, database.strings.len + 1) orelse return error.OutOfMemory,
    });

    try writer.writeAll(std.mem.sliceAsBytes(database.entries));
    try writer.writeAll(std.mem.sliceAsBytes(database.synonyms));
    try writer.writeAll(std.mem.sliceAsBytes(database.tags));
    try writer.writeAll(std.mem.sliceAsBytes(database.related_sites));
    try writer.writeAll(std.mem.sliceAsBytes(database.related_ids));
    try writer.writeAll(database.strings[0 ..
database.strings.len + 1]);
}

/// Frees every slice owned by the database; the database is invalid
/// afterwards. `strings` is sentinel-terminated, and `free` releases the
/// full allocation including the trailing NUL.
pub fn deinit(database: *Database, allocator: std.mem.Allocator) void {
    allocator.free(database.entries);
    allocator.free(database.synonyms);
    allocator.free(database.tags);
    allocator.free(database.related_sites);
    allocator.free(database.related_ids);
    allocator.free(database.strings);
}

/// Linear scan for the first entry whose id for `id.site` equals `id.id`.
/// Returns null when no entry matches.
pub fn findWithId(database: Database, id: Id) ?*const Entry {
    const converted_id: Id.Optional = @enumFromInt(id.id);
    switch (id.site) {
        // `inline else` instantiates the loop per site so the field access
        // below can be resolved at compile time via @field/@tagName.
        inline else => |site| {
            for (database.entries) |*entry| {
                if (@field(entry.ids, @tagName(site)) == converted_id)
                    return entry;
            }

            return null;
        },
    }
}

/// Appends to `list` every entry that matches at least one id in `ids`.
/// Each matching entry is appended once, even when several ids match it
/// (mirrors the break/else-continue filtering in `filterEntries`).
pub fn entriesWithIds(database: Database, ids: []const Id, list: *std.ArrayList(Entry)) !void {
    // Capacity hint only; the actual number of matches may be smaller.
    try list.ensureUnusedCapacity(ids.len);
    for (database.entries) |entry| {
        for (ids) |id| {
            if (entry.ids.has(id)) {
                try list.append(entry);
                break;
            }
        }
    }
}

pub const FilterOptions = struct {
    /// If not null, entries will search using this string. The search is fuzzy and the results will be sorted
    /// based on how well they match the search string.
    search: ?[]const u8 = null,

    /// If not null, only entries with one of the ids in this list will be included.
    ids: ?[]const Id = null,

    /// The allocator used for temporary allocations. If null then `out.allocator` will be used.
    tmp_allocator: ?std.mem.Allocator = null,
};

/// Appends every entry matching `opt` to `out`. When `opt.search` is set,
/// the entries appended by this call are sorted best-match-first; entries
/// already present in `out` are left untouched.
pub fn filterEntries(database: Database, out: *std.ArrayList(Entry), opt: FilterOptions) !void {
    const tmp_allocator = opt.tmp_allocator orelse out.allocator;

    var scores = std.ArrayList(usize).init(tmp_allocator);
    defer scores.deinit();

    // Remember where our results begin so the sort below only touches
    // entries appended by this call (scores are index-aligned with them).
    const start = out.items.len;

    try out.ensureUnusedCapacity(database.entries.len);
    try scores.ensureUnusedCapacity(database.entries.len);
    for (database.entries) |entry| {
        if (opt.ids) |ids| {
            // Skip the entry unless one of the requested ids matches.
            for (ids) |id| {
                if (entry.ids.has(id))
                    break;
            } else continue;
        }

        if (opt.search) |search| {
            const score = entry.fuzzyScore(database, search);
            // maxInt(usize) marks "no match at all"; exclude those entries.
            if (score == std.math.maxInt(usize))
                continue;

            scores.appendAssumeCapacity(score);
        }

        out.appendAssumeCapacity(entry);
    }

    if (opt.search) |_| {
        std.mem.sortContext(0, scores.items.len, ScoredSortContext{
            .entries = out.items[start..],
            .scores = scores.items,
        });
    }
}

// Sort context keeping entries and their fuzzy scores in lockstep;
// `entries` and `scores` must have equal length.
const ScoredSortContext = struct {
    entries: []Entry,
    scores: []usize,

    pub fn swap(ctx: ScoredSortContext, a_index: usize, b_index: usize) void {
        std.mem.swap(Entry, &ctx.entries[a_index], &ctx.entries[b_index]);
        std.mem.swap(usize, &ctx.scores[a_index], &ctx.scores[b_index]);
    }

    pub fn lessThan(ctx: ScoredSortContext, a_index: usize, b_index: usize) bool {
        return ctx.scores[a_index] < ctx.scores[b_index];
    }
};

/// Round-trips `input` through the JSON and binary formats, asserting the
/// re-serialized JSON equals `expected_output` both times.
fn testTransform(input: []const u8, expected_output: []const u8) !void {
    var database_from_json = try deserializeFromJson(std.testing.allocator, input);
    defer database_from_json.deinit(std.testing.allocator);

    var actual_output = std.ArrayList(u8).init(std.testing.allocator);
    defer actual_output.deinit();
    try database_from_json.serializeToJson(actual_output.writer(), .{ .whitespace =
.indent_2 }); 623 | 624 | try std.testing.expectEqualStrings(expected_output, actual_output.items); 625 | 626 | // After testing the JSON serialization, test the binary serialization 627 | var binary_serialized = std.ArrayList(u8).init(std.testing.allocator); 628 | defer binary_serialized.deinit(); 629 | try database_from_json.serializeToBinary(binary_serialized.writer()); 630 | 631 | var fbs = std.io.fixedBufferStream(binary_serialized.items); 632 | var database_from_binary = try deserializeFromBinary(std.testing.allocator, fbs.reader()); 633 | defer database_from_binary.deinit(std.testing.allocator); 634 | 635 | actual_output.shrinkRetainingCapacity(0); 636 | try database_from_binary.serializeToJson(actual_output.writer(), .{ .whitespace = .indent_2 }); 637 | try std.testing.expectEqualStrings(expected_output, actual_output.items); 638 | } 639 | 640 | fn testCanonical(input: []const u8) !void { 641 | return testTransform(input, input); 642 | } 643 | 644 | test "json" { 645 | try testCanonical( 646 | \\{ 647 | \\ "license": { 648 | \\ "name": "", 649 | \\ "url": "" 650 | \\ }, 651 | \\ "repository": "", 652 | \\ "lastUpdate": "", 653 | \\ "data": [] 654 | \\} 655 | ); 656 | } 657 | 658 | test "binary version should be updated" { 659 | const json = 660 | \\{ 661 | \\ "license": { 662 | \\ "name": "test1", 663 | \\ "url": "test2" 664 | \\ }, 665 | \\ "repository": "test3", 666 | \\ "lastUpdate": "test4", 667 | \\ "data": [ 668 | \\ { 669 | \\ "sources": [ 670 | \\ "https://anidb.net/anime/8069", 671 | \\ "https://anilist.co/anime/9756", 672 | \\ "https://anisearch.com/anime/6601", 673 | \\ "https://kitsu.app/anime/5853", 674 | \\ "https://livechart.me/anime/3246", 675 | \\ "https://myanimelist.net/anime/9756" 676 | \\ ], 677 | \\ "title": "Mahou Shoujo Madoka★Magica", 678 | \\ "type": "TV", 679 | \\ "episodes": 12, 680 | \\ "status": "FINISHED", 681 | \\ "animeSeason": { 682 | \\ "season": "WINTER", 683 | \\ "year": 2011 684 | \\ }, 685 | \\ "picture": 
"https://cdn.myanimelist.net/images/anime/11/55225.jpg", 686 | \\ "thumbnail": "https://cdn.myanimelist.net/images/anime/11/55225t.jpg", 687 | \\ "synonyms": [ 688 | \\ "Büyücü Kız Madoka Magica", 689 | \\ "Cô gái phép thuật Madoka", 690 | \\ "MSMM", 691 | \\ "Madoka", 692 | \\ "Madoka Magica", 693 | \\ "Magical Girl Madoka Magica", 694 | \\ "Magical Girl Madoka Magika", 695 | \\ "Mahou Shoujo Madoka Magica", 696 | \\ "Mahou Shoujo Madoka Magika", 697 | \\ "Mahou Shoujo Madoka☆Magica", 698 | \\ "Mahō Shōjo Madoka Magica", 699 | \\ "Meduka Meguca", 700 | \\ "PMMM", 701 | \\ "Puella Magi Madoka Magica", 702 | \\ "madokamagica", 703 | \\ "madomagi", 704 | \\ "pmagi", 705 | \\ "Μάντοκα, το Μαγικό Κορίτσι", 706 | \\ "Волшебница Мадока Магика", 707 | \\ "Девочка-волшебница Мадока Магика", 708 | \\ "Девочка-волшебница Мадока☆Волшебство", 709 | \\ "Дівчина-чарівниця Мадока Маґіка", 710 | \\ "Чарівниця Мадока Магіка", 711 | \\ "הנערה הקסומה מאדוקה מאגיקה", 712 | \\ "مادوكا ماجيكا", 713 | \\ "مدوکا مجیکا دختر جادویی", 714 | \\ "สาวน้อยเวทมนตร์ มาโดกะ", 715 | \\ "まどマギ", 716 | \\ "まほうしょうじょまどかまぎか", 717 | \\ "マドマギ", 718 | \\ "小圆", 719 | \\ "魔法少女まどか★マギカ", 720 | \\ "魔法少女まどか★マギカ PUELLA MAGI MADOKA MAGICA", 721 | \\ "魔法少女まどか☆マギカ", 722 | \\ "魔法少女小圆", 723 | \\ "魔法少女小圓", 724 | \\ "마법소녀 마도카 마기카" 725 | \\ ], 726 | \\ "relatedAnime": [ 727 | \\ "https://anidb.net/anime/11793", 728 | \\ "https://anidb.net/anime/14360", 729 | \\ "https://anidb.net/anime/15472", 730 | \\ "https://anidb.net/anime/16278", 731 | \\ "https://anidb.net/anime/16404", 732 | \\ "https://anidb.net/anime/8778", 733 | \\ "https://anilist.co/anime/101090", 734 | \\ "https://anilist.co/anime/104051", 735 | \\ "https://anilist.co/anime/10519", 736 | \\ "https://anilist.co/anime/11977", 737 | \\ "https://anilist.co/anime/11979", 738 | \\ "https://anilist.co/anime/11981", 739 | \\ "https://anisearch.com/anime/13854", 740 | \\ "https://anisearch.com/anime/6993", 741 | \\ "https://anisearch.com/anime/7409", 742 | \\ 
"https://kitsu.app/anime/11573", 743 | \\ "https://kitsu.app/anime/13871", 744 | \\ "https://kitsu.app/anime/42016", 745 | \\ "https://kitsu.app/anime/48919", 746 | \\ "https://kitsu.app/anime/6218", 747 | \\ "https://kitsu.app/anime/6636", 748 | \\ "https://kitsu.app/anime/6637", 749 | \\ "https://kitsu.app/anime/6638", 750 | \\ "https://livechart.me/anime/10663", 751 | \\ "https://livechart.me/anime/1947", 752 | \\ "https://livechart.me/anime/3495", 753 | \\ "https://livechart.me/anime/4910", 754 | \\ "https://livechart.me/anime/74", 755 | \\ "https://livechart.me/anime/972", 756 | \\ "https://livechart.me/anime/973", 757 | \\ "https://livechart.me/anime/9862", 758 | \\ "https://myanimelist.net/anime/10519", 759 | \\ "https://myanimelist.net/anime/11977", 760 | \\ "https://myanimelist.net/anime/11979", 761 | \\ "https://myanimelist.net/anime/11981", 762 | \\ "https://myanimelist.net/anime/32153", 763 | \\ "https://myanimelist.net/anime/35300", 764 | \\ "https://myanimelist.net/anime/38256", 765 | \\ "https://myanimelist.net/anime/53932", 766 | \\ "https://myanimelist.net/anime/54209" 767 | \\ ], 768 | \\ "tags": [ 769 | \\ "achronological order", 770 | \\ "action", 771 | \\ "aliens", 772 | \\ "alternate universe", 773 | \\ "angst", 774 | \\ "anthropomorphism", 775 | \\ "anti-hero", 776 | \\ "asia", 777 | \\ "award winning", 778 | \\ "coming of age", 779 | \\ "contemporary fantasy", 780 | \\ "cosmic horror", 781 | \\ "dark fantasy", 782 | \\ "drama", 783 | \\ "earth", 784 | \\ "ensemble cast", 785 | \\ "fantasy", 786 | \\ "female protagonist", 787 | \\ "gods", 788 | \\ "guns", 789 | \\ "henshin", 790 | \\ "horror", 791 | \\ "japan", 792 | \\ "lgbtq+ themes", 793 | \\ "love triangle", 794 | \\ "magic", 795 | \\ "magical girl", 796 | \\ "mahou shoujo", 797 | \\ "mature themes", 798 | \\ "melancholy", 799 | \\ "middle school", 800 | \\ "moe", 801 | \\ "monster", 802 | \\ "new", 803 | \\ "original work", 804 | \\ "philosophy", 805 | \\ "present", 806 | \\ "primarily 
child cast", 807 | \\ "primarily female cast", 808 | \\ "primarily teen cast", 809 | \\ "psychological", 810 | \\ "psychological drama", 811 | \\ "school", 812 | \\ "school life", 813 | \\ "spearplay", 814 | \\ "suicide", 815 | \\ "super power", 816 | \\ "supernatural drama", 817 | \\ "survival", 818 | \\ "suspense", 819 | \\ "swords & co", 820 | \\ "thriller", 821 | \\ "time loop", 822 | \\ "time manipulation", 823 | \\ "tomboy", 824 | \\ "tragedy", 825 | \\ "transfer students", 826 | \\ "twisted story", 827 | \\ "urban", 828 | \\ "urban fantasy", 829 | \\ "violence", 830 | \\ "witch" 831 | \\ ] 832 | \\ } 833 | \\ ] 834 | \\} 835 | ; 836 | 837 | try testCanonical(json); 838 | 839 | var database_from_json = try deserializeFromJson(std.testing.allocator, json); 840 | defer database_from_json.deinit(std.testing.allocator); 841 | 842 | var binary_serialized = std.ArrayList(u8).init(std.testing.allocator); 843 | defer binary_serialized.deinit(); 844 | try database_from_json.serializeToBinary(binary_serialized.writer()); 845 | 846 | var digest: [std.crypto.hash.sha2.Sha256.digest_length]u8 = undefined; 847 | std.crypto.hash.sha2.Sha256.hash(binary_serialized.items, &digest, .{}); 848 | 849 | // If this test fails, then the binary version should be updated 850 | const expected_hash = "8f901cea56125b39f98eed8967b0120e5e2d3426ff4f67a469fb5b79fbf32065"; 851 | try std.testing.expectEqualStrings(expected_hash, &std.fmt.bytesToHex(digest, .lower)); 852 | } 853 | 854 | test { 855 | _ = Json; 856 | _ = Id; 857 | _ = Image; 858 | _ = StringIntern; 859 | _ = StringsIntern; 860 | } 861 | 862 | const Database = @This(); 863 | 864 | const Span = StringsIntern.Span; 865 | 866 | pub const Id = @import("database/Id.zig"); 867 | pub const Image = @import("database/Image.zig"); 868 | 869 | const Json = @import("database/Json.zig"); 870 | const StringIntern = @import("StringIntern.zig"); 871 | const StringsIntern = @import("StringsIntern.zig"); 872 | 873 | const builtin = 
@import("builtin"); 874 | const std = @import("std"); 875 | --------------------------------------------------------------------------------