├── .credo.exs
├── .formatter.exs
├── .gitignore
├── README.md
├── config
└── config.exs
├── h2o.yaml
├── lib
├── mix
│ ├── sweetroll2.bootstrap.ex
│ ├── sweetroll2.drop.ex
│ ├── sweetroll2.export.ex
│ ├── sweetroll2.import.ex
│ └── sweetroll2.setup.ex
├── nice_log_formatter.ex
├── sweetroll2.ex
└── sweetroll2
│ ├── application.ex
│ ├── auth
│ ├── access_token.ex
│ ├── bearer.ex
│ ├── serve.ex
│ ├── session.ex
│ └── temp_code.ex
│ ├── convert.ex
│ ├── events.ex
│ ├── http_client.ex
│ ├── job
│ ├── backup.ex
│ ├── clear_jobs.ex
│ ├── compress.ex
│ ├── fetch.ex
│ ├── generate.ex
│ ├── notify_websub.ex
│ └── send_webmentions.ex
│ ├── markup.ex
│ ├── media_upload.ex
│ ├── micropub.ex
│ ├── post.ex
│ ├── post
│ ├── comments.ex
│ ├── db_as_map.ex
│ ├── generative.ex
│ ├── generative
│ │ ├── feed.ex
│ │ ├── inbox.ex
│ │ ├── pagination.ex
│ │ └── tag.ex
│ └── page.ex
│ ├── render.ex
│ └── serve.ex
├── mix.exs
├── mix.lock
├── mrb
├── as.rb
├── out.rb
└── root.rb
├── priv
└── static
│ ├── fnt
│ ├── Inter-Regular.woff
│ ├── Inter-Regular.woff2
│ └── Inter-upright.var.woff2
│ ├── icons.svg
│ ├── icons
│ ├── arrow-down.svg
│ ├── arrow-up.svg
│ ├── bookmark.svg
│ ├── desktop-download.svg
│ ├── device-camera.svg
│ ├── eye.svg
│ ├── info.svg
│ ├── link.svg
│ ├── lock.svg
│ ├── megaphone.svg
│ ├── octoface.svg
│ ├── paintcan.svg
│ ├── quote.svg
│ ├── reply.svg
│ ├── star.svg
│ └── telescope.svg
│ ├── micro-panel-all.bundle.min.js
│ ├── sr2.js
│ └── style.css
├── test
├── sweetroll2
│ ├── markup_test.exs
│ ├── post_test.exs
│ └── render_test.exs
├── sweetroll2_test.exs
└── test_helper.exs
└── tpl
├── cite.html.eex
├── entry.html.eex
├── footer.html.eex
├── head.html.eex
├── header.html.eex
├── page_authorize.html.eex
├── page_entry.html.eex
├── page_feed.html.eex
└── page_login.html.eex
/.credo.exs:
--------------------------------------------------------------------------------
%{
  configs: [
    %{
      name: "default",
      files: %{
        included: ["lib/", "src/", "test/", "web/", "apps/"],
        excluded: [~r"/_build/", ~r"/deps/", ~r"/node_modules/"]
      },
      checks: [
        # not available with elixir 1.9 (??)
        {Credo.Check.Refactor.MapInto, false},
        {Credo.Check.Warning.LazyLogging, false},

        # reconfigure
        {Credo.Check.Readability.MaxLineLength, [priority: :low, max_length: 128]},
        {Credo.Check.Refactor.CyclomaticComplexity, [max_complexity: 12]},

        # opt in
        {Credo.Check.Warning.UnsafeToAtom, []},
        {Credo.Check.Readability.UnnecessaryAliasExpansion, []},
        {Credo.Check.Consistency.MultiAliasImportRequireUse, []},
        {Credo.Check.Design.DuplicatedCode, []},
        {Credo.Check.Refactor.AppendSingleItem, []},
        {Credo.Check.Refactor.DoubleBooleanNegation, []},
        {Credo.Check.Refactor.VariableRebinding, []},
        # trailing comma removed: `mix format` (configured via .formatter.exs)
        # strips it, so keeping it caused needless formatter churn
        {Credo.Check.Warning.MapGetUnsafePass, []}
      ]
    }
  ]
}
31 |
--------------------------------------------------------------------------------
/.formatter.exs:
--------------------------------------------------------------------------------
# Used by "mix format"
[
  # format the mix/formatter scripts plus all app, config and test sources
  inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"],
  # inherit formatter rules exported by these dependencies
  import_deps: [:plug, :ex_early_ret],
  # project-local macros that read better without parentheses
  locals_without_parens: [
    deftpl: 2,
    ret_if: :*
  ]
]
10 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # The directory Mix will write compiled artifacts to.
2 | /_build/
3 |
4 | # If you run "mix test --cover", coverage assets end up here.
5 | /cover/
6 |
7 | # The directory Mix downloads your dependencies sources to.
8 | /deps/
9 |
10 | # Where 3rd-party dependencies like ExDoc output generated docs.
11 | /doc/
12 |
13 | # Ignore .fetch files in case you like to edit your project deps locally.
14 | /.fetch
15 |
16 | # If the VM crashes, it generates a dump, let's ignore it too.
17 | erl_crash.dump
18 |
19 | # Also ignore archive artifacts (built via "mix archive.build").
20 | *.ez
21 |
22 | # Ignore package tarball (built via "mix hex.build").
23 | sweetroll2-*.tar
24 |
25 | # Lang server
26 | .elixir_ls
27 |
28 | # Profiler
29 | fprof.trace
30 |
31 | # Dev TLS cert and key
32 | /*.pem
33 |
34 | # Default static generator output folder
35 | /out/
36 |
37 | # Database
38 | /priv/db
39 |
40 | # Backups
41 | /priv/backup
42 |
43 | # Hooks
44 | /priv/hooks
45 |
46 | # Compressed assets
47 | /priv/static/*.br
48 | /priv/static/*.gz
49 |
50 | # Debug assets
51 | /priv/static/*.js.map
52 |
53 | # Home dir stuff
54 | /.hex
55 | /.mix
56 | /.cargo
57 |
58 | # Erlang node stuff
59 | .erlang.cookie
60 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Sweetroll2 (is abandoned, sorry)
2 |
3 | A powerful micro/blogging engine with [IndieWeb] features.
4 |
5 | - monolithic BEAM app written in [Elixir] with no external database (everything is stored in Mnesia)
6 | - your site is dynamic and static at the same time
7 | - it's not a traditional "static site generator", it's very much a long-running server
8 | - but all pages are saved to static HTML files for performance (served from a frontend server like [h2o])
9 | - uniform data model: every page is a [microformats2] style object
10 | - special object types are used for stuff like feeds (for dynamic feeds, `children` are the result of a query)
11 | - and they create more virtual pages (e.g. dynamic feeds are paginated: `/notes` → `/notes/page1`, `/notes/page2`…)
12 | - asset management: automatic compression and cache busting
13 | - local password authentication, [IndieAuth] app authorization
14 | - [Micropub] is the primary way of working with pages
15 | - [micro-panel] is integrated as the post editing interface
16 | - [Webmentions] are supported for distributed conversations across websites
17 | - [Salmentions] are sent as a natural consequence of the architecture (received mention → update event → mentions are sent)
18 | - [WebSub] notifications are sent for quick updates in readers
19 |
20 | [IndieWeb]: https://indieweb.org/
21 | [Elixir]: https://elixir-lang.org/
22 | [h2o]: https://h2o.examp1e.net/
23 | [microformats2]: http://microformats.org/wiki/microformats2
24 | [IndieAuth]: https://indieweb.org/IndieAuth
25 | [Micropub]: https://indieweb.org/micropub
26 | [micro-panel]: https://github.com/myfreeweb/micro-panel
27 | [Webmentions]: https://indieweb.org/webmention
28 | [Salmentions]: https://indieweb.org/Salmention
29 | [WebSub]: https://indieweb.org/WebSub
30 |
31 | ## Usage
32 |
33 | Mix tasks:
34 |
35 | - `sweetroll2.setup` creates the database
36 | - `sweetroll2.bootstrap` adds initial posts
37 | - `sweetroll2.drop` deletes the database
38 |
39 | Environment variables:
40 |
41 | - `MIX_ENV`: in `prod`, logging will be in JSON (ready for [shipping somewhere with something](https://docs.timber.io/setup/log-forwarders/fluent-bit)), scheduled background tasks will be active, the plug debug middleware won't be active, etc.
42 | - `SR2_SERVER_SOCKET` or `SR2_SERVER_PORT`: where to listen for connections (default is port 6969)
43 | - `SR2_PASSWORD_HASH`: Argon2 hash of the admin password (REQUIRED, e.g. `$argon2id$v=19$m=…`)
44 | - `SR2_CANONICAL_HOME_URL`: scheme and hostname (NO SLASH) of the website (REQUIRED, e.g. `https://example.com`)
45 | - `SR2_WEBSUB_HUB`: URL of the WebSub hub to use (default `https://pubsubhubbub.superfeedr.com/`) (make sure to also modify in h2o settings for static files!!)
46 | - `SR2_STATIC_GEN_OUT_DIR`: where to write static HTML (default `out`; also the h2o scripts use it!)
47 |
48 | ## License
49 |
50 | This is free and unencumbered software released into the public domain.
51 | For more information, please refer to the `UNLICENSE` file or [unlicense.org](http://unlicense.org).
52 |
53 | (Note: different licenses apply to dependencies.)
54 |
--------------------------------------------------------------------------------
/config/config.exs:
--------------------------------------------------------------------------------
# `import Config` replaces the deprecated `use Mix.Config` (Elixir >= 1.9,
# which this project already targets — see the credo config comment).
import Config

# Mnesia stores its on-disk data per environment and per node.
config :mnesia, dir: 'priv/db/#{Mix.env()}/#{node()}'

config :elixir, :time_zone_database, Tzdata.TimeZoneDatabase

config :microformats2, atomize_keys: false, underscore_keys: false

# JSON logs in prod (for log shipping), human-readable NiceLogFormatter otherwise.
config :logger, :console,
  format:
    if(Mix.env() == :prod, do: {Timber.Formatter, :format}, else: {NiceLogFormatter, :format}),
  colors: [enabled: false],
  metadata: :all,
  handle_sasl_reports: true

config :floki, :html_parser, Floki.HTMLParser.Html5ever

# Custom Liquid template tags rendered by Sweetroll2.Render.
config :liquid,
  extra_tags: %{
    head: {Sweetroll2.Render.LiquidTags.Head, Liquid.Tag},
    header: {Sweetroll2.Render.LiquidTags.Header, Liquid.Tag},
    footer: {Sweetroll2.Render.LiquidTags.Footer, Liquid.Tag},
    feedpreview: {Sweetroll2.Render.LiquidTags.FeedPreview, Liquid.Tag}
  }

config :event_bus, topics: [:url_updated, :upload_processed], id_generator: EventBus.Util.Base62

# Rate limiter: ETS backend, 4 h expiry, cleanup sweep every 10 min.
config :hammer,
  backend: {Hammer.Backend.ETS, [expiry_ms: 60_000 * 60 * 4, cleanup_interval_ms: 60_000 * 10]}

config :ex_aws,
  json_codec: Jason,
  region: {:system, "SR2_S3_REGION"}

# Cron-style jobs for the Quantum scheduler; heavier jobs run in prod only.
config :sweetroll2, Sweetroll2.Application.Scheduler,
  jobs:
    [
      {"@reboot", {Sweetroll2.Job.Compress, :enqueue_assets, []}},
      {"0 */2 * * *", {Sweetroll2.Job.ClearJobs, :enqueue, []}}
    ] ++
      if(Mix.env() == :prod,
        do: [
          {"1 */2 * * *", {Sweetroll2.Job.Backup, :enqueue, []}},
          {"30 0 */2 * *", {Sweetroll2.Job.SendWebmentions, :enqueue_all, []}},
          {"0 */6 * * *", {Sweetroll2.Job.Generate, :enqueue_all, []}}
        ],
        else: []
      )
49 |
--------------------------------------------------------------------------------
/h2o.yaml:
--------------------------------------------------------------------------------
1 | max-delegations: 5 # default - do not set a different number!
2 | hosts:
3 | "riverwood.lan:80":
4 | listen:
5 | port: 80
6 | paths:
7 | "/":
8 | redirect: https://riverwood.lan
9 | "riverwood.lan:443":
10 | listen:
11 | port: 443
12 | ssl:
13 | key-file: key.pem
14 | certificate-file: cert.pem
15 | min-version: TLSv1.2
16 | compress: ON
17 | file.send-compressed: ON
18 | file.mime.addtypes:
19 | "application/javascript; charset=utf-8": .js
20 | "text/css; charset=utf-8": .css
21 | "text/html; charset=utf-8": .html
22 | file.dirlisting: OFF
23 | paths:
24 | "/":
25 | - reproxy: ON
26 | mruby.handler-file: mrb/root.rb
27 | - proxy.preserve-host: ON
28 | proxy.reverse.url: "http://127.0.0.1:6969/"
29 | "/__as__/":
30 | - mruby.handler-file: mrb/as.rb
31 | - header.add: "x-content-type-options: nosniff"
32 | file.dir: priv/static/
33 | "/__out__/":
34 | - mruby.handler-file: mrb/out.rb
35 | - header.add: 'link: ; rel="hub", ; rel="webmention", ; rel="micropub", ; rel="authorization_endpoint", ; rel="token_endpoint"'
36 | header.add: "strict-transport-security: max-age=31536000"
37 | header.add: "feature-policy: unsized-media 'none'; sync-xhr 'none'; document-write 'none'"
38 | header.add: "referrer-policy: strict-origin"
39 | header.add: "x-content-type-options: nosniff"
40 | file.dir: out/
41 | # https://github.com/snarfed/bridgy/issues/878 → https://issuetracker.google.com/issues/112277350
42 | "/__crapp_engine__/":
43 | - mruby.handler-file: mrb/out.rb
44 | - header.add: 'link: ; rel="hub", ; rel="webmention", ; rel="micropub", ; rel="authorization_endpoint", ; rel="token_endpoint"'
45 | header.add: "strict-transport-security: max-age=31536000"
46 | header.add: "feature-policy: unsized-media 'none'; sync-xhr 'none'; document-write 'none'"
47 | header.add: "referrer-policy: strict-origin"
48 | header.add: "x-content-type-options: nosniff"
49 | compress: OFF
50 | file.send-compressed: OFF
51 | file.dir: out/
52 |
--------------------------------------------------------------------------------
/lib/mix/sweetroll2.bootstrap.ex:
--------------------------------------------------------------------------------
defmodule Mix.Tasks.Sweetroll2.Bootstrap do
  use Mix.Task

  @shortdoc "Adds default entries (pages) to the database"

  @impl Mix.Task
  @doc false
  def run(_args) do
    start_db!()
    Sweetroll2.Application.bootstrap!()
  end

  # Starts Memento/Mnesia and blocks until the posts table is loaded
  # (up to 1 second) before any writes are attempted.
  defp start_db! do
    :ok = Memento.start()
    :ok = :mnesia.wait_for_tables([Sweetroll2.Post], 1000)
  end
end
15 |
--------------------------------------------------------------------------------
/lib/mix/sweetroll2.drop.ex:
--------------------------------------------------------------------------------
defmodule Mix.Tasks.Sweetroll2.Drop do
  use Mix.Task

  @shortdoc "Deletes the Mnesia DB on disk for Sweetroll2"

  # Every table that must be loaded before the database can be dropped.
  @tables [
    Sweetroll2.Post,
    Sweetroll2.Auth.Session,
    Sweetroll2.Auth.TempCode,
    Sweetroll2.Auth.AccessToken,
    Que.Persistence.Mnesia.DB.Jobs
  ]

  @impl Mix.Task
  @doc false
  def run(_args) do
    :ok = Memento.start()
    :ok = :mnesia.wait_for_tables(@tables, 1000)
    Sweetroll2.Application.drop!()
  end
end
26 |
--------------------------------------------------------------------------------
/lib/mix/sweetroll2.export.ex:
--------------------------------------------------------------------------------
defmodule Mix.Tasks.Sweetroll2.Export do
  use Mix.Task

  @shortdoc "Export entries from the Mnesia DB as JSON Lines to stdout"

  @impl Mix.Task
  @doc false
  def run(_raw_args) do
    :ok = Memento.start()
    :ok = :mnesia.wait_for_tables([Sweetroll2.Post], 1000)

    # Read every post in one transaction, then print one JSON object per line.
    posts = Memento.transaction!(fn -> Memento.Query.all(Sweetroll2.Post) end)

    Enum.each(posts, fn post ->
      IO.puts(Jason.encode!(Sweetroll2.Post.to_map(post)))
    end)
  end
end
19 |
--------------------------------------------------------------------------------
/lib/mix/sweetroll2.import.ex:
--------------------------------------------------------------------------------
defmodule Mix.Tasks.Sweetroll2.Import do
  use Mix.Task

  @shortdoc "Import JSON Lines files into the Mnesia DB"

  @impl Mix.Task
  @doc false
  def run(raw_args) do
    # `--domain` may be given multiple times; remaining args are file paths.
    {switches, argv, _invalid} =
      OptionParser.parse(raw_args, strict: [domain: [:string, :keep]])

    # Idiomatic replacement for the hand-rolled reduce: collect every
    # repeated --domain flag. (The reduce also reversed the order as a
    # side effect of prepending; order is immaterial to a filter set.)
    domains = Keyword.get_values(switches, :domain)

    Mix.shell().info("Ignoring domains #{inspect(domains)}")
    :ok = Memento.start()
    :ok = :mnesia.wait_for_tables([Sweetroll2.Post], 1000)
    # Time zone data is needed for timestamp handling during import; the
    # result is deliberately ignored (the holder may already be running).
    Tzdata.EtsHolder.start_link()

    for path <- argv do
      Mix.shell().info("Importing #{path}")
      Sweetroll2.Post.import_json_lines(File.read!(path), domains)
      Mix.shell().info("Finished importing #{path}")
    end
  end
end
29 |
--------------------------------------------------------------------------------
/lib/mix/sweetroll2.setup.ex:
--------------------------------------------------------------------------------
defmodule Mix.Tasks.Sweetroll2.Setup do
  use Mix.Task

  @shortdoc "Creates a Mnesia DB on disk for Sweetroll2"

  # Thin wrapper: all setup logic lives in Sweetroll2.Application.setup!/1.
  @impl Mix.Task
  @doc false
  def run(_args), do: Sweetroll2.Application.setup!()
end
12 |
--------------------------------------------------------------------------------
/lib/nice_log_formatter.ex:
--------------------------------------------------------------------------------
defmodule NiceLogFormatter do
  @moduledoc """
  Human-readable console Logger formatter (used outside prod): colorized
  timestamp, level tag, request/job context badge and structured `:event`
  metadata, followed by the plain message.
  """

  alias IO.ANSI

  # Logger custom-format callback; returns iodata for one log line.
  # The function-level rescue guarantees a malformed metadata term can
  # never crash the logger — it degrades to an inspect dump instead.
  def format(level, message, timestamp, metadata) do
    # IO.inspect(metadata)
    metamap = Enum.into(metadata, %{})

    [
      fmt_time(timestamp),
      fmt_level(level),
      fmt_ctx(metamap),
      Enum.map(metamap[:event] || [], &fmt_evt/1),
      fmt_msg(message),
      "\n"
    ]
  rescue
    e -> "could not format: #{inspect({level, message, metadata, e})}"
  end

  # hh:mm:ss.mmm in dim grey; the date part of the timestamp is dropped.
  defp fmt_time({_, {hour, min, sec, ms}}),
    do: [
      ANSI.light_black(),
      _pad_hms(hour),
      ":",
      _pad_hms(min),
      ":",
      _pad_hms(sec),
      ".",
      _pad_milli(ms),
      ANSI.reset()
    ]

  # Zero-pad to 2 digits (hours/minutes/seconds) and 3 digits (millis).
  defp _pad_hms(x), do: x |> Integer.to_string() |> String.pad_leading(2, "0")
  defp _pad_milli(x), do: x |> Integer.to_string() |> String.pad_leading(3, "0")

  # Colored level tag; unknown levels render as " WTF" in magenta.
  defp fmt_level(:info), do: [ANSI.blue(), " INFO", ANSI.reset()]
  defp fmt_level(:debug), do: [ANSI.light_black(), " DEBUG", ANSI.reset()]
  defp fmt_level(:warn), do: [ANSI.yellow(), " WARN", ANSI.reset()]
  defp fmt_level(:error), do: [ANSI.red(), " ERROR", ANSI.reset()]
  defp fmt_level(_), do: [ANSI.magenta(), " WTF", ANSI.reset()]

  # Context badge: an HTTP request id or a background-job id, also
  # recognising the same ids nested inside a Timber context map.
  defp fmt_ctx(%{request_id: rid}),
    do: [" ", ANSI.cyan_background(), ANSI.black(), "HTTP:", rid, ANSI.reset()]

  defp fmt_ctx(%{job_id: jid}),
    do: [" ", ANSI.green_background(), ANSI.black(), "JOB:", to_string(jid), ANSI.reset()]

  defp fmt_ctx(%{timber_context: %{http: %{request_id: rid}}}), do: fmt_ctx(%{request_id: rid})
  defp fmt_ctx(%{timber_context: %{que: %{job_id: rid}}}), do: fmt_ctx(%{job_id: rid})
  defp fmt_ctx(_), do: []

  # One `key value…` entry from the `:event` metadata keyword list.
  defp fmt_evt({key, val}),
    do: [" ", ANSI.bright(), ANSI.light_white(), to_string(key), ANSI.reset(), fmt_val(val)]

  # Recursive pretty-printer for event values: maps become `k: v` runs
  # (any struct tag is dropped first), lists are bracketed, binaries pass
  # through verbatim, everything else goes through inspect/1.
  defp fmt_val(m) when is_map(m),
    do:
      m
      |> Map.delete(:__struct__)
      |> Enum.flat_map(fn {key, val} ->
        [" ", ANSI.yellow(), to_string(key), ANSI.light_black(), ":", ANSI.reset(), fmt_val(val)]
      end)

  defp fmt_val(l) when is_list(l),
    do: [
      ANSI.light_black(),
      "[ ",
      ANSI.reset(),
      Enum.intersperse(Enum.map(l, &fmt_val/1), [ANSI.light_black(), ", ", ANSI.reset()]),
      ANSI.light_black(),
      " ]",
      ANSI.reset()
    ]

  defp fmt_val(s) when is_binary(s), do: s
  defp fmt_val(x), do: inspect(x)

  # Message after a dim `//` separator; nil/empty messages are omitted.
  defp fmt_msg(nil), do: []
  defp fmt_msg(""), do: []
  defp fmt_msg(s), do: [ANSI.light_black(), " // ", ANSI.reset(), s]
end
81 |
--------------------------------------------------------------------------------
/lib/sweetroll2.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2 do
  @moduledoc false
  @default_home "http://localhost:6969"

  # Canonical scheme+host of the site; falls back to the local dev URL
  # when SR2_CANONICAL_HOME_URL is unset.
  def canonical_home_url, do: System.get_env("SR2_CANONICAL_HOME_URL", @default_home)
end
7 |
--------------------------------------------------------------------------------
/lib/sweetroll2/application.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Application do
  # https://hexdocs.pm/elixir/Application.html
  # https://hexdocs.pm/elixir/Supervisor.html
  @moduledoc false

  alias Supervisor.Spec
  use Application

  # OTP application callback: builds the supervision tree — HTTP server,
  # cron scheduler, three ConCache instances, asset watcher, event bus.
  def start(_type, _args) do
    :ok = Logger.add_translator({Timber.Exceptions.Translator, :translate})

    # Listen on a unix socket when SR2_SERVER_SOCKET is set; otherwise on
    # SR2_SERVER_PORT, defaulting to TCP port 6969.
    server_opts =
      [protocol_options: [idle_timeout: 10 * 60_000]] ++
        case {System.get_env("SR2_SERVER_SOCKET"), System.get_env("SR2_SERVER_PORT")} do
          {nil, nil} -> [port: 6969]
          {nil, port} -> [port: String.to_integer(port)]
          {sock, _} -> [ip: {:local, sock}, port: 0]
        end

    children = [
      Plug.Cowboy.child_spec(scheme: :http, plug: Sweetroll2.Serve, options: server_opts),
      # NOTE(review): Supervisor.Spec is deprecated in modern Elixir; kept
      # as-is since the rest of the tree relies on the current behavior.
      Spec.worker(Sweetroll2.Application.Scheduler, []),
      # Separate caches: asset revision hashes, parsed templates, misc values.
      Supervisor.child_spec(
        {ConCache,
         [name: :asset_rev, ttl_check_interval: :timer.minutes(1), global_ttl: :timer.minutes(60)]},
        id: :cache_asset
      ),
      Supervisor.child_spec(
        {ConCache,
         [
           name: :parsed_tpl,
           ttl_check_interval: :timer.minutes(30),
           global_ttl: :timer.hours(12)
         ]},
        id: :cache_tpl
      ),
      Supervisor.child_spec(
        {ConCache,
         [
           name: :misc,
           ttl_check_interval: :timer.minutes(30),
           global_ttl: :timer.hours(12)
         ]},
        id: :cache_misc
      ),
      {Sweetroll2.Job.Compress.AssetWatcher, dirs: ["priv/static"]},
      Sweetroll2.Events
    ]

    opts = [strategy: :one_for_one, name: Sweetroll2.Supervisor]
    Supervisor.start_link(children, opts)
  end

  # Creates the on-disk Mnesia schema (under the configured :mnesia dir)
  # and every table the application uses, on the given nodes.
  def setup!(nodes \\ [node()]) do
    if path = Application.get_env(:mnesia, :dir) do
      :ok = File.mkdir_p!(path)
    end

    # Schema creation requires Mnesia to be stopped first.
    Memento.stop()
    Memento.Schema.create(nodes)
    Memento.start()
    Que.Persistence.Mnesia.setup!()
    Memento.Table.create!(Sweetroll2.Post, disc_copies: nodes)
    Memento.Table.create!(Sweetroll2.MediaUpload, disc_copies: nodes)
    Memento.Table.create!(Sweetroll2.Auth.Session, disc_copies: nodes)
    Memento.Table.create!(Sweetroll2.Auth.TempCode, disc_copies: nodes)
    Memento.Table.create!(Sweetroll2.Auth.AccessToken, disc_copies: nodes)
  end

  # Deletes every application table: posts, uploads, auth, and job queue.
  def drop! do
    Memento.Table.delete(Sweetroll2.Post)
    Memento.Table.delete(Sweetroll2.MediaUpload)
    Memento.Table.delete(Sweetroll2.Auth.Session)
    Memento.Table.delete(Sweetroll2.Auth.TempCode)
    Memento.Table.delete(Sweetroll2.Auth.AccessToken)
    Memento.Table.delete(Que.Persistence.Mnesia.DB.Jobs)
  end

  # Seeds a fresh database: home page, the standard dynamic feeds,
  # the private inbox feed, and one example note — all in one transaction.
  def bootstrap! do
    Memento.transaction!(fn ->
      # Home page; its HTML body is a Liquid template.
      # NOTE(review): the markup inside the ~S[...] literal below looks
      # like its HTML tags were stripped at some point (e.g. the bare
      # "Hello" line) — verify against the original template.
      Memento.Query.write(%Sweetroll2.Post{
        url: "/",
        status: :published,
        type: "x-custom-page",
        props: %{
          "name" => "Home",
          "site-name" => "a new sweetroll2 website",
          # credo:disable-for-lines:22
          "content" => [%{"html" => ~S[

{% head %}

{% header %}




Hello

Congratulations, new Sweetroll2 user! Please customize this page by logging in and clicking Edit.




{% feedpreview /posts %}



{% footer %}

]}]
        }
      })

      # Per-tag dynamic feed template; ~S keeps "#{tag}" as a literal
      # placeholder (presumably substituted per tag by the feed code —
      # TODO confirm against Sweetroll2.Post.Generative.Tag).
      Memento.Query.write(%Sweetroll2.Post{
        url: "/tag",
        status: :published,
        type: "x-dynamic-tag-feed",
        props: %{
          "name" => ~S[#{tag}],
          "feed-settings" => [
            %{"show-in-nav" => false, "show-in-post" => true, "order-in-post" => 69}
          ],
          "filter" => [
            %{"category" => ["{tag}"]}
          ]
        }
      })

      # Main combined feed: notes + articles + reposts + explicitly shown posts.
      Memento.Query.write(%Sweetroll2.Post{
        url: "/posts",
        status: :published,
        type: "x-dynamic-feed",
        props: %{
          "name" => "Posts",
          "feed-settings" => [
            %{"order-in-nav" => 0, "show-in-nav" => true, "show-in-post" => true}
          ],
          "filter" => [
            %{"category" => ["_notes"]},
            %{"category" => ["_articles"]},
            %{"category" => ["_reposts"]},
            %{"index-display" => ["show"]}
          ],
          "unfilter" => [
            %{"client-id" => ["https://micropub.rocks/"]},
            %{"index-display" => ["hide"]}
          ]
        }
      })

      Memento.Query.write(%Sweetroll2.Post{
        url: "/notes",
        status: :published,
        type: "x-dynamic-feed",
        props: %{
          "name" => "Notes",
          "feed-settings" => [
            %{"show-in-nav" => false, "show-in-post" => true}
          ],
          "filter" => [
            %{"category" => ["_notes"]}
          ],
          "unfilter" => [
            %{"client-id" => ["https://micropub.rocks/"]}
          ]
        }
      })

      Memento.Query.write(%Sweetroll2.Post{
        url: "/replies",
        status: :published,
        type: "x-dynamic-feed",
        props: %{
          "name" => "Replies",
          "feed-settings" => [
            %{"show-in-nav" => false, "show-in-post" => true}
          ],
          "filter" => [
            %{"category" => ["_replies"]}
          ],
          "unfilter" => [
            %{"client-id" => ["https://micropub.rocks/"]}
          ]
        }
      })

      Memento.Query.write(%Sweetroll2.Post{
        url: "/likes",
        status: :published,
        type: "x-dynamic-feed",
        props: %{
          "name" => "Likes",
          "feed-settings" => [
            %{"show-in-nav" => false, "show-in-post" => true}
          ],
          "filter" => [
            %{"category" => ["_likes"]}
          ],
          "unfilter" => [
            %{"client-id" => ["https://micropub.rocks/"]}
          ]
        }
      })

      Memento.Query.write(%Sweetroll2.Post{
        url: "/rsvp",
        status: :published,
        type: "x-dynamic-feed",
        props: %{
          "name" => "RSVPs",
          "feed-settings" => [
            %{"show-in-nav" => false, "show-in-post" => true}
          ],
          "filter" => [
            %{"category" => ["_rsvp"]}
          ],
          "unfilter" => [
            %{"client-id" => ["https://micropub.rocks/"]}
          ]
        }
      })

      # Photos feed uses the gallery layout.
      Memento.Query.write(%Sweetroll2.Post{
        url: "/photos",
        status: :published,
        type: "x-dynamic-feed",
        props: %{
          "name" => "Photos",
          "feed-settings" => [
            %{
              "order-in-nav" => 10,
              "show-in-nav" => true,
              "show-in-post" => true,
              "layout" => "gallery"
            }
          ],
          "filter" => [
            %{"category" => ["_photos"]}
          ],
          "unfilter" => [
            %{"client-id" => ["https://micropub.rocks/"]}
          ]
        }
      })

      # Knowledge base: unpaginated ("limit" 0), sorted by name, custom layout.
      Memento.Query.write(%Sweetroll2.Post{
        url: "/kb",
        status: :published,
        type: "x-dynamic-feed",
        props: %{
          "name" => "KB",
          "feed-settings" => [
            %{
              "order-in-nav" => 20,
              "show-in-nav" => true,
              "show-in-post" => true,
              "layout" => "knowledgebase",
              "limit" => 0,
              "sort" => "name"
            }
          ],
          "filter" => [
            %{"category" => ["_kb"]}
          ],
          "unfilter" => [
            %{"client-id" => ["https://micropub.rocks/"]}
          ]
        }
      })

      Memento.Query.write(%Sweetroll2.Post{
        url: "/bookmarks",
        status: :published,
        type: "x-dynamic-feed",
        props: %{
          "name" => "Bookmarks",
          "feed-settings" => [
            %{"order-in-nav" => 30, "show-in-nav" => true, "show-in-post" => true}
          ],
          "filter" => [
            %{"category" => ["_bookmarks"]}
          ],
          "unfilter" => [
            %{"client-id" => ["https://micropub.rocks/"]}
          ]
        }
      })

      # Inbox feed is private (received mentions), with empty filters.
      Memento.Query.write(%Sweetroll2.Post{
        url: "/inbox",
        status: :private,
        type: "x-inbox-feed",
        props: %{
          "name" => "Inbox",
          "feed-settings" => [
            %{"order-in-nav" => 30, "show-in-nav" => true, "show-in-post" => true}
          ],
          "filter" => [
            %{}
          ],
          "unfilter" => [
            %{}
          ]
        }
      })

      now = DateTime.utc_now()

      # One example note so the fresh site isn't empty; the URL embeds the
      # current timestamp with spaces replaced by dashes.
      Memento.Query.write(%Sweetroll2.Post{
        url: "/notes/#{to_string(now) |> String.replace(" ", "-")}",
        status: :published,
        type: "entry",
        published: now,
        props: %{
          "category" => ["_notes"],
          "content" => [
            %{
              "markdown" =>
                "Welcome to *Sweetroll2*! This is an example note. You can delete it and write your own instead :)"
            }
          ]
        }
      })
    end)
  end

  # Quantum-based cron scheduler; job definitions live in config.exs
  # under `config :sweetroll2, Sweetroll2.Application.Scheduler`.
  defmodule Scheduler do
    use Quantum.Scheduler, otp_app: :sweetroll2
  end
end
331 |
--------------------------------------------------------------------------------
/lib/sweetroll2/auth/access_token.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Auth.AccessToken do
  @moduledoc """
  A Mnesia table for storing IndieAuth (OAuth) access-tokens.
  """

  # Token lifetime in seconds (365.25 days).
  @expiration 31_557_600

  require Logger

  use Memento.Table,
    attributes: [:token, :used_tempcode, :grant_date, :client_id, :scopes, :revoked]

  # Mints a new access token for an authorized temp code and persists it.
  # The "T-" prefix distinguishes access tokens from "C-" cookie sessions
  # (see Sweetroll2.Auth.Bearer). Returns the token string.
  def create(%Sweetroll2.Auth.TempCode{code: tempcode, client_id: client_id, scopes: scopes}) do
    token = "T-" <> Nanoid.Secure.generate()

    Memento.transaction!(fn ->
      now = DateTime.utc_now()

      Memento.Query.write(%__MODULE__{
        token: token,
        used_tempcode: tempcode,
        grant_date: now,
        client_id: client_id,
        scopes: scopes,
        revoked: false
      })
    end)

    token
  end

  # Marks the token as revoked. Crashes (transaction abort) if the token
  # does not exist — callers are expected to pass known tokens.
  def revoke(token) when is_binary(token) do
    Memento.transaction!(fn ->
      accesstoken = Memento.Query.read(__MODULE__, token)
      Memento.Query.write(%{accesstoken | revoked: true})
    end)
  end

  # Returns the token record when it exists, is not revoked, and has not
  # expired; nil otherwise. Any lookup failure (e.g. unknown token makes
  # `accesstoken.revoked` raise on nil) is caught by the rescue below and
  # logged as "not valid" — deliberate, so callers get a simple nil.
  def get_if_valid(token) when is_binary(token) do
    Memento.transaction!(fn ->
      accesstoken = Memento.Query.read(__MODULE__, token)

      valid =
        !accesstoken.revoked &&
          DateTime.compare(
            DateTime.utc_now(),
            DateTime.add(accesstoken.grant_date, @expiration, :second)
          ) == :lt

      if valid do
        accesstoken
      else
        nil
      end
    end)
  rescue
    err ->
      Logger.warn("token not valid", event: %{access_token_not_valid: %{error: inspect(err)}})
      nil
  end

  # Returns the client_id for a token, or nil when the token is unknown.
  # Note: does not check revocation or expiry — lookup only.
  def get_client_id(token) when is_binary(token) do
    Memento.transaction!(fn ->
      accesstoken = Memento.Query.read(__MODULE__, token)

      if !is_nil(accesstoken), do: accesstoken.client_id, else: nil
    end)
  end
end
70 |
--------------------------------------------------------------------------------
/lib/sweetroll2/auth/bearer.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Auth.Bearer do
  @moduledoc """
  Common entry point for checking tokens, which can be both
  cookie sessions and OAuth access tokens.
  """

  require Logger
  alias Sweetroll2.Auth.{Session, AccessToken}

  # Checks whether a token grants the given scope (nil scope = any valid
  # token is enough). Token kind is dispatched on its prefix:
  # "C-" cookie session, "T-" OAuth access token.
  def is_allowed?(token, scope \\ nil)

  def is_allowed?("C-" <> _ = token, _) do
    # Cookie sessions can do anything
    !is_nil(Session.get_if_valid(token))
  end

  def is_allowed?("T-" <> _ = token, scope) do
    accesstoken = AccessToken.get_if_valid(token)

    if accesstoken do
      # Fix: the original also checked `!is_nil(accesstoken)` here, which
      # is always true inside this branch — dead condition removed.
      result = is_nil(scope) or to_string(scope) in accesstoken.scopes

      Logger.info("checking scope #{to_string(scope)}",
        event: %{
          access_token_scope_check: %{scope: scope, allowed: accesstoken.scopes, result: result}
        }
      )

      result
    else
      Logger.info("no access token", event: %{access_token_not_found: %{}})
      false
    end
  end
end
36 |
--------------------------------------------------------------------------------
/lib/sweetroll2/auth/serve.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Auth.Serve do
  @moduledoc """
  Plug router serving the login/logout pages and the IndieAuth authorization
  and token endpoints (https://indieauth.spec.indieweb.org/).
  """

  require Logger
  import ExEarlyRet
  alias Sweetroll2.{Auth.AccessToken, Auth.Session, Auth.TempCode, Render}
  use Plug.Router

  plug :match
  plug :dispatch

  get "/login" do
    {:safe, body} =
      Render.page_login(
        err: nil,
        redir: conn.query_params["redirect_uri"] || "/",
        # fresh CSP nonce per render, for whitelisting inline assets
        csp_nonce: :crypto.strong_rand_bytes(24) |> Base.url_encode64()
      )

    resp(conn, :ok, body)
  end

  post "/login" do
    # TODO: use hashcash to prevent spam logins from starting slow argon2 calc
    # TODO: 2FA
    # NOTE(review): assumes SR2_PASSWORD_HASH is always set; Argon2.verify_pass
    # with a nil hash would raise — confirm the deployment guarantees it.
    if conn.body_params["pwd"] &&
         Argon2.verify_pass(conn.body_params["pwd"], System.get_env("SR2_PASSWORD_HASH")) do
      # BUGFIX: conn.req_headers is a list of {"name", "value"} binary pairs,
      # so indexing it with the atom :"user-agent" always returned nil and
      # sessions were stored without a user agent. Use get_req_header/2.
      token = Session.create(user_agent: conn |> get_req_header("user-agent") |> List.first())

      conn
      |> Session.set_cookie(token)
      |> put_resp_header("Location", conn.body_params["redirect_uri"] || "/")
      |> resp(:found, "")
    else
      {:safe, body} =
        Render.page_login(
          err: "No correct password provided",
          redir: nil,
          csp_nonce: :crypto.strong_rand_bytes(24) |> Base.url_encode64()
        )

      resp(conn, :ok, body)
    end
  end

  post "/logout" do
    token = Session.current_token(conn)

    if token do
      Session.revoke(token)
    end

    conn
    |> Session.drop_cookie()
    |> put_resp_header("Location", (conn.body_params && conn.body_params["redirect_uri"]) || "/")
    |> resp(:found, "")
  end

  # https://indieauth.spec.indieweb.org/#authorization-endpoint-0

  get "/authorize" do
    if is_nil(Session.current_token(conn)) do
      # Not logged in: bounce through the login page, then return here.
      conn
      |> put_resp_header(
        "Location",
        "/__auth__/login?#{
          URI.encode_query(%{"redirect_uri" => "/__auth__/authorize?" <> conn.query_string})
        }"
      )
      |> resp(:found, "")
    else
      Logger.info(
        "authorize request",
        event: %{
          authorize_request: %{params: conn.query_params, our_home: Process.get(:our_home_url)}
        }
      )

      # Validate the request, but render the page either way (showing `err`).
      {status, err} =
        cond do
          me_param(conn) != Process.get(:our_home_url) ->
            {:bad_request, "Wrong host"}

          is_nil(conn.query_params["redirect_uri"]) or
              !String.starts_with?(conn.query_params["redirect_uri"], "http") ->
            {:bad_request, "No valid redirect URI"}

          is_nil(conn.query_params["client_id"]) ->
            {:bad_request, "No client ID"}

          conn.query_params["response_type"] != "id" and
              conn.query_params["response_type"] != "code" ->
            {:bad_request, "Unknown response type"}

          true ->
            {:ok, nil}
        end

      # TODO fetch client_id
      {:safe, body} =
        Render.page_authorize(
          err: err,
          query: conn.query_params,
          scopes: split_scopes(conn.query_params["scope"]),
          csp_nonce: :crypto.strong_rand_bytes(24) |> Base.url_encode64()
        )

      resp(conn, status, body)
    end
  end

  # The logged-in user approved the client on the authorize page: mint a
  # temp code and redirect back to the client with code + state attached.
  post "/allow" do
    if is_nil(Session.current_token(conn)) do
      resp(conn, :unauthorized, "WTF")
    else
      code =
        TempCode.create(
          session: Session.current_token(conn),
          client_id: conn.body_params["client_id"],
          redirect_uri: conn.body_params["redirect_uri"],
          scopes: split_scopes(conn.body_params["scope"])
        )

      orig_uri = URI.parse(conn.body_params["redirect_uri"])

      new_query =
        URI.decode_query(orig_uri.query || "")
        |> Map.put("code", code)
        |> Map.put("state", conn.body_params["state"])

      new_uri = %{orig_uri | query: URI.encode_query(new_query)}

      conn
      |> put_resp_header("Location", URI.to_string(new_uri))
      |> resp(:found, "")
    end
  end

  # https://indieauth.spec.indieweb.org/#authorization-code-verification
  #
  # NOTE: the validation sequence below is intentionally parallel to
  # POST /token — keep the two in sync.

  post "/authorize" do
    {status, body} =
      earlyret do
        redir = conn.body_params["redirect_uri"]

        ret_if is_nil(redir) or !String.starts_with?(redir, "http"),
          do: {:bad_request, "No valid redirect URI"}

        clid = conn.body_params["client_id"]

        ret_if is_nil(clid), do: {:bad_request, "No client ID"}

        ret_if is_nil(conn.body_params["code"]), do: {:bad_request, "No code"}

        tempcode = TempCode.get_if_valid(conn.body_params["code"])

        ret_if is_nil(tempcode), do: {:bad_request, "Code is not valid"}

        ret_if tempcode.redirect_uri != redir,
          do:
            {:bad_request,
             "redirect_uri does not match: '#{redir}' vs '#{tempcode.redirect_uri}'"}

        ret_if tempcode.client_id != clid,
          do: {:bad_request, "client_id does not match: '#{clid}' vs '#{tempcode.client_id}'"}

        TempCode.use(tempcode.code)

        # Jason.encode/1 returns {:ok, json}, so `status` becomes :ok here.
        Jason.encode(%{
          me: Process.get(:our_home_url)
        })
      end

    if status == :bad_request,
      do: Logger.error(body, event: %{authorization_failed: %{reason: body}})

    conn
    |> put_resp_content_type(if status == :ok, do: "application/json", else: "text/plain")
    |> resp(status, body)
  end

  # https://indieauth.spec.indieweb.org/#token-endpoint-0

  post "/token" do
    {status, body} =
      earlyret do
        ret_if conn.body_params["grant_type"] != "authorization_code",
          do: {:bad_request, "No/unknown grant type"}

        ret_if me_param(conn) != Process.get(:our_home_url), do: {:bad_request, "Wrong host"}

        redir = conn.body_params["redirect_uri"]

        ret_if is_nil(redir) or !String.starts_with?(redir, "http"),
          do: {:bad_request, "No valid redirect URI"}

        clid = conn.body_params["client_id"]

        ret_if is_nil(clid), do: {:bad_request, "No client ID"}

        ret_if is_nil(conn.body_params["code"]), do: {:bad_request, "No code"}

        tempcode = TempCode.get_if_valid(conn.body_params["code"])

        ret_if is_nil(tempcode), do: {:bad_request, "Code is not valid"}

        ret_if tempcode.redirect_uri != redir,
          do:
            {:bad_request,
             "redirect_uri does not match: '#{redir}' vs '#{tempcode.redirect_uri}'"}

        ret_if tempcode.client_id != clid,
          do: {:bad_request, "client_id does not match: '#{clid}' vs '#{tempcode.client_id}'"}

        TempCode.use(tempcode.code)
        token = AccessToken.create(tempcode)

        Jason.encode(%{
          token_type: "Bearer",
          access_token: token,
          me: Process.get(:our_home_url),
          scope: Enum.join(tempcode.scopes, " ")
        })
      end

    if status == :bad_request,
      do: Logger.error(body, event: %{token_grant_failed: %{reason: body}})

    conn
    |> put_resp_content_type(if status == :ok, do: "application/json", else: "text/plain")
    |> resp(status, body)
  end

  # Defaults to "create", caps length as a cheap DoS guard, rewrites the
  # legacy "post" scope name to "create", then splits on whitespace.
  defp split_scopes(scope) do
    (scope || "create")
    |> String.slice(0..420)
    |> String.replace("post", "create")
    |> String.split()
  end

  # The `me` param from query or body, with trailing slashes stripped so it
  # compares equal to our canonical home URL.
  defp me_param(conn) do
    (conn.query_params["me"] || conn.body_params["me"] || "")
    |> String.replace_trailing("/", "")
  end
end
244 |
--------------------------------------------------------------------------------
/lib/sweetroll2/auth/session.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Auth.Session do
  @moduledoc """
  A Mnesia table for storing site sessions (cookie based).
  And a Plug middleware for supporting sessions.

  We don't use Plug.Session because there's no need for backend flexibility
  and `put` being able to generate new sessions sounds weird.
  We only ever want to generate sessions in the login page.

  We do implement :plug_session stuff though, so e.g. Plug.CSRFProtection works.
  """
  @behaviour Plug

  # Sessions (and their cookies) live this long after start_date, in seconds.
  @expiration 31_557_600
  @cookie_opts [max_age: @expiration, http_only: true, extra: "SameSite=Strict"]
  # NOTE: Plug automatically sets Secure

  require Logger
  alias Plug.Conn

  use Memento.Table,
    attributes: [:token, :revoked, :data, :start_date, :start_user_agent, :last_access]

  @doc "Creates and persists a fresh session, returning its token."
  def create(user_agent: user_agent) do
    token = "C-" <> Nanoid.Secure.generate()

    Memento.transaction!(fn ->
      now = DateTime.utc_now()

      Memento.Query.write(%__MODULE__{
        token: token,
        revoked: false,
        data: %{},
        start_date: now,
        start_user_agent: user_agent,
        last_access: now
      })
    end)

    token
  end

  @doc "Marks the session with this token as revoked; a no-op for unknown tokens."
  def revoke(token) when is_binary(token) do
    Memento.transaction!(fn ->
      session = Memento.Query.read(__MODULE__, token)

      if session do
        Memento.Query.write(%{session | revoked: true})
      end
    end)
  end

  @doc """
  Returns the session for `token` (bumping its `last_access`) if it exists,
  is not revoked, and has not expired; otherwise nil.
  """
  def get_if_valid(token) when is_binary(token) do
    Memento.transaction!(fn ->
      session = Memento.Query.read(__MODULE__, token)

      # An unknown token makes `session.revoked` raise on nil, landing in the
      # rescue clause below (which also catches any Mnesia failure).
      valid =
        !session.revoked &&
          DateTime.compare(
            DateTime.utc_now(),
            DateTime.add(session.start_date, @expiration, :second)
          ) == :lt

      if valid do
        session = %{session | last_access: DateTime.utc_now()}
        Memento.Query.write(session)
        session
      else
        nil
      end
    end)
  rescue
    err ->
      Logger.warn("session #{token} not valid: #{inspect(err)}",
        event: %{session_token_not_valid: %{token: token, error: inspect(err)}}
      )

      nil
  end

  # The __Host- prefix makes browsers enforce Secure, no Domain, and Path=/ —
  # only usable when serving over https.
  def cookie_key(%{scheme: :https}), do: "__Host-wheeeee"
  def cookie_key(_), do: "wheeeee"

  @doc "Stores the session token in the session cookie."
  def set_cookie(conn, token) when is_binary(token) do
    Conn.put_resp_cookie(conn, cookie_key(conn), token, @cookie_opts)
  end

  @doc "Expires the session cookie."
  def drop_cookie(conn) do
    Conn.delete_resp_cookie(conn, cookie_key(conn), @cookie_opts)
  end

  @doc "Gets the current session token after it was validated (don't forget to fetch_session!)"
  def current_token(conn) do
    conn.private[:sr2_session_token]
  end

  @impl true
  def init(_opts), do: %{}

  # Installs a lazy session fetcher (run by Plug.Conn.fetch_session/1):
  # validates the cookie token, exposes session data as :plug_session,
  # writes modified data back before sending the response, and forwards the
  # token as a bearer Authorization header for the micropub middleware.
  @impl true
  def call(conn, _) do
    Conn.put_private(conn, :plug_session_fetch, fn conn ->
      conn = Conn.fetch_cookies(conn)

      if (token = conn.cookies[cookie_key(conn)]) && token != "" do
        if session = get_if_valid(token) do
          conn
          |> Conn.put_private(:sr2_session_token, token)
          |> Conn.put_private(:plug_session, session.data)
          |> Conn.register_before_send(fn conn ->
            if conn.private[:plug_session_info] == :write do
              Memento.transaction!(fn ->
                Memento.Query.write(%{session | data: conn.private[:plug_session]})
              end)
            end

            # we don't care about other actions
            conn
          end)
          |> Conn.put_req_header("authorization", "Bearer " <> token)
          # header for micropub middleware (will be reverified there, boo inefficiency)
          |> Conn.put_resp_header("X-Frame-Options", "deny")
          |> Conn.put_resp_header("Content-Security-Policy", "frame-ancestors 'none'")

          # CSP combines with the one in the page
        else
          # Invalid/expired token: clear the cookie, start with empty data.
          drop_cookie(conn)
          |> Conn.put_private(:plug_session, %{})
        end
      else
        conn
        |> Conn.put_private(:plug_session, %{})
      end
      |> Conn.put_private(:plug_session_fetch, :done)
    end)
  end
end
138 |
--------------------------------------------------------------------------------
/lib/sweetroll2/auth/temp_code.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Auth.TempCode do
  @moduledoc """
  A Mnesia table for storing IndieAuth (OAuth) authorization codes.
  """

  # Codes expire this many seconds after being granted.
  @expiration 600

  require Logger

  use Memento.Table,
    attributes: [:code, :used_session, :grant_date, :client_id, :redirect_uri, :scopes, :used]

  @doc "Mints, stores, and returns a fresh single-use authorization code."
  def create(session: session, client_id: client_id, redirect_uri: redirect_uri, scopes: scopes)
      when is_binary(session) and is_binary(client_id) and is_binary(redirect_uri) and
             is_list(scopes) do
    new_code = Nanoid.Secure.generate()

    Memento.transaction!(fn ->
      Memento.Query.write(%__MODULE__{
        code: new_code,
        used_session: session,
        grant_date: DateTime.utc_now(),
        client_id: client_id,
        redirect_uri: redirect_uri,
        scopes: scopes,
        used: false
      })
    end)

    new_code
  end

  @doc "Marks the code as used so it cannot be redeemed again."
  def use(code) when is_binary(code) do
    Memento.transaction!(fn ->
      entry = Memento.Query.read(__MODULE__, code)
      Memento.Query.write(%{entry | used: true})
    end)
  end

  @doc """
  Returns the record for `code` when it exists, is unused, and has not
  expired; otherwise nil (an unknown code raises internally and is logged).
  """
  def get_if_valid(code) when is_binary(code) do
    Memento.transaction!(fn ->
      entry = Memento.Query.read(__MODULE__, code)

      # An unknown code makes `entry.used` raise on nil, which drops us into
      # the rescue clause below.
      fresh =
        !entry.used &&
          DateTime.compare(
            DateTime.utc_now(),
            DateTime.add(entry.grant_date, @expiration, :second)
          ) == :lt

      if fresh, do: entry
    end)
  rescue
    err ->
      Logger.warn("tempcode not valid",
        event: %{temp_code_not_valid: %{code: code, error: inspect(err)}}
      )

      nil
  end
end
68 |
--------------------------------------------------------------------------------
/lib/sweetroll2/convert.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Convert do
  require Logger

  @doc "Unwraps a microformats value: lists yield their first element, anything else passes through."
  def as_one([head | _]), do: head
  def as_one([]), do: nil
  def as_one(x), do: x

  @doc "Wraps a value in a list; nil becomes the empty list, lists pass through."
  def as_many(nil), do: []
  def as_many(xs) when is_list(xs), do: xs
  def as_many(x), do: [x]

  @doc """
  Flattens a parsed microformats2 object: hoists `properties` to the top
  level, unwraps one-element lists, and replaces the `type` list with a
  single "h-"-stripped string. Plain maps are simplified recursively.
  """
  def simplify(map) when is_map(map) do
    type = map[:type] || map["type"]
    props = map[:properties] || map["properties"] || map[:props]

    if type && props && is_map(props) do
      props
      |> Map.new(&simplify/1)
      |> Map.put("type", String.replace_prefix(List.first(type || []), "h-", ""))
    else
      Map.new(map, &simplify/1)
    end
  end

  # Map entries: unwrap singleton lists, recurse into longer lists/values.
  def simplify({key, [only]}), do: {key, simplify(only)}
  def simplify({key, values}) when is_list(values), do: {key, Enum.map(values, &simplify/1)}
  def simplify({key, value}), do: {key, simplify(value)}
  def simplify(other), do: other

  @doc "Picks the parsed item whose url property contains `url`, falling back to the first item."
  def find_mf_with_url(%{items: items}, url) do
    matching =
      Enum.find(items, fn item ->
        url in (item["properties"]["url"] || [])
      end)

    matching || List.first(items)
  end

  @doc "Shrinks an HTTP response into a loggable map (status, header strings, body string)."
  def resp2log(%{status: status, headers: headers, body: body}) do
    %{
      status: status,
      headers: Enum.map(headers, fn {name, value} -> "#{name}: #{value}" end),
      body: to_string(body)
    }
  end
end
47 |
--------------------------------------------------------------------------------
/lib/sweetroll2/events.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Events do
  @moduledoc """
  A GenServer for automatic event handling.

  Debounces "URL updated" notifications and publishes them on the EventBus,
  which in turn queues regeneration plus WebSub/Webmention jobs for each URL.
  """

  # How long (ms) a URL must stay quiet before its update event actually fires.
  @debounce_ms 2000

  require Logger
  alias Sweetroll2.{Post, Job}
  use EventBus.EventSource
  use GenServer

  def start_link(_), do: GenServer.start_link(__MODULE__, :ok, name: __MODULE__)

  # State is a map of url => Debounce pid for in-flight debounces.
  @impl true
  def init(:ok) do
    EventBus.subscribe({__MODULE__, ["url_updated"]})
    {:ok, %{}}
  end

  # A url_updated event arrived via the bus: drop stale generated output,
  # clear the cached template, and queue regeneration with follow-up
  # WebSub/Webmention notification jobs.
  @impl true
  def handle_cast({:url_updated, _id} = event_shadow, state) do
    %{data: %SSE.Chunk{data: url}} = EventBus.fetch_event(event_shadow)

    Job.Generate.remove_generated(url)
    Post.Page.clear_cached_template(url: url)

    Que.add(Job.Generate,
      urls: [url],
      next_jobs: [
        {Job.NotifyWebsub, home: Sweetroll2.canonical_home_url(), url: url},
        {Job.SendWebmentions, url: url, our_home_url: Sweetroll2.canonical_home_url()}
      ]
    )

    EventBus.mark_as_completed({__MODULE__, event_shadow})
    {:noreply, state}
  end

  # Debounce fired: actually publish the event and forget the debouncer.
  @impl true
  def handle_cast({:notify_url_for_real, url}, state) do
    Logger.debug("finished debounce for url '#{url}', notifying event bus",
      event: %{debounce_finished: %{url: url}}
    )

    EventSource.notify(%{topic: :url_updated}, do: %SSE.Chunk{data: url})
    {:noreply, Map.delete(state, url)}
  end

  # Request to notify about a URL: reset its pending debounce if one exists,
  # otherwise start a new one that casts :notify_url_for_real when it fires.
  @impl true
  def handle_cast({:notify_url_req, url}, state) do
    if Map.has_key?(state, url) do
      Debounce.apply(state[url])

      Logger.debug("reset debounce for url '#{url}': #{inspect(state[url])}",
        event: %{debounce_reset: %{url: url}}
      )

      {:noreply, state}
    else
      {:ok, pid} =
        Debounce.start_link(
          {GenServer, :cast, [__MODULE__, {:notify_url_for_real, url}]},
          @debounce_ms
        )

      Debounce.apply(pid)

      Logger.debug("started debounce for url '#{url}': #{inspect(pid)}",
        event: %{debounce_started: %{url: url}}
      )

      {:noreply, Map.put(state, url, pid)}
    end
  end

  @doc "callback for EventBus"
  def process(event_shadow) do
    GenServer.cast(__MODULE__, event_shadow)
    :ok
  end

  @doc "Debounce-notifies about each URL and, recursively, every URL it affects."
  def notify_urls_updated([]) do
  end

  def notify_urls_updated(urls) when is_list(urls) do
    for url <- urls do
      GenServer.cast(__MODULE__, {:notify_url_req, url})
      aff = affected_urls(url)

      Logger.info("updating affected urls",
        event: %{affected_discovered: %{url: url, affected: aff}}
      )

      notify_urls_updated(aff)
    end
  end

  # Finds other local URLs whose rendered output depends on `url`: dynamic
  # feeds containing the post (and their generated children), custom pages
  # embedding those feeds, and the post's own generated child URLs.
  defp affected_urls(url) do
    posts = %Post.DbAsMap{}

    if is_nil(posts[url]) do
      []
    else
      local_urls = Post.urls_local()

      # TODO: use a previous copy of the post to find feeds that formerly contained it!!

      aff_feeds =
        Post.filter_type(local_urls, posts, ["x-dynamic-feed", "x-dynamic-tag-feed"])
        |> Post.Generative.Tag.feeds_get_with_tags(posts: posts, local_urls: local_urls)
        |> Enum.filter(&Post.Generative.Feed.in_feed?(posts[url], &1))

      aff_page_urls =
        Post.filter_type(local_urls, posts, "x-custom-page")
        |> Enum.filter(fn page_url ->
          Enum.any?(Post.Page.used_feeds(posts[page_url]), &(&1 == url))
        end)

      Enum.flat_map(
        aff_feeds,
        &[&1.url | Post.Generative.child_urls_rec(&1, posts, local_urls)]
      ) ++ aff_page_urls ++ Post.Generative.child_urls_rec(posts[url], posts, local_urls)
    end
  end
end
127 |
--------------------------------------------------------------------------------
/lib/sweetroll2/http_client.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.HttpClient do
  @moduledoc """
  Shared Tesla HTTP client: hackney adapter with TLS verification, plus
  timeout/retry/redirect/compression middleware and a fixed user agent.
  """

  use Tesla

  adapter(Tesla.Adapter.Hackney,
    recv_timeout: 10_000,
    ssl_options: [
      verify: :verify_peer,
      verify_fun: &:ssl_verify_hostname.verify_fun/3,
      depth: 69,
      cacertfile: default_cert_bundle()
    ]
  )

  plug Tesla.Middleware.Timeout, timeout: 11_000
  plug Tesla.Middleware.Retry, max_retries: 3
  plug Tesla.Middleware.FollowRedirects, max_redirects: 3
  plug Tesla.Middleware.Compression, format: "gzip"
  plug Tesla.Middleware.Headers, [{"user-agent", "Sweetroll2 (Tesla/hackney)"}]

  plug Tesla.Middleware.FormUrlencoded,
    encode: &Plug.Conn.Query.encode/1,
    decode: &Plug.Conn.Query.decode/1

  # Well-known CA bundle locations, in preference order.
  @cert_paths [
    "/etc/ssl/cert.pem",
    "/etc/pki/tls/cert.pem",
    "/usr/lib/ssl/cert.pem",
    "/etc/ssl/certs/ca-certificates.crt"
  ]

  # Picks the first CA bundle present on this system, falling back to the
  # :certifi package when it is loadable, or nil when nothing is found.
  defp default_cert_bundle do
    case Enum.find(@cert_paths, &File.exists?/1) do
      nil ->
        if Code.ensure_loaded(:certifi) == {:module, :certifi},
          do: apply(:certifi, :cacertfile, []),
          else: nil

      path ->
        path
    end
  end
end
35 |
--------------------------------------------------------------------------------
/lib/sweetroll2/job/backup.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Job.Backup do
  @moduledoc """
  Que job that dumps the Mnesia database to a backup file and runs an
  optional shell hook afterwards.
  """

  @hook_path "priv/hooks/backup"

  use Que.Worker

  # Writes the backup to `path` (creating parent directories) and, when the
  # hook script exists, invokes it with the backup path as its argument.
  def perform(path: path) do
    path |> Path.dirname() |> File.mkdir_p!()
    path |> String.to_charlist() |> :mnesia.backup()

    if File.exists?(@hook_path) do
      System.cmd("sh", [@hook_path, path])
    end
  end

  # NOTE(review): Mix is not available in OTP releases; Mix.env() here
  # assumes the app always runs under `mix` — confirm.
  def enqueue() do
    Que.add(__MODULE__, path: "priv/backup/sr2-#{Mix.env()}.bak")
  end
end
19 |
--------------------------------------------------------------------------------
/lib/sweetroll2/job/clear_jobs.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Job.ClearJobs do
  @moduledoc """
  Que job for removing old completed Que jobs. So meta.
  """

  require Logger
  alias Que.Persistence.Mnesia.DB.Jobs
  use Que.Worker

  # Deletes completed jobs beyond the newest `n`, logging the counts before
  # and after the cleanup.
  def perform(keep: n) when is_integer(n) do
    Timber.add_context(que: %{job_id: Logger.metadata()[:job_id]})

    completed = Jobs.completed_jobs()
    total = length(completed)

    if total > n do
      completed
      |> Enum.slice(0, total - n)
      |> Enum.each(&Jobs.delete_job/1)

      remaining = length(Jobs.completed_jobs())

      Logger.info("cleaned jobs",
        event: %{deleted_old_jobs: %{old_len: total, new_len: remaining}}
      )
    end
  end

  def enqueue(n \\ 69) when is_integer(n) do
    Que.add(__MODULE__, keep: n)
  end
end
31 |
--------------------------------------------------------------------------------
/lib/sweetroll2/job/compress.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Job.Compress do
  @moduledoc """
  Que job that precompresses a static asset with zopfli (gzip) and brotli,
  plus a file-system watcher that keeps the compressed copies in sync.
  """

  require Logger
  use Que.Worker, concurrency: 2

  # Writes `path`.gz (zopfli) and `path`.br (brotli) next to the original.
  # Assumes both binaries are on $PATH; System.cmd raises if they are missing.
  def perform(path: path) do
    System.cmd("zopfli", ["--i8", path])
    System.cmd("brotli", ["--keep", "--best", "--force", path])
  end

  @asset_dir "priv/static"

  # Queues compression for every top-level file in priv/static, skipping
  # already-compressed outputs and woff/woff2 fonts (already compressed
  # formats), and deletes orphaned .br/.gz files whose source is gone.
  def enqueue_assets() do
    {:ok, files} = File.ls(@asset_dir)

    for file <- files do
      path = Path.join(@asset_dir, file)

      if !File.dir?(path) and !String.ends_with?(path, ".br") and !String.ends_with?(path, ".gz") and
           !String.ends_with?(path, ".woff") and !String.ends_with?(path, ".woff2") do
        Que.add(Sweetroll2.Job.Compress, path: path)
      end

      if (String.ends_with?(path, ".br") or String.ends_with?(path, ".gz")) and
           !File.exists?(Path.rootname(path)) do
        File.rm(path)
      end
    end
  end

  defmodule AssetWatcher do
    @moduledoc """
    GenServer watching priv/static: (re)compresses or cleans up assets as
    they change and invalidates the cached asset revision.
    """

    require Logger
    use GenServer

    @asset_dir "priv/static"

    def start_link(args) do
      GenServer.start_link(__MODULE__, args)
    end

    # `args` go straight to FileSystem (e.g. the dirs to watch).
    def init(args) do
      {:ok, watcher_pid} = FileSystem.start_link(args)
      FileSystem.subscribe(watcher_pid)
      {:ok, %{watcher_pid: watcher_pid}}
    end

    # A watched file changed: queue compression for created/modified files,
    # delete compressed variants of deleted ones, then uncache the asset rev.
    # Compressed outputs and woff/woff2 fonts are ignored.
    # NOTE(review): `path` is used both as a String (ends_with?) and via
    # to_string/1 below — confirm the FileSystem backend always sends binaries.
    # NOTE(review): there is no catch-all handle_info clause; any unexpected
    # message would crash this server — confirm that is acceptable.
    def handle_info(
          {:file_event, watcher_pid, {path, events}},
          %{watcher_pid: watcher_pid} = state
        ) do
      if !String.ends_with?(path, ".br") and !String.ends_with?(path, ".gz") and
           !String.ends_with?(path, ".woff") and !String.ends_with?(path, ".woff2") do
        for event <- events do
          case event do
            :created ->
              Logger.info("compressing new asset '#{path}'")
              Que.add(Sweetroll2.Job.Compress, path: path)

            :modified ->
              Logger.info("compressing modified asset '#{path}'")
              Que.add(Sweetroll2.Job.Compress, path: path)

            :deleted ->
              Logger.info("deleting compressed versions of asset '#{path}'")
              File.rm(path <> ".gz")
              File.rm(path <> ".br")

            _ ->
              true
          end
        end

        rel_path = Path.relative_to(to_string(path), Path.absname(@asset_dir))
        Logger.info("uncaching rev of '#{rel_path}'")
        ConCache.delete(:asset_rev, rel_path)
      end

      {:noreply, state}
    end

    # The watcher itself stopped: wait, then stop so the supervisor can
    # restart us — presumably with a restart strategy; TODO confirm.
    def handle_info({:file_event, watcher_pid, :stop}, %{watcher_pid: watcher_pid} = state) do
      Logger.error("FS watcher stopped")
      Process.sleep(10_000)
      {:stop, :watcher_stopped, state}
    end
  end
end
87 |
--------------------------------------------------------------------------------
/lib/sweetroll2/job/fetch.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Job.Fetch do
  @moduledoc """
  Que job that fetches a remote URL, parses its microformats2 content, and
  stores the result as a post — used for webmention verification and for
  pulling in author profiles.
  """

  alias Sweetroll2.{Events, Post, Convert, HttpClient}
  require Logger
  import ExEarlyRet
  use Que.Worker, concurrency: 4

  # True when every href attribute on the element equals `url`.
  # NOTE(review): an element with no href attributes vacuously matches
  # (Enum.all?/2 on an empty list) — confirm that is intended.
  def href_matches?({_, attrs, _}, url) do
    attrs
    |> Enum.filter(fn {k, _} -> k == "href" end)
    |> Enum.all?(fn {_, v} -> v == url end)
  end

  # Fetches `url` and returns {:ok, simplified_mf}, or one of the tagged
  # failures: {:non_http_scheme, _} | {:local_host, _} | {:gone, resp} |
  # {:no_mention, _} | {:no_microformat, _}.
  # When `check_mention` is a URL, the fetched page must contain an <a>
  # linking to it (webmention source verification).
  defearlyret fetch(url, check_mention: check_mention) do
    u = URI.parse(url)

    ret_if u.scheme != "http" && u.scheme != "https", do: {:non_http_scheme, u.scheme}

    # SSRF guard: refuse obviously-local hosts.
    # TODO: check IP address ranges too.. or just ban IP addreses
    ret_if u.host == nil || u.host == "localhost", do: {:local_host, u.host}

    resp = HttpClient.get!(url, headers: [{"accept", "text/html"}])

    # 410 Gone: the page was deliberately deleted.
    ret_if resp.status == 410, do: {:gone, resp}

    html = Floki.parse(resp.body)

    ret_if check_mention &&
             not Enum.any?(Floki.find(html, "a"), &href_matches?(&1, check_mention)),
           do: {:no_mention, check_mention}

    mf =
      Microformats2.parse(html, url)
      |> Convert.find_mf_with_url(url)
      |> Convert.simplify()

    ret_if is_nil(mf), do: {:no_microformat, html}

    {:ok, mf}
  end

  # Runs the fetch and applies the outcome:
  #  - {:ok, mf}: store the post (queueing a fetch of its author profile when
  #    present) and, with save_mention, add `url` to that post's "comment"s;
  #  - {:gone, _}: mark any stored copy deleted, remove it from the comments;
  #  - {:no_mention, _}: remove it from the target post's comments.
  # NOTE(review): other fetch results ({:non_http_scheme, _}, {:local_host, _},
  # {:no_microformat, _}) raise CaseClauseError and fail the job — confirm
  # that is the intended failure mode.
  def perform(
        url: url,
        check_mention: check_mention,
        save_mention: save_mention,
        notify_update: notify_update
      ) do
    Timber.add_context(que: %{job_id: Logger.metadata()[:job_id]})

    case fetch(url, check_mention: check_mention) do
      {:ok, mf} ->
        # Queue a follow-up fetch of the author profile when it's a URL.
        if author = Convert.as_one(mf["author"]) do
          author_url =
            cond do
              is_map(author) -> Convert.as_one(author["url"])
              is_binary(author) -> author
              true -> nil
            end

          if !is_nil(author_url) and is_binary(author_url) and
               String.starts_with?(author_url, "http") do
            Que.add(__MODULE__,
              url: author_url,
              check_mention: nil,
              save_mention: nil,
              notify_update: notify_update
            )
          end
        end

        Memento.transaction!(fn ->
          post = Post.from_map(mf)

          # Normalize trailing-slash differences between the post's
          # self-reported URL and the URL we actually fetched.
          purl =
            cond do
              post.url <> "/" == url -> post.url <> "/"
              String.trim_trailing(post.url, "/") == url -> String.trim_trailing(post.url, "/")
              true -> post.url
            end

          post = %{post | url: purl}

          if post.url != url,
            do:
              Logger.warn("URL mismatch '#{post.url}' vs #{url}",
                event: %{fetch_url_mismatch: %{post: post.url, requested: url}}
              )

          # Store under the requested URL regardless, marked as fetched.
          Memento.Query.write(%{post | url: url, status: :fetched})

          # Append `url` to the mentioned post's "comment" list (idempotent).
          if !is_nil(save_mention) do
            post = Memento.Query.read(Post, save_mention)

            props =
              Map.update(post.props, "comment", [url], fn comm ->
                if url in comm, do: comm, else: comm ++ [url]
              end)

            Memento.Query.write(%{post | props: props})
          end
        end)

      {:gone, _} ->
        Memento.transaction!(fn ->
          post = Memento.Query.read(Post, url)

          if post do
            Memento.Query.write(%{post | deleted: true})
          end

          # Withdraw the mention from the target post's comments.
          if !is_nil(save_mention) do
            post = Memento.Query.read(Post, save_mention)

            props =
              Map.update(post.props, "comment", [], fn comm ->
                Enum.filter(comm, &(&1 != url))
              end)

            Memento.Query.write(%{post | props: props})
          end
        end)

      {:no_mention, _} ->
        # The page no longer links to us: withdraw the mention.
        if !is_nil(save_mention) do
          Memento.transaction!(fn ->
            post = Memento.Query.read(Post, save_mention)

            props =
              Map.update(post.props, "comment", [], fn comm ->
                Enum.filter(comm, &(&1 != url))
              end)

            Memento.Query.write(%{post | props: props})
          end)
        end
    end

    Events.notify_urls_updated(notify_update)
  end
end
140 |
--------------------------------------------------------------------------------
/lib/sweetroll2/job/generate.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Job.Generate do
  @moduledoc """
  Que job that renders posts to static HTML files on disk, queueing
  compression for changed files and any follow-up jobs afterwards.
  """

  # Max parallel page renders in gen_allowed_pages/2.
  @concurrency 8
  @default_dir "out"

  require Logger
  alias Sweetroll2.{Post, Render, Job.Compress}
  use Que.Worker

  # Output directory, overridable via SR2_STATIC_GEN_OUT_DIR.
  def dir, do: System.get_env("SR2_STATIC_GEN_OUT_DIR") || @default_dir

  # Whether a URL may be generated: must be local, known, and published.
  def can_generate(url, posts, local_urls) when is_map(posts) do
    cond do
      !String.starts_with?(url, "/") -> :nonlocal
      url not in local_urls -> :nonexistent
      Post.Generative.lookup(url, posts, local_urls).status != :published -> :nonpublic
      true -> :ok
    end
  end

  # Renders one URL to <dir>/<url>/index.html, returning {status, path} with
  # status :gone | :updated | :same. Deleted posts get a "Gone" page plus a
  # `gone` marker file; byte-identical output is left alone so the existing
  # compressed variants stay valid.
  def gen_page(url, posts, local_urls, log_ctx) when is_map(posts) do
    # Rendering is allocation-heavy; pre-size heaps and run at low priority.
    Process.flag(:min_heap_size, 131_072)
    Process.flag(:min_bin_vheap_size, 131_072)
    Process.flag(:priority, :low)
    Timber.LocalContext.save(log_ctx)
    Timber.add_context(sr2_generator: %{url: url})

    path_dir = Path.join(dir(), url)
    File.mkdir_p!(path_dir)
    path = Path.join(path_dir, "index.html")
    del_flag_path = Path.join(path_dir, "gone")

    post = Post.Generative.lookup(url, posts, local_urls)

    status =
      if post.deleted do
        # Stale compressed copies must go too.
        File.rm("#{path}.gz")
        File.rm("#{path}.br")
        File.write!(path, "Gone")
        File.write!(del_flag_path, "+")
        :gone
      else
        File.rm(del_flag_path)

        {:safe, data} =
          Render.render_post(
            post: post,
            posts: posts,
            # all URLs is fine
            local_urls: local_urls,
            logged_in: false
          )

        # have to convert to compare with existing
        data = IO.iodata_to_binary(data)

        if File.read(path) != {:ok, data} do
          File.rm("#{path}.gz")
          File.rm("#{path}.br")
          File.write!(path, data)
          :updated
        else
          :same
        end
      end

    Logger.info("generated #{url} -> #{path}",
      event: %{generate_success: %{url: url, path: path, status: status}}
    )

    {status, path}
  end

  # Renders the given URLs (or :all) that pass can_generate/3, concurrently,
  # grouped by Task.async_stream result tag (:ok / :exit).
  # NOTE(review): filtering checks against all_local_urls (incl. generated
  # URLs) but gen_page receives only local_urls — confirm
  # Post.Generative.lookup resolves dynamic URLs correctly with that list.
  def gen_allowed_pages(urls, posts) when is_map(posts) do
    local_urls = Post.urls_local_public()
    urls_dyn = Post.Generative.list_generated_urls(local_urls, posts, local_urls)
    all_local_urls = local_urls ++ urls_dyn

    log_ctx = Timber.LocalContext.load()

    if(urls == :all, do: all_local_urls, else: urls)
    |> Enum.filter(&(can_generate(&1, posts, all_local_urls) == :ok))
    |> Task.async_stream(&gen_page(&1, posts, local_urls, log_ctx), max_concurrency: @concurrency)
    |> Enum.group_by(&elem(&1, 0))
  end

  # Job entry point: snapshot all posts, generate pages, queue compression
  # for changed files, then queue the follow-up jobs.
  def perform(urls: urls, next_jobs: next_jobs) do
    Process.flag(:min_heap_size, 524_288)
    Process.flag(:min_bin_vheap_size, 524_288)
    Process.flag(:priority, :low)
    Timber.add_context(que: %{job_id: Logger.metadata()[:job_id]})

    posts = Map.new(Memento.transaction!(fn -> Memento.Query.all(Post) end), &{&1.url, &1})

    result = gen_allowed_pages(urls, posts)

    for {:ok, {status, path}} <- result[:ok] || [] do
      if status != :same, do: Que.add(Compress, path: path)
    end

    for {mod, args} <- next_jobs do
      Que.add(mod, args)
    end
  end

  # Deletes every generated artifact for a URL (html, compressed, gone flag).
  def remove_generated(url) do
    path_dir = Path.join(dir(), url)
    File.rm(Path.join(path_dir, "index.html"))
    File.rm(Path.join(path_dir, "index.html.gz"))
    File.rm(Path.join(path_dir, "index.html.br"))
    File.rm(Path.join(path_dir, "gone"))
  end

  # Queues a full-site regeneration.
  def enqueue_all(next_jobs \\ []) do
    Que.add(__MODULE__, urls: :all, next_jobs: next_jobs)
  end
end
117 |
--------------------------------------------------------------------------------
/lib/sweetroll2/job/notify_websub.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Job.NotifyWebsub do
  @moduledoc """
  Que job that pings the WebSub hub about an updated URL, including the
  granary.io Atom/AS2 conversions of that URL.
  """

  @default_hub if Mix.env() == :dev,
                 do: "https://httpbin.org/post",
                 else: "https://pubsubhubbub.superfeedr.com/"
  @default_granary "https://granary.io/url"

  require Logger
  use Que.Worker, concurrency: 4
  alias Sweetroll2.{Convert, HttpClient}

  def hub, do: System.get_env("SR2_WEBSUB_HUB") || @default_hub
  def granary, do: System.get_env("SR2_GRANARY") || @default_granary

  # Builds a granary.io conversion URL for a page in the given output format.
  def granary_url(url, output) do
    params = %{"url" => url, "input" => "html", "output" => output, "hub" => hub()}
    granary() <> "?" <> URI.encode_query(params)
  end

  # The alternate-representation URLs to notify for a page: Atom for the
  # home feed and the page, plus an ActivityStreams 2 version of the page.
  def granary_urls(home: home, url: url) do
    [
      {"alternate home", "application/atom+xml", granary_url(home <> "/", "atom")},
      {"alternate", "application/atom+xml", granary_url(home <> url, "atom")},
      {"alternate", "application/activity+json", granary_url(home <> url, "as2")}
    ]
  end

  # Notifies the hub about the page itself, then queues one job per
  # granary-converted representation.
  def perform(home: home, url: url) when is_binary(home) and is_binary(url) do
    perform(url: home <> url)

    for {_, _, gurl} <- granary_urls(home: home, url: url) do
      Que.add(__MODULE__, url: gurl)
    end
  end

  # Sends the actual hub publish ping and logs success/failure.
  def perform(url: url) when is_binary(url) do
    Timber.add_context(que: %{job_id: Logger.metadata()[:job_id]})

    resp = HttpClient.post!(hub(), %{"hub.mode": "publish", "hub.url": url})

    if resp.status >= 200 and resp.status < 300 do
      Logger.info("", event: %{websub_success: Convert.resp2log(resp)})
    else
      Logger.info("", event: %{websub_failure: Convert.resp2log(resp)})
    end
  end
end
47 |
--------------------------------------------------------------------------------
/lib/sweetroll2/job/send_webmentions.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Job.SendWebmentions do
  @moduledoc """
  Que worker that discovers Webmention endpoints and sends mentions
  for the contexts (replies, likes, etc.) of local posts.
  """

  alias Sweetroll2.{Post, Markup, Convert, HttpClient}
  require Logger
  use Que.Worker, concurrency: 4

  # Parses HTTP Link header value(s) into ExHttpLink tuples.
  # Empty or unparseable input yields [].
  defp parse_http_links([]), do: []
  defp parse_http_links(""), do: []

  # Multiple Link headers are equivalent to one comma-joined header.
  defp parse_http_links(l) when is_list(l), do: parse_http_links(Enum.join(l, ","))

  defp parse_http_links(s) when is_binary(s) do
    case ExHttpLink.parse(s) do
      {:ok, links} ->
        links

      {:error, err, _, _, _} ->
        Logger.warn("could not parse Link header",
          event: %{failed_link_header_parse: %{header: s, error: inspect(err)}}
        )

        []
    end
  end

  # Returns the href of the first parsed link whose rel includes "webmention",
  # or nil if there is none.
  defp find_http_link(links) when is_list(links) do
    {link, _} =
      Enum.find(links, {nil, nil}, fn {_, rels} ->
        # rels is a flat tuple of alternating attribute names and values
        Tuple.to_list(rels)
        |> Stream.chunk_every(2)
        |> Enum.any?(fn [_, v] ->
          String.contains?(" #{v} ", " webmention ")
        end)
      end)

    link
  end

  defp find_html_link(tree) do
    # Floki.find returns a list ([] when nothing matches), and [] is truthy
    # in Elixir, so guarding on the result with `&&` would be a no-op.
    # Just take the first matching href; nil when there is none.
    tree
    |> Floki.find("a[href][rel~=webmention], link[href][rel~=webmention]")
    |> Floki.attribute("href")
    |> List.first()
  end

  @doc """
  Discovers the Webmention endpoint for a fetched page
  (HTTP Link header first, then HTML), resolving relative URLs
  against the page URL. Returns nil if none is advertised.
  """
  def discover(%Tesla.Env{url: url, body: body} = resp) do
    # TODO: HTML base tag??
    link =
      find_http_link(parse_http_links(Tesla.get_headers(resp, "link"))) ||
        find_html_link(Floki.parse(body))

    cond do
      not is_binary(link) ->
        nil

      # An empty href means "the page itself is the endpoint"
      link == "" ->
        url

      String.starts_with?(link, "http") ->
        link

      true ->
        URI.merge(url, link) |> URI.to_string()
    end
  end

  # Sends a single webmention from `source` to `target`.
  def perform(source: source, target: target) do
    Timber.add_context(que: %{job_id: Logger.metadata()[:job_id]})

    Logger.info("sending", event: %{webmention_start: %{source: source, target: target}})

    endpoint = discover(HttpClient.get!(target))

    Logger.info("endpoint '#{endpoint}' found",
      event: %{webmention_endpoint_discovered: %{endpoint: endpoint, for: target}}
    )

    resp = HttpClient.post!(endpoint, %{source: source, target: target})

    if resp.status >= 200 and resp.status < 300 do
      Logger.info("sent", event: %{webmention_success: Convert.resp2log(resp)})
    else
      Logger.warn("failed to send", event: %{webmention_failure: Convert.resp2log(resp)})
    end
  end

  # Fans out: one per-url job for every local public post.
  def perform(url: :all, our_home_url: our_home_url) do
    Timber.add_context(que: %{job_id: Logger.metadata()[:job_id]})

    for url <- Post.urls_local_public() do
      Que.add(__MODULE__, url: url, our_home_url: our_home_url)
    end
  end

  # Enqueues one mention job per context URL of the post at `url`,
  # including contexts that were removed (so they can drop the mention).
  def perform(url: url, our_home_url: our_home_url) do
    Timber.add_context(que: %{job_id: Logger.metadata()[:job_id]})

    full_url = our_home_url <> url
    post = %Post.DbAsMap{}[url]

    if post do
      for target <-
            MapSet.union(
              Post.contexts_for(post.props),
              Markup.contexts_for(Convert.as_one(post.props["content"]))
            ) do
        Que.add(__MODULE__, source: full_url, target: target)
      end

      # TODO: also move these to a different property so that we don't pester
      # no-longer-mentioned sites with our removed mentions too much
      for target <- post.props["x-sr2-ctxs-removed"] || [] do
        Que.add(__MODULE__, source: full_url, target: target)
      end
    else
      Logger.info("no post for url #{inspect(url)}",
        event: %{webmention_no_post: %{url: url, our_home_url: our_home_url}}
      )
    end
  end

  @doc "Enqueues mention jobs for all local public posts."
  def enqueue_all do
    Que.add(__MODULE__, url: :all, our_home_url: Sweetroll2.canonical_home_url())
  end
end
123 |
--------------------------------------------------------------------------------
/lib/sweetroll2/markup.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Markup do
  @moduledoc """
  Handling of post markup, trusted (local) and untrusted (fetched).
  """

  import Sweetroll2.Convert
  require Logger
  alias Phoenix.HTML, as: PH
  alias Phoenix.HTML.Format, as: PHF

  # Serializes a Floki tree back into an HTML string.
  def render_tree(tree), do: Floki.raw_html(tree)

  # Collects hrefs of absolute, non-nofollow links in a content object --
  # i.e. the URLs this content "mentions" (used as webmention targets).
  def contexts_for(content) do
    Floki.find(content_to_tree(content), "a[href^=http]:not([rel~=nofollow])")
    |> Stream.map(&List.first(Floki.attribute(&1, "href")))
    |> MapSet.new()
  end

  @doc """
  Parse a snippet of HTML without html/head/body tags into a tree.
  """
  def html_part_to_tree(html) do
    # html5ever always inserts a skeleton
    # the cdata thing is basically before_scrub from the sanitizer
    # NOTE(review): the String.replace call below appears truncated in this
    # copy of the file (probably a CDATA-stripping replace, e.g. of
    # "<![CDATA[") -- verify against version control before touching it.
    case html |> String.replace(" Floki.parse() do
      [{"html", _, [{"head", _, _}, {"body", _, [part]}]}] -> part
      [{"html", _, [{"head", _, _}, {"body", _, parts}]}] -> parts
      x -> x
    end
  end

  # Plain text -> paragraphs of escaped HTML -> tree.
  defp text_to_tree(t), do: t |> PHF.text_to_html() |> PH.safe_to_string() |> html_part_to_tree

  @doc """
  Parse a JSON content object into a tree.
  """
  def content_to_tree(%{"markdown" => md}), do: md |> Earmark.as_html!() |> html_part_to_tree
  def content_to_tree(%{markdown: md}), do: md |> Earmark.as_html!() |> html_part_to_tree
  def content_to_tree(%{"html" => h}), do: h |> html_part_to_tree
  def content_to_tree(%{html: h}), do: h |> html_part_to_tree
  def content_to_tree(%{"value" => t}), do: t |> text_to_tree
  def content_to_tree(%{value: t}), do: t |> text_to_tree
  def content_to_tree(%{"text" => t}), do: t |> text_to_tree
  def content_to_tree(%{text: t}), do: t |> text_to_tree
  def content_to_tree(x), do: x |> to_string |> text_to_tree
  # NOTE: mf2 parser generates :html atom

  @doc """
  Sanitize untrusted HTML trees (fetched posts in contexts).
  """
  def sanitize_tree(tree) do
    HtmlSanitizeEx.Traverser.traverse(tree, HtmlSanitizeEx.Scrubber.MarkdownHTML)
  rescue
    CaseClauseError ->
      # https://github.com/rrrene/html_sanitize_ex/issues/28
      Logger.warn("html sanitizer error, falling back to escaping", event: %{sanitizer_error: %{}})

      tree |> render_tree |> Plug.HTML.html_escape() |> text_to_tree
  end

  # Compile-time map of class name -> syntect language name,
  # built from syntect's known file extensions plus a few common aliases.
  @langs RustledSyntect.supported_langs()
         |> Stream.flat_map(fn %RustledSyntect.Syntax{file_extensions: exts, name: name} ->
           Enum.map(exts, &{&1, name})
         end)
         |> Stream.concat([{"ruby", "Ruby"}, {"python", "Python"}, {"haskell", "Haskell"}])
         |> Map.new()

  @doc """
  Apply syntax highlighting to pre>code blocks that have known languages as classes.
  """
  def highlight_code({"pre", p_attrs, {"code", c_attrs, content}}) do
    # Language classes may be on either the <pre> or the <code> element.
    clss = Enum.concat(klasses(p_attrs), klasses(c_attrs))
    hl_lang = Enum.find(clss, nil, &@langs[&1])

    if hl_lang do
      Logger.debug("highlighting",
        event: %{code_highlighter_found: %{hl: @langs[hl_lang], for: hl_lang}}
      )

      # TODO: make RustledSyntect produce a parsed tree
      code_tree =
        content
        |> src_text
        |> String.split("\n")
        |> RustledSyntect.hilite_stream(lang: @langs[hl_lang])
        |> Enum.into([])
        |> List.flatten()
        |> Enum.join("")
        |> html_part_to_tree

      {"pre", add_klass(p_attrs, "syntect"), {"code", c_attrs, code_tree}}
    else
      Logger.debug("could not highlight", event: %{code_highlighter_not_found: %{classes: clss}})
      {"pre", p_attrs, {"code", c_attrs, content}}
    end
  end

  def highlight_code({"pre", attrs, content}) when is_list(content) do
    code_tag =
      Enum.find(content, nil, fn
        {"code", _, _} -> true
        _ -> false
      end)

    # Only highlight simple pre blocks (at most 2 element children);
    # the code tag is passed as a bare tuple to the clause above.
    if code_tag && length(Enum.filter(content, &is_tuple/1)) < 3 do
      highlight_code({"pre", attrs, code_tag})
    else
      {"pre", attrs, content}
    end
  end

  # Recurse into any other element / list of nodes; leave text nodes alone.
  def highlight_code({tag, attrs, content}), do: {tag, attrs, highlight_code(content)}

  def highlight_code(l) when is_list(l), do: Enum.map(l, &highlight_code/1)

  def highlight_code(non_tag), do: non_tag

  @doc """
  Render tags like photo-here[id=something] inline from a map of properties
  using provided templates (renderers).
  """
  def inline_media_into_content({tag, attrs, content}, renderers, props)
      when is_binary(tag) and is_list(attrs) do
    if String.ends_with?(tag, "-here") do
      media_type = String.trim_trailing(tag, "-here")

      Logger.debug("inlining #{tag}..")

      # Each failed step falls into `else` with a self-describing tagged tuple.
      with {_, {_, id}} <-
             {:no_id_attr, Enum.find(attrs, fn {k, _} -> k == "id" end)},
           _ = Logger.debug(" #{media_type} id: #{id}"),
           {_, _, rend} when is_function(rend, 1) <-
             {:no_renderer, media_type, renderers[media_type]},
           _ = Logger.debug(" #{media_type} renderer found"),
           medias = as_many(props[media_type]),
           _ = Logger.debug(" #{media_type} media of this type: #{Enum.count(medias)}"),
           {_, _, _, media} when is_map(media) <-
             {:no_media_id, media_type, id, Enum.find(medias, &(&1["id"] == id))} do
        _ = Logger.debug(" #{media_type} found object for id: #{id}")
        media |> rend.() |> PH.safe_to_string() |> html_part_to_tree
        # TODO: would be amazing to have taggart output to a tree directly
      else
        err ->
          Logger.warn("could not inline #{media_type}",
            event: %{media_inlining_failure: %{type: media_type, error: inspect(err)}}
          )

          {"div", [{"class", "sweetroll2-error"}],
           ["Media embedding failed.", {"pre", [], inspect(err)}]}
      end
    else
      {tag, attrs, inline_media_into_content(content, renderers, props)}
    end
  end

  def inline_media_into_content(l, renderers, props) when is_list(l),
    do: Enum.map(l, &inline_media_into_content(&1, renderers, props))

  def inline_media_into_content(non_tag, _renderers, _props), do: non_tag

  @doc """
  Remove media that was inserted by inline_media_into_content from a data property.
  """
  def exclude_inlined_media(tree, media_name, media_items) do
    used_ids =
      Floki.find(tree, "#{media_name}-here")
      |> Enum.map(fn {_, a, _} ->
        {_, id} = Enum.find(a, {:x, nil}, fn {k, _} -> k == "id" end)
        id
      end)

    # Keep bare-string items (not yet processed) and items whose id wasn't used.
    Enum.filter(media_items, fn i ->
      is_binary(i) or not Enum.member?(used_ids, i["id"])
    end)
  end

  # Splits an element's class attribute into a list of class names ([] if absent).
  defp klasses(attrs) do
    c =
      Stream.filter(attrs, fn {k, _} -> k == "class" end)
      |> Enum.map(fn {"class", c} -> c end)
      |> List.first()

    if c, do: String.split(c, ~r/\s+/), else: []
  end

  # Prepends `val` to the class attribute, creating the attribute if needed.
  defp add_klass(attrs, val) do
    if Enum.find(attrs, nil, fn {k, _} -> k == "class" end) do
      Enum.map(attrs, fn
        {"class", c} -> {"class", "#{val} #{c}"}
        x -> x
      end)
    else
      [{"class", val} | attrs]
    end
  end

  # Extracts the concatenated text content of a node/tree.
  defp src_text({_tag, _attrs, content}), do: src_text(content)
  defp src_text(s) when is_binary(s), do: s
  defp src_text(l) when is_list(l), do: Stream.map(l, &src_text/1) |> Enum.join("")
end
201 |
--------------------------------------------------------------------------------
/lib/sweetroll2/media_upload.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.MediaUpload do
  @moduledoc """
  A Mnesia table for storing media processing tokens.
  """

  require Logger

  alias Sweetroll2.{Post, Events}

  use EventBus.EventSource

  use Memento.Table,
    attributes: [:token, :date, :url, :object]

  # Creates a row linking a fresh "U-"-prefixed token to an upload URL
  # (object is filled in later by `fill/2`). Returns the token.
  def create(url) when is_binary(url) do
    token = "U-" <> Nanoid.Secure.generate()

    Memento.transaction!(fn ->
      now = DateTime.utc_now()

      Memento.Query.write(%__MODULE__{
        token: token,
        date: now,
        url: url,
        object: nil
      })
    end)

    token
  end

  # Stores the processed media object for `token`, announces it over the
  # :upload_processed SSE topic, and rewrites any local (non-:fetched) post
  # whose props reference the upload URL to embed the object instead.
  # Finally notifies the update pipeline about every rewritten post URL.
  def fill(token, obj) when is_binary(token) do
    Memento.transaction!(fn ->
      upload = Memento.Query.read(__MODULE__, token)

      Logger.info("filling media upload for '#{upload.url}'",
        event: %{filling_upload: %{token: token, upload: upload.url, object: obj}}
      )

      EventSource.notify(%{topic: :upload_processed},
        do: %SSE.Chunk{data: Jason.encode!(%{url: upload.url, object: obj})}
      )

      Memento.Query.write(%{upload | object: obj})

      # The comprehension yields post.url for rewritten posts, nil otherwise;
      # nils are filtered out after the transaction.
      for post <- Memento.Query.select(Post, {:"/=", :status, :fetched}) do
        if Enum.any?(post.props, fn {k, v} -> is_list(v) and upload.url in v end) do
          Logger.info("inserting media object for '#{upload.url}' into '#{post.url}'",
            event: %{inserting_upload: %{upload: upload.url, post: post.url}}
          )

          Memento.Query.write(%{
            post
            | props: Post.replace_in_props(post.props, &if(&1 == upload.url, do: obj, else: &1))
          })

          post.url
        else
          nil
        end
      end
    end)
    |> Enum.reject(&is_nil/1)
    |> Events.notify_urls_updated()
  end

  # Substitutes every known processed upload URL in `props` with its
  # stored media object; unknown values pass through unchanged.
  def replace_all(props) do
    replacements =
      Memento.transaction!(fn ->
        Memento.Query.all(__MODULE__)
      end)
      |> Stream.filter(&(!is_nil(&1.object)))
      |> Stream.map(&{&1.url, &1.object})
      |> Enum.into(%{})

    Post.replace_in_props(props, &Map.get(replacements, &1, &1))
  end
end
79 |
--------------------------------------------------------------------------------
/lib/sweetroll2/micropub.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Micropub do
  @moduledoc """
  Micropub protocol handler (PlugMicropub behaviour): creating, updating,
  deleting and undeleting posts, source queries, and S3 media uploads.

  All post writes go through Mnesia (Memento) transactions; after each
  write, mentioned context URLs are fetched and update events are emitted.
  """

  @behaviour PlugMicropub.HandlerBehaviour

  require Logger
  import ExEarlyRet
  alias Sweetroll2.{Auth.Bearer, Auth.AccessToken, Events, Post, Markup, MediaUpload, Job}
  import Sweetroll2.Convert

  @impl true
  # Creates a post; entries/reviews get a "_<category>" marker category and
  # a "/<category>/<slug>" URL unless an explicit url property was given.
  # Fails with :url_exists if a non-deleted post already lives at the URL.
  def handle_create(type, properties, token) do
    if Bearer.is_allowed?(token, :create) do
      {properties, url} =
        if type == "entry" or type == "review" do
          cat = category_for(properties)

          {Map.update(properties, "category", ["_" <> cat], &["_" <> cat | &1]),
           as_one(properties["url"]) || "/#{cat}/#{slug_for(properties)}"}
        else
          {properties, as_one(properties["url"]) || "/__wtf__/#{slug_for(properties)}"}
        end

      clid = AccessToken.get_client_id(token)

      # Record the creating client, then swap processed media URLs for objects.
      properties =
        if(!is_nil(clid), do: Map.put(properties, "client-id", clid), else: properties)
        |> MediaUpload.replace_all()

      params = %{type: type, properties: properties, url: url}

      result =
        Memento.transaction!(fn ->
          old_post = Memento.Query.read(Post, url)

          if is_nil(old_post) or old_post.deleted do
            Post.from_map(params)
            |> Map.update(:published, DateTime.utc_now(), &(&1 || DateTime.utc_now()))
            |> Map.update(:status, :published, &(&1 || :published))
            |> Memento.Query.write()

            {:ok, :created, url}
          else
            Logger.error("micropub: url already exists '#{url}'")
            {:error, :invalid_request, :url_exists}
          end
        end)

      case result do
        {:ok, :created, url} ->
          ctxs = Post.contexts_for(properties)
          fetch_contexts(ctxs, url: url)
          Events.notify_urls_updated([url])
          # Micropub responses must carry the absolute URL.
          {:ok, :created, Process.get(:our_home_url) <> url}

        x ->
          x
      end
    else
      {:error, :insufficient_scope, :unauthorized}
    end
  end

  @impl true
  # Applies replace/add/delete operations to a post's properties, tracking
  # which context URLs disappeared (kept in "x-sr2-ctxs-removed" so removed
  # mentions can still be notified), then re-fetches contexts and notifies.
  def handle_update(url, replace, add, delete, token) do
    if Bearer.is_allowed?(token, :update) do
      url = read_url(url)

      ctxs =
        Memento.transaction!(fn ->
          post = Memento.Query.read(Post, url)

          # We want to e.g. notify posts that aren't mentioned anymore too
          all_old_ctxs =
            MapSet.union(
              Post.contexts_for(post.props),
              Markup.contexts_for(as_one(post.props["content"]))
            )

          props =
            Enum.reduce(replace, post.props, fn {k, v}, props ->
              Map.put(props, k, v)
            end)

          props =
            Enum.reduce(add, props, fn {k, v}, props ->
              Map.update(props, k, v, &(&1 ++ v))
            end)

          # delete can be {key, values} (remove values) or a bare key (drop it).
          props =
            Enum.reduce(delete, props, fn
              {k, v}, props ->
                if Map.has_key?(props, k) do
                  Map.update!(props, k, &(&1 -- v))
                else
                  props
                end

              k, props ->
                Map.delete(props, k)
            end)

          ctxs_prop = Post.contexts_for(props)
          ctxs_cont = Markup.contexts_for(as_one(props["content"]))

          removed_ctxs = MapSet.difference(all_old_ctxs, MapSet.union(ctxs_prop, ctxs_cont))

          # Merge newly-removed contexts into the existing removed set,
          # dropping any that are mentioned again.
          props =
            Map.update(
              props,
              "x-sr2-ctxs-removed",
              MapSet.to_list(removed_ctxs),
              &(MapSet.new(as_many(&1))
                |> MapSet.union(removed_ctxs)
                |> MapSet.difference(ctxs_prop)
                |> MapSet.difference(ctxs_cont)
                |> MapSet.to_list())
            )

          Memento.Query.write(%{
            post
            | props: props |> Map.delete("status") |> MediaUpload.replace_all(),
              updated: DateTime.utc_now(),
              status: Post.valid_status(props["status"])
          })

          ctxs_prop
        end)

      # the fetches also notify, and notifications are debounced,
      # so in the ideal case (fast fetches), the actions (generate etc) will be taken only once
      fetch_contexts(ctxs, url: url)
      Events.notify_urls_updated([url])

      :ok
    else
      {:error, :insufficient_scope, :unauthorized}
    end
  end

  @impl true
  # Soft-deletes (flags) a post; the row stays in the table.
  def handle_delete(url, token) do
    if Bearer.is_allowed?(token, :delete) do
      url = read_url(url)

      Memento.transaction!(fn ->
        post = Memento.Query.read(Post, url)
        Memento.Query.write(%{post | deleted: true})
      end)

      Events.notify_urls_updated([url])

      :ok
    else
      {:error, :insufficient_scope, :unauthorized}
    end
  end

  @impl true
  # Reverses a soft delete.
  def handle_undelete(url, token) do
    if Bearer.is_allowed?(token, :undelete) do
      url = read_url(url)

      Memento.transaction!(fn ->
        post = Memento.Query.read(Post, url)
        Memento.Query.write(%{post | deleted: false})
      end)

      Events.notify_urls_updated([url])

      :ok
    else
      {:error, :insufficient_scope, :unauthorized}
    end
  end

  @impl true
  def handle_config_query(token) do
    if Bearer.is_allowed?(token) do
      {:ok, %{}}
    else
      {:error, :insufficient_scope, :unauthorized}
    end
  end

  @impl true
  # q=source: returns the post as a full mf2 map; early-returns on auth
  # failure or deleted posts (defearlyret is from ExEarlyRet).
  defearlyret handle_source_query(url, _filter_properties, token) do
    ret_if not Bearer.is_allowed?(token), do: {:error, :insufficient_scope, :unauthorized}

    # TODO: filter properties
    # XXX: duplication of Serve/get_ logic
    url = read_url(url)
    # urls_local = Post.urls_local()
    posts = %Post.DbAsMap{}

    ret_if posts[url].deleted, do: {:error, :insufficient_scope, :deleted}

    {:ok, Post.to_full_map(posts[url])}
  end

  @impl true
  def handle_syndicate_to_query(token) do
    if Bearer.is_allowed?(token) do
      {:ok, %{}}
    else
      {:error, :insufficient_scope, :unauthorized}
    end
  end

  # S3 destination, read from the environment at call time.
  def s3_bucket, do: System.get_env("SR2_S3_BUCKET")
  def s3_region, do: System.get_env("SR2_S3_REGION")

  # Streams the uploaded file to S3 (public-read, inline) and registers a
  # MediaUpload token; the imgroll-cb metadata tells the image processor
  # where to call back when processing finishes. Returns the public URL.
  def upload_file(file) do
    # key slugified esp. to avoid https://stackoverflow.com/q/44779042
    key = Slugger.slugify(Path.rootname(file.filename)) <> Path.extname(file.filename)
    url = "https://#{s3_bucket()}.s3.dualstack.#{s3_region()}.amazonaws.com/#{key}"
    token = MediaUpload.create(url)

    ExAws.S3.Upload.stream_file(file.path)
    |> ExAws.S3.upload(s3_bucket(), key,
      content_disposition: "inline",
      content_type: file.content_type,
      acl: :public_read,
      meta: ["imgroll-cb": Process.get(:our_home_url) <> "/__imgroll_callback__/" <> token]
    )
    |> ExAws.request!(region: s3_region())

    url
  end

  @impl true
  def handle_media(file, token) do
    if Bearer.is_allowed?(token, :media) do
      {:ok, upload_file(file)}
    else
      {:error, :insufficient_scope, :unauthorized}
    end
  end

  # Strips our home URL prefix, turning an absolute URL into a local path key.
  defp read_url(url) do
    new_url = String.replace_prefix(url, Process.get(:our_home_url), "")

    Logger.info("micropub: url '#{url}' -> '#{new_url}'")
    new_url
  end

  # Picks a slug: explicit mp-slug/slug if long enough, else slugified name,
  # else a timestamp.
  defp slug_for(properties) do
    custom = as_one(properties["mp-slug"] || properties["slug"])

    if is_binary(custom) && String.length(custom) > 5 do
      custom
    else
      name = as_one(properties["name"])

      if is_binary(name) && String.length(name) > 5 do
        Slugger.slugify(name)
      else
        Timex.format!(DateTime.utc_now(), "{ISOdate}-{h24}-{m}-{s}")
      end
    end
  end

  # Infers the feed category from which mf2 properties are present;
  # clause order defines precedence, falling back to "notes".
  defp category_for(%{"rating" => x}) when is_list(x) and length(x) != 0, do: "reviews"
  defp category_for(%{"item" => x}) when is_list(x) and length(x) != 0, do: "reviews"
  defp category_for(%{"ingredient" => x}) when is_list(x) and length(x) != 0, do: "recipes"
  defp category_for(%{"name" => x}) when is_list(x) and length(x) != 0, do: "articles"
  defp category_for(%{"in-reply-to" => x}) when is_list(x) and length(x) != 0, do: "replies"
  defp category_for(%{"like-of" => x}) when is_list(x) and length(x) != 0, do: "likes"
  defp category_for(%{"repost-of" => x}) when is_list(x) and length(x) != 0, do: "reposts"
  defp category_for(%{"quotation-of" => x}) when is_list(x) and length(x) != 0, do: "quotations"
  defp category_for(%{"bookmark-of" => x}) when is_list(x) and length(x) != 0, do: "bookmarks"
  defp category_for(%{"rsvp" => x}) when is_list(x) and length(x) != 0, do: "rsvp"
  defp category_for(_), do: "notes"

  # Enqueues a fetch job per context URL; each job re-notifies `url` when done.
  defp fetch_contexts(ctxs, url: url) do
    for ctx_url <- ctxs do
      Que.add(Job.Fetch,
        url: ctx_url,
        check_mention: nil,
        save_mention: nil,
        notify_update: [url]
      )
    end
  end
end
284 |
--------------------------------------------------------------------------------
/lib/sweetroll2/post.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Post do
  @moduledoc """
  A Mnesia table for storing microformats2 style posts.

  Fields and conventions:

  - `status` is `:fetched` | `:published` | `:draft` | `:private`
  - `type` is the mf2 type without the `h-` prefix, and each entry has one type
    (practically there was no need for multiple types ever in sweetroll 1)
  - `props` are the "meat" of the post, the mf2 properties (with string keys) expect the special ones:
    - `url` is extracted because it's the primary key
    - `published` and `updated` are extracted for storage as DateTime records instead of text
  """

  import Sweetroll2.Convert
  require Logger

  use Memento.Table,
    attributes: [:url, :deleted, :published, :updated, :status, :type, :props, :children]

  @doc "URLs of all non-fetched (i.e. locally authored) posts; dirty read, no txn."
  def urls_local do
    :mnesia.dirty_select(__MODULE__, [
      {
        {__MODULE__, :"$1", :"$2", :"$3", :"$4", :"$5", :"$6", :"$7", :"$8"},
        [{:"/=", :"$5", :fetched}],
        [:"$1"]
      }
    ])
  end

  @doc "URLs of all published posts; dirty read, no txn."
  def urls_local_public do
    :mnesia.dirty_select(__MODULE__, [
      {
        {__MODULE__, :"$1", :"$2", :"$3", :"$4", :"$5", :"$6", :"$7", :"$8"},
        [{:==, :"$5", :published}],
        [:"$1"]
      }
    ])
  end

  @doc """
  Imports a JSON-lines dump of posts into the table in one transaction.

  URLs starting with any of `local_domains` are relativized; posts without
  a status become `:published` when local ("/"-prefixed) or `:fetched` otherwise.
  """
  def import_json_lines(text, local_domains \\ ["http://localhost", "https://localhost"])
      when is_binary(text) and is_list(local_domains) do
    Memento.transaction!(fn ->
      text
      |> String.splitter("\n")
      |> Stream.filter(&(String.length(&1) > 1))
      |> Stream.map(&Jason.decode!/1)
      |> Stream.map(&__MODULE__.from_map/1)
      |> Stream.map(fn post ->
        url = Enum.reduce(local_domains, post.url, &String.replace_prefix(&2, &1, ""))

        %{
          post
          | url: url,
            status:
              post.status ||
                if(String.starts_with?(url, "/"), do: :published, else: :fetched)
        }
      end)
      |> Enum.each(&Memento.Query.write/1)
    end)
  end

  @doc """
  Converts an mf2/jf2 map to a Post struct.

  Keys can be either strings or atoms on the top level.
  Should be strings inside properties though
  (we don't touch it here and the rest of the system expects strings).
  """
  def from_map(map) when is_map(map) do
    url = map_prop(map, "url", :url)

    published =
      case DateTimeParser.parse_datetime(map_prop(map, "published", :published), assume_utc: true) do
        {:ok, d} ->
          d

        {:error, e} ->
          # FIX: `e` was bound but unused (compiler warning); include the
          # parse error in the structured log event for debuggability.
          Logger.warn(
            "could not parse published: '#{inspect(map_prop(map, "published", :published))}'",
            event: %{date_parse_failed: %{prop: "published", map: map, error: inspect(e)}}
          )

          nil
      end

    updated =
      case DateTimeParser.parse_datetime(map_prop(map, "updated", :updated), assume_utc: true) do
        {:ok, d} ->
          d

        {:error, e} ->
          Logger.warn("could not parse updated: '#{inspect(map_prop(map, "updated", :updated))}'",
            event: %{date_parse_failed: %{prop: "updated", map: map, error: inspect(e)}}
          )

          nil
      end

    # Merge properties/props (string or atom keyed) with the remaining
    # top-level keys, then strip everything that lives in struct fields.
    %__MODULE__{
      props:
        (map["properties"] || %{})
        |> Map.merge(map[:properties] || %{})
        |> Map.merge(map["props"] || %{})
        |> Map.merge(map[:props] || %{})
        |> Map.merge(
          map
          |> Map.delete("properties")
          |> Map.delete(:properties)
          |> Map.delete("props")
          |> Map.delete(:props)
          |> Map.delete("type")
          |> Map.delete(:type)
          |> Map.delete("deleted")
          |> Map.delete(:deleted)
          |> Map.delete("acl")
          |> Map.delete(:acl)
          |> Map.delete("children")
          |> Map.delete(:children)
          |> Map.delete("tsv")
          |> Map.delete(:tsv)
        )
        |> Map.delete("url")
        |> Map.delete(:url)
        |> Map.delete("published")
        |> Map.delete(:published)
        |> Map.delete("updated")
        |> Map.delete(:updated)
        |> Map.delete("status")
        |> Map.delete(:status)
        |> Map.update(
          "category",
          [],
          &Enum.filter(as_many(&1), fn x -> is_binary(x) and String.length(x) > 0 end)
        ),
      url: if(is_binary(url), do: url, else: "___WTF"),
      type: String.replace_prefix(as_one(map["type"] || map[:type]), "h-", ""),
      deleted: map["deleted"] || map[:deleted],
      published: published,
      updated: updated,
      status: valid_status(map_prop(map, "status", :status)),
      children: map["children"] || map[:children]
    }
  end

  @doc """
  Converts a Post struct to a "simplified" (jf2-ish) map.
  """
  def to_map(%__MODULE__{
        status: status,
        props: props,
        url: url,
        type: type,
        deleted: deleted,
        published: published,
        updated: updated,
        children: children
      }) do
    props
    |> add_dates(published: published, updated: updated)
    |> Map.put("url", url)
    |> Map.put("type", type)
    |> Map.put("deleted", deleted)
    |> Map.put("children", children)
    |> Map.put("status", to_string(status))
  end

  def to_map(x) when is_map(x), do: x

  @doc """
  Converts a Post struct to a "full" (mf2-source-ish) map.
  """
  def to_full_map(%__MODULE__{
        status: status,
        props: props,
        url: url,
        type: type,
        # deleted: deleted,
        published: published,
        updated: updated,
        children: children
      }) do
    props =
      props
      |> add_dates(published: published, updated: updated)
      |> Map.put("url", url)
      |> Map.put("status", as_many(to_string(status)))

    %{
      type: as_many("h-" <> type),
      properties: for({k, v} <- props, into: %{}, do: {k, as_many(v)}),
      children: children
    }
  end

  def to_full_map(x) when is_map(x), do: x

  # Writes ISO8601 "published"/"updated" into props when present.
  defp add_dates(props, published: published, updated: updated) when is_map(props) do
    props
    |> (fn x ->
          if published, do: Map.put(x, "published", DateTime.to_iso8601(published)), else: x
        end).()
    |> (fn x -> if updated, do: Map.put(x, "updated", DateTime.to_iso8601(updated)), else: x end).()
  end

  # Looks up a property by string or atom key, top-level or under "properties".
  defp map_prop(map, prop_str, prop_atom)
       when is_map(map) and is_binary(prop_str) and is_atom(prop_atom) do
    as_one(
      map[prop_str] || map[prop_atom] ||
        map["properties"][prop_str] || map[:properties][prop_atom]
    )
  end

  @doc "Extracts a URL from either a bare string or an mf2-ish map."
  def as_url(s) when is_binary(s), do: s
  def as_url(m) when is_map(m), do: map_prop(m, "url", :url)

  @doc "Set of context URLs this post references (replies, likes, syndication, …)."
  def contexts_for(props) do
    (as_many(props["in-reply-to"]) ++
       as_many(props["like-of"]) ++
       as_many(props["repost-of"]) ++
       as_many(props["quotation-of"]) ++
       as_many(props["bookmark-of"]) ++
       as_many(props["syndication"]))
    |> Enum.map(&as_url/1)
    |> MapSet.new()
  end

  @doc "Filters local, non-deleted URLs to those whose post has the given type."
  def filter_type(urls, posts, type) when is_binary(type) do
    Stream.filter(urls, fn url ->
      posts[url] && posts[url].type == type && !(posts[url].deleted || false) &&
        String.starts_with?(url, "/")
    end)
  end

  def filter_type(urls, posts, types) when is_list(types) do
    Stream.filter(urls, fn url ->
      posts[url] && Enum.any?(types, &(posts[url].type == &1)) && !(posts[url].deleted || false) &&
        String.starts_with?(url, "/")
    end)
  end

  @doc "Normalizes a status value (string or one-element list) to an atom, or nil."
  def valid_status([x]), do: valid_status(x)
  def valid_status("fetched"), do: :fetched
  def valid_status("published"), do: :published
  def valid_status("draft"), do: :draft
  def valid_status("private"), do: :private
  def valid_status(_), do: nil

  @doc "Maps `replacer` over every value of every property (values normalized to lists)."
  def replace_in_props(props, replacer) when is_map(props) do
    Enum.map(props, fn {k, v} ->
      {k, as_many(v) |> Enum.map(replacer)}
    end)
    |> Enum.into(%{})
  end
end
257 |
--------------------------------------------------------------------------------
/lib/sweetroll2/post/comments.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Post.Comments do
  @moduledoc """
  Data helpers for presenting post responses/reactions.
  """

  require Logger
  import Sweetroll2.Convert
  alias Sweetroll2.Post

  @doc """
  Splits "comments" (saved webmentions) by post type.

  Requires entries to be maps (does not load urls from the database).
  i.e. inline_comments should be done first.

  Lists are reversed.
  """
  def separate_comments(%Post{url: url, props: %{"comment" => comments}})
      when is_list(comments) do
    # Buckets: :replies / :likes / :reposts / :bookmarks / :quotations,
    # chosen by which mf2 property of the comment points back at `url`.
    Enum.reduce(comments, %{}, fn x, acc ->
      cond do
        not is_map(x) -> acc
        # TODO reacji
        compare_property(x, "in-reply-to", url) -> Map.update(acc, :replies, [x], &[x | &1])
        compare_property(x, "like-of", url) -> Map.update(acc, :likes, [x], &[x | &1])
        compare_property(x, "repost-of", url) -> Map.update(acc, :reposts, [x], &[x | &1])
        compare_property(x, "bookmark-of", url) -> Map.update(acc, :bookmarks, [x], &[x | &1])
        compare_property(x, "quotation-of", url) -> Map.update(acc, :quotations, [x], &[x | &1])
        true -> acc
      end
    end)
  end

  def separate_comments(%Post{}), do: %{}

  @doc """
  Inlines posts mentioned by URL in the `comment` property.

  The inlined ones are Post structs, but other things in the array remain as-is.
  """
  def inline_comments(%Post{url: url, props: props} = post, posts) do
    comments =
      props["comment"]
      |> as_many()
      |> Enum.map(fn
        u when is_binary(u) ->
          Logger.debug("inlining", event: %{inlining_comment: %{comment: u, into: url}})
          posts[u]

        x ->
          x
      end)

    Map.put(post, :props, Map.put(props, "comment", comments))
  end

  # Entry point for a bare URL: look it up, then recurse on the loaded post.
  def inline_comments(post_url, posts) when is_binary(post_url) do
    res = posts[post_url]
    if res != post_url, do: inline_comments(res, posts), else: res
  end

  def inline_comments(x, _), do: x

  # Reads a property from a Post struct or a loosely-shaped mf2 map
  # (string/atom keys, top-level or nested under properties/props).
  defp lookup_property(%Post{props: props}, prop), do: props[prop]

  defp lookup_property(x, prop) when is_map(x) do
    x[prop] || x["properties"][prop] || x[:properties][prop] || x["props"][prop] ||
      x[:props][prop]
  end

  defp lookup_property(_, _), do: false

  defp get_url(s) when is_binary(s), do: s

  defp get_url(m) when is_map(m), do: lookup_property(m, "url") |> as_one

  defp get_url(x) do
    Logger.warn("cannot get_url", event: %{get_url_unknown_type: %{thing: inspect(x)}})
    nil
  end

  # True when any URL in the comment's `prop` matches `url`
  # (exactly, or by path only -- see rationale below).
  defp compare_property(x, prop, url) when is_binary(prop) and is_binary(url) do
    lookup_property(x, prop)
    |> as_many()
    |> Stream.map(&get_url/1)
    |> Enum.any?(fn val ->
      val &&
        (val == url || URI.parse(val).path == URI.parse(url).path)
    end)

    # Assumes that path match is enough to avoid needing to know our host.
    # If a post is already in the comments list, it has been verified to link here.
    # Faking e.g. a like by liking the same path on a different domain and just mentioning this one is..
    # Not a significant concern really.
  end
end
97 |
--------------------------------------------------------------------------------
/lib/sweetroll2/post/db_as_map.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Post.DbAsMap do
  @behaviour Access
  @moduledoc """
  Access implementation for the post database.

  The idea is that you can either use a Map
  (for a preloaded local snapshot of the DB or for test data)
  or this blank struct (for live DB access).

  Process dictionary caching is used because Mnesia access is
  not actually as fast as process-local data.
  So this is designed for web requests, not long-running processes.
  """

  defstruct []

  @impl Access
  def fetch(%__MODULE__{}, key) do
    if cached = Process.get(key) do
      {:ok, cached}
    else
      load_and_cache(key)
    end
  end

  # Builds the Post struct straight from the raw record tuple
  # (Memento.Query.Data.load is too dynamic -> too slow) and memoizes it
  # in the process dictionary under the URL key.
  defp load_and_cache(key) do
    case :mnesia.dirty_read(Sweetroll2.Post, key) do
      [{Sweetroll2.Post, url, deleted, published, updated, status, type, props, children} | _] ->
        post = %Sweetroll2.Post{
          url: url,
          deleted: deleted,
          published: published,
          updated: updated,
          status: status,
          type: type,
          props: props,
          children: children
        }

        Process.put(key, post)
        {:ok, post}

      _ ->
        :error
    end
  end
end
45 |
--------------------------------------------------------------------------------
/lib/sweetroll2/post/generative.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Post.Generative do
  @moduledoc """
  Behaviour for post type processors that generate virtual sub-posts.

  Implementations (Feed, Inbox, Pagination, Tag) expand one stored post into
  generated pages addressed by URL suffixes (e.g. "/page2", "/sometag").
  """

  alias Sweetroll2.Post

  # Processor-specific parameters parsed from a URL segment,
  # e.g. %{page: 2} or %{tag: "memes"}.
  @type args :: %{atom => any}
  @type posts :: any
  @type local_urls :: any

  @callback apply_args(%Post{}, args, posts, local_urls) :: %Post{}
  @callback child_urls(%Post{}, posts, local_urls) :: %{String.t() => args}
  @callback parse_url_segment(%Post{}, String.t()) :: {String.t(), args} | :error

  # The three groups below dispatch on the post's type string to the matching
  # processor module; non-generative types fall through to identity/empty/error.

  def apply_args(%Post{type: "x-dynamic-feed"} = post, args, posts, local_urls),
    do: Post.Generative.Feed.apply_args(post, args, posts, local_urls)

  def apply_args(%Post{type: "x-inbox-feed"} = post, args, posts, local_urls),
    do: Post.Generative.Inbox.apply_args(post, args, posts, local_urls)

  def apply_args(%Post{type: "x-paginated-feed"} = post, args, posts, local_urls),
    do: Post.Generative.Pagination.apply_args(post, args, posts, local_urls)

  def apply_args(%Post{type: "x-dynamic-tag-feed"} = post, args, posts, local_urls),
    do: Post.Generative.Tag.apply_args(post, args, posts, local_urls)

  def apply_args(%Post{} = post, _, _, _), do: post

  def child_urls(%Post{type: "x-dynamic-feed"} = post, posts, local_urls),
    do: Post.Generative.Feed.child_urls(post, posts, local_urls)

  def child_urls(%Post{type: "x-inbox-feed"} = post, posts, local_urls),
    do: Post.Generative.Inbox.child_urls(post, posts, local_urls)

  def child_urls(%Post{type: "x-paginated-feed"} = post, posts, local_urls),
    do: Post.Generative.Pagination.child_urls(post, posts, local_urls)

  def child_urls(%Post{type: "x-dynamic-tag-feed"} = post, posts, local_urls),
    do: Post.Generative.Tag.child_urls(post, posts, local_urls)

  # NOTE(review): returns [] while the callback spec says a url => args map;
  # callers here only Enum over the result so both work, but %{} would match
  # the spec -- verify before changing.
  def child_urls(%Post{}, _, _), do: []

  def parse_url_segment(%Post{type: "x-dynamic-feed"} = post, seg),
    do: Post.Generative.Feed.parse_url_segment(post, seg)

  def parse_url_segment(%Post{type: "x-inbox-feed"} = post, seg),
    do: Post.Generative.Inbox.parse_url_segment(post, seg)

  def parse_url_segment(%Post{type: "x-paginated-feed"} = post, seg),
    do: Post.Generative.Pagination.parse_url_segment(post, seg)

  def parse_url_segment(%Post{type: "x-dynamic-tag-feed"} = post, seg),
    do: Post.Generative.Tag.parse_url_segment(post, seg)

  def parse_url_segment(%Post{}, _), do: :error

  @doc ~S"""
  Recursively expands a list of URLs to include sub-posts generated by generative posts.

  iex> Generative.list_generated_urls(["/notes", "/tag", "/notes/dank-meme-7"], Map.merge(
  ...>   %{
  ...>     "/tag" => %Sweetroll2.Post{
  ...>       url: "/tag", type: "x-dynamic-tag-feed",
  ...>       props: %{ "filter" => [ %{"category" => ["{tag}"]} ] }
  ...>     },
  ...>     "/notes" => %Sweetroll2.Post{
  ...>       url: "/notes", type: "x-dynamic-feed",
  ...>       props: %{ "filter" => [ %{"category" => ["_notes"]} ] }
  ...>     },
  ...>   },
  ...>   Map.new(0..11, &{ "/notes/dank-meme-#{&1}",
  ...>     %Sweetroll2.Post{
  ...>       url: "/notes/dank-meme-#{&1}", type: "entry",
  ...>       props: %{ "category" => ["_notes", "memes"] ++ (if &1 < 5, do: ["dank"], else: []) }
  ...>     }
  ...>   })), ["/notes" | ["/tag" | Enum.map(0..11, &"/notes/dank-meme-#{&1}")]])
  ["/notes", "/notes/page1", "/tag", "/tag/dank", "/tag/memes", "/tag/memes/page1", "/notes/dank-meme-7"]
  """
  def list_generated_urls(urls, posts, local_urls) do
    Enum.flat_map(urls, fn url ->
      [url | child_urls_rec(posts[url], posts, local_urls)]
    end)
  end

  # Depth-first expansion: each generated child may itself generate children
  # (e.g. dynamic tag feed -> per-tag feed -> pages).
  def child_urls_rec(post, posts, local_urls) do
    child_urls(post, posts, local_urls)
    |> Enum.flat_map(fn {url, args} ->
      [url | child_urls_rec(apply_args(post, args, posts, local_urls), posts, local_urls)]
    end)
    |> Enum.uniq()
  end

  @doc ~S"""
  Looks up a post in posts, even if it's a sub-post generated by (a chain of) generative posts.

  iex> post = Generative.lookup("/tag/memes/page1", Map.merge(
  ...>   %{
  ...>     "/tag" => %Sweetroll2.Post{
  ...>       url: "/tag", type: "x-dynamic-tag-feed",
  ...>       props: %{ "filter" => [ %{"category" => ["{tag}"]} ] }
  ...>     },
  ...>   },
  ...>   Map.new(0..11, &{ "/notes/dank-meme-#{&1}",
  ...>     %Sweetroll2.Post{
  ...>       url: "/notes/dank-meme-#{&1}", type: "entry",
  ...>       props: %{ "category" => ["dank", "memes"] }
  ...>     }
  ...>   })), ["/tag" | Enum.map(0..11, &"/notes/dank-meme-#{&1}")])
  iex> post.children == ["/notes/dank-meme-10", "/notes/dank-meme-11"]
  true
  iex> post.url == "/tag/memes/page1"
  true
  iex> post.type == "feed"
  true

  iex> post = Generative.lookup("/tag",
  ...>   %{
  ...>     "/tag" => %Sweetroll2.Post{
  ...>       url: "/tag", type: "x-dynamic-tag-feed",
  ...>       props: %{ "filter" => [ %{"category" => ["{tag}"]} ] }
  ...>     },
  ...>   }, ["/tag"])
  iex> post.type == "feed"
  true

  iex> post = Generative.lookup("/",
  ...>   %{
  ...>     "/" => %Sweetroll2.Post{
  ...>       url: "/", type: "entry",
  ...>     },
  ...>   }, ["/"])
  iex> post.type == "entry"
  true
  """
  def lookup(url, posts, local_urls) do
    # NOTE(review): find_first_matching_prefix can return nil (e.g. when even
    # "/" is missing from posts) -- this tuple match would then raise MatchError.
    {generator, suffix} =
      if url == "/",
        do: {posts["/"], ""},
        else: find_first_matching_prefix(String.split(url, "/", trim: true), [], posts)

    generator && lookup_rec(generator, suffix, posts, local_urls)
  end

  # A non-generative type ends the recursion: it is the result if the whole URL
  # has been consumed, and a dead end (nil) if a suffix remains.
  defp lookup_rec(%Post{type: type} = post, "", _, _)
       when type != "x-dynamic-feed" and type != "x-inbox-feed" and type != "x-dynamic-tag-feed",
       do: post

  defp lookup_rec(%Post{type: type}, _, _, _)
       when type != "x-dynamic-feed" and type != "x-inbox-feed" and type != "x-dynamic-tag-feed",
       do: nil

  # Generative type: parse one URL segment into args, apply them, then recurse
  # on the remaining suffix.
  defp lookup_rec(%Post{} = generator, url_suffix, posts, local_urls) do
    case parse_url_segment(generator, url_suffix) do
      :error ->
        nil

      {next_suffix, args} ->
        apply_args(generator, args, posts, local_urls)
        |> lookup_rec(next_suffix, posts, local_urls)
    end
  end

  @doc """
  Finds the generative post probably responsible for the URL.
  (First argument is split parts of the URL, second is for recursion.)

  iex> Generative.find_first_matching_prefix([""], [], %{"/" => 1})
  {1, ""}

  iex> Generative.find_first_matching_prefix(["page1"], [], %{"/" => 1})
  {1, "/page1"}

  iex> Generative.find_first_matching_prefix(["one"], [], %{"/one" => 1})
  {1, ""}

  iex> Generative.find_first_matching_prefix(["one", "page2"], [], %{"/one" => 1})
  {1, "/page2"}

  iex> Generative.find_first_matching_prefix(["tag", "memes", "page69"], [], %{"/tag" => :tagpage})
  {:tagpage, "/memes/page69"}

  iex> Generative.find_first_matching_prefix(["memes", "page69"], [], %{"/tag" => :tagpage})
  nil
  """
  # Longest-prefix match: try the whole path, then keep moving the last segment
  # of `l` onto the suffix `r` until a known post is found (or nothing is left).
  def find_first_matching_prefix(l, r, posts) do
    if post = posts["/#{Enum.join(l, "/")}"] do
      r_path = if Enum.empty?(r), do: "", else: "/" <> Enum.join(r, "/")
      {post, r_path}
    else
      case Enum.split(l, -1) do
        {ll, [rr]} -> find_first_matching_prefix(ll, [rr | r], posts)
        _ -> nil
      end
    end
  end
end
198 |
--------------------------------------------------------------------------------
/lib/sweetroll2/post/generative/feed.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Post.Generative.Feed do
  @moduledoc """
  Post type processor for `x-dynamic-feed`.

  Selects matching entries by filter props, then delegates page handling to
  the `x-paginated-feed` processor.
  """

  alias Sweetroll2.{Convert, Post, Post.Generative, Post.Generative.Pagination}

  @behaviour Generative

  # Resolve the dynamic feed into a concrete paginated feed whose children
  # are the matching entry URLs, sorted newest-first.
  defp to_paginated_feed(post, posts, local_urls) do
    entries =
      post
      |> filter_feed_entries(posts, local_urls)
      |> sort_feed_entries(posts)

    %{post | type: "x-paginated-feed", children: entries}
  end

  @impl true
  def apply_args(%Post{type: "x-dynamic-feed"} = post, args, posts, local_urls) do
    post
    |> to_paginated_feed(posts, local_urls)
    |> Pagination.apply_args(args, posts, local_urls)
  end

  @impl true
  def child_urls(%Post{type: "x-dynamic-feed"} = post, posts, local_urls) do
    post
    |> to_paginated_feed(posts, local_urls)
    |> Pagination.child_urls(posts, local_urls)
  end

  # Lazily selects local ("/…"), existing, non-deleted posts matching the feed.
  def filter_feed_entries(%Post{type: "x-dynamic-feed"} = feed, posts, local_urls) do
    Stream.filter(local_urls, fn url ->
      case posts[url] do
        nil ->
          false

        post ->
          !(post.deleted || false) and String.starts_with?(url, "/") and in_feed?(post, feed)
      end
    end)
  end

  @doc """
  iex> Feed.matches_filter?(%Post{props: %{"category" => "test", "x" => "y"}}, %{"category" => "test"})
  true

  iex> Feed.matches_filter?(%Post{props: %{"category" => ["test", "memes"], "what" => "ever"}}, %{"category" => "test"})
  true

  iex> Feed.matches_filter?(%Post{props: %{"category" => ["test"], "ping" => "pong"}}, %{"category" => ["test"]})
  true

  iex> Feed.matches_filter?(%Post{props: %{"category" => [], "ping" => "pong"}}, %{"category" => ["test"]})
  false

  iex> Feed.matches_filter?(%Post{props: %{"aaa" => "bbb"}}, %{"category" => ["test"]})
  false
  """
  def matches_filter?(%Post{} = post, filter) do
    Enum.all?(filter, fn {prop, wanted} ->
      present = Convert.as_many(post.props[prop])
      wanted |> Convert.as_many() |> Enum.all?(&(&1 in present))
    end)
  end

  # True when at least one of the given filters matches.
  def matches_filters?(%Post{} = post, filters) do
    Enum.any?(filters, &matches_filter?(post, &1))
  end

  # A post belongs to a feed when some "filter" matches and no "unfilter" does.
  def in_feed?(%Post{} = post, %Post{} = feed) do
    matches_filters?(post, Convert.as_many(feed.props["filter"])) and
      not matches_filters?(post, Convert.as_many(feed.props["unfilter"]))
  end

  # Newest first; posts without a known publish date sort as "now", i.e. first.
  def sort_feed_entries(urls, posts) do
    fallback = DateTime.utc_now()

    Enum.sort_by(urls, fn url ->
      published = (posts[url] && posts[url].published) || fallback
      -DateTime.to_unix(published)
    end)
  end

  @impl true
  defdelegate parse_url_segment(post, seg), to: Pagination
end
79 |
--------------------------------------------------------------------------------
/lib/sweetroll2/post/generative/inbox.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Post.Generative.Inbox do
  @moduledoc """
  Post type processor for `x-inbox-feed`.

  Like a dynamic feed, but over the saved comments (webmentions) of local
  posts rather than the local posts themselves.
  """

  alias Sweetroll2.{
    Convert,
    Post,
    Post.Generative,
    Post.Generative.Feed,
    Post.Generative.Pagination
  }

  @behaviour Generative

  # Resolve into a concrete paginated feed of matching comment URLs.
  defp to_paginated_feed(post, posts, local_urls) do
    children =
      post
      |> filter_feed_entries(posts, local_urls)
      |> Feed.sort_feed_entries(posts)

    %{post | type: "x-paginated-feed", children: children}
  end

  @impl true
  def apply_args(%Post{type: "x-inbox-feed"} = post, args, posts, local_urls) do
    post
    |> to_paginated_feed(posts, local_urls)
    |> Pagination.apply_args(args, posts, local_urls)
  end

  @impl true
  def child_urls(%Post{type: "x-inbox-feed"} = post, posts, local_urls) do
    post
    |> to_paginated_feed(posts, local_urls)
    |> Pagination.child_urls(posts, local_urls)
  end

  # Collects the "comment" entries of all local non-deleted posts, then keeps
  # the ones that exist as posts, have a publish date, and match the feed.
  def filter_feed_entries(%Post{type: "x-inbox-feed"} = feed, posts, local_urls) do
    local_urls
    |> Stream.filter(fn url ->
      not is_nil(posts[url]) and !(posts[url].deleted || false) and
        String.starts_with?(url, "/")
    end)
    |> Stream.flat_map(fn url -> Convert.as_many(posts[url].props["comment"]) end)
    |> Stream.uniq()
    |> Stream.filter(fn url ->
      posts[url] && posts[url].published && !(posts[url].deleted || false) &&
        Feed.in_feed?(posts[url], feed)
    end)
  end

  @impl true
  defdelegate parse_url_segment(post, seg), to: Pagination
end
52 |
--------------------------------------------------------------------------------
/lib/sweetroll2/post/generative/pagination.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Post.Generative.Pagination do
  @moduledoc """
  Post type processor for `x-paginated-feed`.

  Mainly intended for use by other feeds, but if you wanted to paginate a manually curated feed, you could.
  """

  require Logger
  alias Sweetroll2.{Convert, Post, Post.Generative}

  @behaviour Generative

  # Page 0 is the feed's own URL; subsequent pages live at "<url>/pageN".
  # replace_leading collapses the double slash produced by the root feed "/".
  def page_url(url, 0), do: url
  def page_url(url, page), do: String.replace_leading("#{url}/page#{page}", "//", "/")

  # Entries per page; overridable via the "pagination-settings" prop ("per-page").
  def per_page(%Post{props: props}) do
    Convert.as_one(props["pagination-settings"])["per-page"] || 10
  end

  @impl true
  # Materializes one concrete page: becomes a plain "feed" post with the
  # relevant slice of children, recording base URL / current page / page count.
  def apply_args(
        %Post{type: "x-paginated-feed", url: url, props: props, children: children} = post,
        %{page: page},
        _,
        _
      ) do
    pp = per_page(post)

    %{
      post
      | url: page_url(url, page),
        type: "feed",
        children: Enum.slice(children, page * pp, pp),
        props:
          props
          |> Map.put("x-feed-base-url", url)
          |> Map.put("x-cur-page", page)
          # reuse pp -- this used to recompute per_page(post) redundantly
          |> Map.put("x-page-count", ceil(Enum.count(children) / pp))
    }
  end

  @impl true
  # Pages 1..cnt-1 are virtual children (page 0 is the feed's own URL, so a
  # single-page feed generates nothing).
  def child_urls(%Post{type: "x-paginated-feed", url: url, children: children} = post, _, _) do
    cnt = ceil(Enum.count(children) / per_page(post))

    if cnt < 2, do: %{}, else: Map.new(1..(cnt - 1), &{page_url(url, &1), %{page: &1}})
  end

  @impl true
  @doc """
  iex> Pagination.parse_url_segment(nil, "/page123")
  {"", %{page: 123}}

  iex> Pagination.parse_url_segment(nil, "/page1/what")
  {"/what", %{page: 1}}
  """
  def parse_url_segment(_, ""), do: {"", %{page: 0}}

  def parse_url_segment(_, "/page" <> n) do
    case Integer.parse(n) do
      {n, rest} -> {rest, %{page: n}}
      :error -> :error
    end
  end

  def parse_url_segment(_, _), do: :error
end
68 |
--------------------------------------------------------------------------------
/lib/sweetroll2/post/generative/tag.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Post.Generative.Tag do
  @moduledoc """
  Post type processor for `x-dynamic-tag-feed`.

  A tag feed is a template: its "{tag}" placeholder is substituted with each
  tag in use, producing one `x-dynamic-feed` per tag.
  """

  alias Sweetroll2.{Convert, Post, Post.Generative}

  @behaviour Generative

  # Expands tag feeds in the given list into one concrete feed per tag;
  # non-tag feeds pass through unchanged.
  # TODO: something with this
  def feeds_get_with_tags(feed_urls, posts: posts, local_urls: local_urls) do
    Enum.flat_map(feed_urls, fn url ->
      post = posts[url]

      if post.type == "x-dynamic-tag-feed" do
        child_urls(post, posts, local_urls)
        |> Map.values()
        |> Enum.map(&apply_args(post, &1, posts, local_urls))
      else
        [post]
      end
    end)
  end

  @impl true
  # Specializes the template for one tag: substitutes "{tag}" in the name and
  # filters, and becomes a regular x-dynamic-feed at "<url>/<tag>".
  def apply_args(
        %Post{type: "x-dynamic-tag-feed", url: url, props: props} = post,
        %{tag: tag},
        _,
        _
      ) do
    props =
      props
      |> Map.update("name", tag, &String.replace(Convert.as_one(&1), "{tag}", tag))
      |> Map.update("filter", [], &subst_inner(Convert.as_many(&1), tag))

    %{post | type: "x-dynamic-feed", props: props, url: "#{url}/#{tag}"}
  end

  # Recursively replaces the "{tag}" placeholder inside nested maps/lists/strings.
  defp subst_inner(m, tag) when is_map(m),
    do: Enum.map(m, fn {k, v} -> {k, subst_inner(v, tag)} end) |> Enum.into(%{})

  defp subst_inner(l, tag) when is_list(l), do: Enum.map(l, &subst_inner(&1, tag))
  defp subst_inner(s, tag) when is_binary(s), do: String.replace(s, "{tag}", tag)
  defp subst_inner(x, _), do: x

  @impl true
  # One child URL per distinct tag used on any local post; tags starting with
  # "_" are internal feed markers and are skipped.
  # NOTE(review): assumes every entry of local_urls resolves via posts[url] --
  # a nil post would crash here; verify callers always pass known URLs.
  def child_urls(%Post{type: "x-dynamic-tag-feed", url: url}, posts, local_urls) do
    local_urls
    |> Stream.flat_map(&Convert.as_many(posts[&1].props["category"]))
    |> Stream.filter(&(is_binary(&1) and !String.starts_with?(&1, "_")))
    |> Enum.uniq()
    |> Map.new(&{url <> "/" <> &1, %{tag: &1}})
  end

  @impl true
  @doc """
  iex> Tag.parse_url_segment(nil, "/whatevs")
  {"", %{tag: "whatevs"}}

  iex> Tag.parse_url_segment(nil, "/hello%20world/page69")
  {"/page69", %{tag: "hello%20world"}}
  """

  def parse_url_segment(_, "/" <> arg) do
    case String.split(arg, "/", parts: 2) do
      [tag, rest] -> {"/" <> rest, %{tag: tag}}
      [tag] -> {"", %{tag: tag}}
      # unreachable with parts: 2 (split returns 1 or 2 parts); defensive fallback
      _ -> :error
    end
  end

  def parse_url_segment(_, ""), do: {"", %{tag: ""}}

  def parse_url_segment(_, _), do: :error
end
77 |
--------------------------------------------------------------------------------
/lib/sweetroll2/post/page.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Post.Page do
  @moduledoc """
  Support for `x-custom-page` posts: their HTML content is a Liquid template,
  parsed here and cached per URL.
  """

  alias Sweetroll2.{Post, Convert}
  require Logger

  defdelegate render(tpl, ctx), to: Liquid.Template

  # Parsed templates are memoized per URL in the :parsed_tpl ConCache table.
  def get_template(%Post{type: "x-custom-page", url: url} = post) do
    ConCache.get_or_store(:parsed_tpl, url, fn ->
      get_template_raw(post)
    end)
  end

  # Parses the page's "content" HTML as a Liquid template (no caching).
  def get_template_raw(%Post{type: "x-custom-page", props: props}) do
    Logger.debug("parsing tpl", event: %{parsing_template: %{props: props}})

    (Convert.as_one(props["content"]) || %{})["html"]
    |> Liquid.Template.parse()
  end

  # Invalidates the cached parse, e.g. after the page is edited.
  def clear_cached_template(url: url), do: ConCache.delete(:parsed_tpl, url)

  # Walks the parsed template tree, collecting the markup of :feedpreview tags,
  # i.e. which feeds the page depends on.
  def used_feeds(%Post{} = post), do: used_feeds(get_template(post))
  def used_feeds(%Liquid.Template{root: root}), do: used_feeds(root)

  def used_feeds(%Liquid.Block{nodelist: nodelist}),
    do: Enum.flat_map(nodelist, &used_feeds/1)

  def used_feeds(%Liquid.Tag{name: :feedpreview, markup: markup}), do: [markup]
  def used_feeds(_), do: []
end
31 |
--------------------------------------------------------------------------------
/lib/sweetroll2/render.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Render.Tpl do
  @moduledoc """
  `deftpl name, file` defines a function `name(assigns)` compiled at build
  time from the EEx template in `file`, using the HTML-escaping
  `Phoenix.HTML.Engine`.
  """

  defmacro deftpl(name, file) do
    quote do
      EEx.function_from_file(:def, unquote(name), unquote(file), [:assigns],
        engine: Phoenix.HTML.Engine
      )
    end
  end
end
10 |
11 | defmodule Sweetroll2.Render do
12 | alias Sweetroll2.{Post, Markup}
13 | import Sweetroll2.{Convert, Render.Tpl}
14 | import Phoenix.HTML.Tag
15 | import Phoenix.HTML
16 | require Logger
17 | require EEx
18 |
  # Each deftpl (imported from Sweetroll2.Render.Tpl) compiles the given EEx
  # template into a function `name(assigns)` at build time.
  deftpl :head, "tpl/head.html.eex"
  deftpl :header, "tpl/header.html.eex"
  deftpl :footer, "tpl/footer.html.eex"
  deftpl :entry, "tpl/entry.html.eex"
  deftpl :cite, "tpl/cite.html.eex"
  deftpl :page_entry, "tpl/page_entry.html.eex"
  deftpl :page_feed, "tpl/page_feed.html.eex"
  deftpl :page_login, "tpl/page_login.html.eex"
  deftpl :page_authorize, "tpl/page_authorize.html.eex"
28 |
29 | @doc """
30 | Renders a post, choosing the right template based on its type.
31 |
32 | - `post`: current post
33 | - `posts`: `Access` object for retrieval of posts by URL
34 | - `local_urls`: Enumerable of at least local URLs -- all URLs are fine, will be filtered anyway
35 | - `logged_in`: bool
36 | """
37 | def render_post(
38 | post: post = %Post{},
39 | posts: posts,
40 | local_urls: local_urls,
41 | logged_in: logged_in
42 | ) do
43 | feed_urls = Post.filter_type(local_urls, posts, ["x-dynamic-feed", "x-dynamic-tag-feed"])
44 |
45 | feeds_with_tags =
46 | feed_urls_filter(feed_urls,
47 | posts: posts,
48 | show_prop: "show-in-post",
49 | order_prop: "order-in-post"
50 | )
51 | |> Post.Generative.Tag.feeds_get_with_tags(posts: posts, local_urls: local_urls)
52 |
53 | cond do
54 | post.type == "entry" || post.type == "review" ->
55 | post = Post.Comments.inline_comments(post, posts)
56 |
57 | page_entry(
58 | entry: post,
59 | posts: posts,
60 | local_urls: local_urls,
61 | feed_urls: feed_urls,
62 | feeds_with_tags: feeds_with_tags,
63 | logged_in: logged_in,
64 | csp_nonce: :crypto.strong_rand_bytes(24) |> Base.url_encode64()
65 | )
66 |
67 | post.type == "feed" ->
68 | page_feed(
69 | feed: %{
70 | post
71 | | children: Enum.map(post.children, &Post.Comments.inline_comments(&1, posts))
72 | },
73 | posts: posts,
74 | local_urls: local_urls,
75 | feed_urls: feed_urls,
76 | feeds_with_tags: feeds_with_tags,
77 | logged_in: logged_in,
78 | csp_nonce: :crypto.strong_rand_bytes(24) |> Base.url_encode64()
79 | )
80 |
81 | post.type == "x-custom-page" ->
82 | {:ok, html, _} =
83 | Post.Page.get_template(post)
84 | |> Post.Page.render(%{
85 | "csp_nonce" => :crypto.strong_rand_bytes(24) |> Base.url_encode64(),
86 | "canonical_home_url" => Sweetroll2.canonical_home_url(),
87 | page: post,
88 | posts: posts,
89 | logged_in: logged_in,
90 | local_urls: local_urls,
91 | feed_urls: feed_urls,
92 | feeds_with_tags: feeds_with_tags
93 | })
94 |
95 | {:safe, html}
96 |
97 | true ->
98 | {:error, :unknown_type, post.type}
99 | end
100 | end
101 |
  # Taggart helper: when `expr` is truthy, render `block` inside a `taggart`
  # (list) context; otherwise contribute nothing ([]).
  defmacro tif(expr, do: block) do
    quote do
      if unquote(expr) do
        taggart do
          unquote(block)
        end
      else
        []
      end
    end
  end
113 |
  # for returning one thing inside taggart
  # but with local vars
  # (like `tif`, but without wrapping the body in a `taggart` list context).
  defmacro t1if(expr, do: block) do
    quote do
      if unquote(expr) do
        unquote(block)
      else
        []
      end
    end
  end
125 |
126 | @asset_dir "priv/static"
127 |
128 | def asset(url) do
129 | "/__as__/#{url}?vsn=#{
130 | ConCache.get_or_store(:asset_rev, url, fn ->
131 | :crypto.hash(:sha256, File.read!(Path.join(@asset_dir, url)))
132 | |> Base.url_encode64()
133 | |> String.slice(0, 24)
134 | end)
135 | }"
136 | end
137 |
  # Renders an inline SVG icon referencing a sprite from icons.svg.
  # `data` is keyword-accessible with :name (sprite id), optional :title
  # (accessible label, also shown as <title>) and :class (extra CSS classes).
  # NOTE(review): WAI-ARIA names this role "img", not "image" -- verify intent
  # before changing, since it's emitted markup.
  def icon(data) do
    content_tag :svg,
      role: "image",
      "aria-hidden": if(data[:title], do: "false", else: "true"),
      class: Enum.join([:icon] ++ (data[:class] || []), " "),
      title: data[:title] do
      content_tag :use, "xlink:href": "#{asset("icons.svg")}##{data[:name]}" do
        if data[:title] do
          content_tag :title do
            data[:title]
          end
        end
      end
    end
  end
153 |
154 | def reaction_icon(:replies), do: "reply"
155 | def reaction_icon(:likes), do: "star"
156 | def reaction_icon(:reposts), do: "megaphone"
157 | def reaction_icon(:quotations), do: "quote"
158 | def reaction_icon(:bookmarks), do: "bookmark"
159 | def reaction_icon(_), do: "link"
160 |
161 | def reaction_class(:replies), do: "reply"
162 | def reaction_class(:likes), do: "like"
163 | def reaction_class(:reposts), do: "repost"
164 | def reaction_class(:quotations), do: "quotation"
165 | def reaction_class(:bookmarks), do: "bookmark"
166 | def reaction_class(_), do: "comment"
167 |
  # Human-readable datetime via Timex: abbreviated month, day, year, 24h time.
  def readable_datetime!(dt), do: Timex.format!(dt, "{Mshort} {D}, {YYYY} {h24}:{m}")
169 |
  # Renders the post's permalink as a microformats2 <time class="dt-published">
  # wrapping an <a class="u-url u-uid">; a missing publish date renders as
  # empty datetime/text.
  def time_permalink(%Post{published: published, url: url}, rel: rel) do
    use Taggart.HTML

    attrdt = if published, do: DateTime.to_iso8601(published), else: ""

    readabledt = if published, do: readable_datetime!(published), else: ""

    time datetime: attrdt, class: "dt-published" do
      a href: url, class: "u-url u-uid", rel: rel do
        readabledt
      end
    end
  end
183 |
  # Like time_permalink/2, but for a cite map (external post): "published" is
  # an arbitrary string that must be parsed first; unparseable or non-string
  # dates render as empty strings rather than failing.
  def time_permalink_cite(cite) when is_map(cite) do
    use Taggart.HTML

    dt =
      if is_binary(cite["published"]) do
        DateTimeParser.parse_datetime(cite["published"], assume_utc: true)
      else
        {:error, "weird non-string date"}
      end

    attrdt =
      case dt do
        {:ok, d} -> DateTime.to_iso8601(d)
        _ -> ""
      end

    readabledt =
      case dt do
        {:ok, d} -> readable_datetime!(d)
        _ -> ""
      end

    time datetime: attrdt, class: "dt-published" do
      a href: filter_scheme(as_one(cite["url"])), class: "u-url u-uid" do
        readabledt
      end
    end
  end
212 |
213 | def trim_url_stuff(url) when is_binary(url) do
214 | url
215 | |> String.replace_leading("http://", "")
216 | |> String.replace_leading("https://", "")
217 | |> String.replace_trailing("/", "")
218 | end
219 |
  # Renders an IndieAuth client id as a u-client-id link, with the
  # scheme/trailing-slash trimmed for the visible label.
  def client_id(clid) do
    use Taggart.HTML

    lnk = as_one(clid)

    a href: lnk, class: "u-client-id" do
      trim_url_stuff(lnk)
    end
  end
229 |
230 | def syndication_name(url) do
231 | cond do
232 | String.contains?(url, "indieweb.xyz") ->
233 | trim_url_stuff(url) |> String.replace_leading("indieweb.xyz", "")
234 |
235 | String.contains?(url, "news.indieweb.org") ->
236 | "IndieNews"
237 |
238 | String.contains?(url, "lobste.rs") ->
239 | "lobste.rs"
240 |
241 | String.contains?(url, "news.ycombinator.com") ->
242 | "HN"
243 |
244 | String.contains?(url, "twitter.com") ->
245 | "Twitter"
246 |
247 | true ->
248 | trim_url_stuff(url)
249 | end
250 | end
251 |
252 | def post_title(%Post{props: props, published: published}) do
253 | name = as_one(props["name"])
254 |
255 | if is_binary(name) and String.length(name) > 0,
256 | do: name,
257 | else: DateTime.to_iso8601(published)
258 | end
259 |
  # Wraps media markup in a <responsive-container>: when width/height are known
  # it reserves the aspect ratio via padding-bottom, and shows a placeholder
  # background (palette color and/or tiny preview image) while media loads.
  def responsive_container(media, do: body) when is_map(media) do
    use Taggart.HTML

    is_resp = is_integer(media["width"]) && is_integer(media["height"])

    # This supports 2 formats:
    # - legacy sweetroll-mu: {DarkMuted:{color:"#4c4138",population:34514}}
    # - new imgroll: [{b:5,g:6,r:6}] (sorted)
    # NOTE(review): the legacy entries sort ascending by population and as_one
    # picks from the sorted list -- verify it selects the intended swatch.
    col =
      case as_one(
             Enum.sort_by(media["palette"] || [], fn
               {_, v} -> if is_map(v), do: v["population"], else: 0
               _ -> nil
             end)
           ) do
        {_, %{"color" => c}} -> c
        %{"r" => r, "g" => g, "b" => b} -> "rgb(#{r},#{g},#{b})"
        _ -> nil
      end

    prv = media["tiny_preview"]

    bcg =
      if col || prv,
        do: "background:#{col || ""} #{if prv, do: "url('#{prv}')", else: ""};",
        else: ""

    # padding-bottom as a percentage of width preserves the aspect ratio.
    pad =
      if is_resp,
        do: "padding-bottom:#{media["height"] / media["width"] * 100}%",
        else: ""

    content_tag(
      :"responsive-container",
      [class: if(is_resp, do: "has-pad", else: nil), style: "#{bcg}#{pad}"],
      do: body
    )
  end

  # Non-map media (e.g. a bare URL string): no sizing info, plain container.
  def responsive_container(_, do: body), do: content_tag(:"responsive-container", [], do: body)
300 |
301 | def src_of_srcset(src) do
302 | cond do
303 | is_binary(src["src"]) ->
304 | src["src"]
305 |
306 | is_list(src["srcset"]) ->
307 | s = List.first(src["srcset"])
308 | if is_map(s), do: s["src"], else: "ERROR"
309 | end
310 | end
311 |
312 | def format_srcset(src) do
313 | cond do
314 | is_binary(src["srcset"]) ->
315 | src["srcset"]
316 |
317 | is_list(src["srcset"]) ->
318 | Enum.map(src["srcset"], fn
319 | s when is_map(s) -> "#{s["src"]} #{s["width"]}w"
320 | _ -> "ERROR"
321 | end)
322 | |> Enum.join(", ")
323 |
324 | is_binary(src["src"]) ->
325 | src["src"]
326 |
327 | true ->
328 | "ERROR"
329 | end
330 | end
331 |
  # Renders a photo post property as a <figure>: the image itself (string URL,
  # imgroll-style "source" list rendered as a <picture>, or legacy "value"),
  # followed by camera metadata and an original-download link when available.
  def photo_rendered(photo) do
    use Taggart.HTML

    figure class: "entry-photo" do
      responsive_container(photo) do
        cond do
          is_binary(photo) ->
            img(class: "u-photo", src: photo, alt: "")

          is_map(photo) && photo["source"] ->
            srcs = as_many(photo["source"])

            # Pick the source used for the plain <img> fallback.
            # NOTE(review): the ascending sort puts {default: false, jpeg} first
            # and as_one selects from the sorted list -- verify this picks the
            # intended source.
            default =
              srcs
              |> Stream.filter(&is_map/1)
              |> Enum.sort_by(fn x -> {x["default"] || false, x["type"] != "image/jpeg"} end)
              |> as_one

            content_tag :picture do
              taggart do
                # All other non-original sources become <source> alternatives.
                srcs
                |> Stream.filter(fn src -> src != default && !src["original"] end)
                |> Enum.map(fn src ->
                  source(
                    srcset: format_srcset(src),
                    media: src["media"],
                    sizes: src["sizes"],
                    type: src["type"]
                  )
                end)

                img(
                  class: "u-photo",
                  src: src_of_srcset(default),
                  srcset:
                    if(is_list(default["srcset"]) and length(default["srcset"]) > 1,
                      do: format_srcset(default),
                      else: nil
                    ),
                  alt: photo["alt"] || ""
                )
              end
            end

          is_map(photo) && is_binary(photo["value"]) ->
            img(class: "u-photo", src: photo["value"], alt: photo["alt"] || "")

          true ->
            {:safe, ""}
        end
      end

      # Camera metadata caption (EXIF-ish props) + original download link.
      t1if is_map(photo) do
        original = as_many(photo["source"]) |> Enum.find(& &1["original"])
        aperture = photo["aperture"]
        focal_length = photo["focal_length"]
        iso = photo["iso"]
        shutter = photo["shutter_speed"]

        t1if original || aperture || focal_length || iso || shutter do
          figcaption class: "entry-photo-meta" do
            tif aperture || focal_length || iso || shutter do
              icon(name: "eye", title: "Photo parameters")

              t1if focal_length do
                span(class: "camera-focal-length", do: "#{focal_length} mm ")
              end

              t1if iso do
                span(class: "camera-iso", do: "ISO #{iso} ")
              end

              # Shutter speed is a [numerator, denominator] rational;
              # slow speeds (>= 0.3s) are shown as decimal seconds.
              t1if is_list(shutter) and length(shutter) > 1 do
                [x, y] = shutter

                span(
                  class: "camera-shutter",
                  do: if(x / y >= 0.3, do: "#{Float.round(x / y, 2)}s ", else: "#{x}/#{y} ")
                )
              end

              t1if aperture do
                span(class: "camera-aperture", do: "ƒ/#{aperture} ")
              end
            end

            tif original do
              icon(name: "desktop-download")
              a(class: "camera-original", href: src_of_srcset(original), do: "Download original")
            end
          end
        end
      end
    end
  end
427 |
428 | def video_rendered(video) do
429 | use Taggart.HTML
430 |
431 | figure class: "entry-video" do
432 | responsive_container(video) do
433 | cond do
434 | is_binary(video) ->
435 | video(class: "u-video", src: video)
436 |
437 | is_map(video) && video["source"] ->
438 | srcs = as_many(video["source"])
439 | poster = srcs |> Enum.find(&String.starts_with?(&1["type"], "image"))
440 |
441 | video class: "u-video",
442 | poster: poster["src"],
443 | controls: video["controls"] || true,
444 | autoplay: video["autoplay"] || false,
445 | loop: video["loop"] || false,
446 | muted: video["muted"] || false,
447 | playsinline: video["playsinline"] || false,
448 | width: video["width"],
449 | height: video["height"] do
450 | for src <- Enum.filter(srcs, &(!String.starts_with?(&1["type"], "image"))) do
451 | source(src: src["src"], type: src["type"])
452 | end
453 |
454 | for track <- as_many(video["track"]) do
455 | track(
456 | src: track["src"],
457 | kind: track["kind"],
458 | label: track["label"],
459 | srclang: track["srclang"],
460 | default: track["default"] || false
461 | )
462 | end
463 | end
464 |
465 | is_map(video) && is_binary(video["value"]) ->
466 | video(class: "u-video", src: video["value"])
467 |
468 | true ->
469 | {:safe, ""}
470 | end
471 | end
472 | end
473 | end
474 |
475 | def audio_rendered(audio) do
476 | use Taggart.HTML
477 |
478 | audio class: "u-audio entry-audio",
479 | controls: audio["controls"] || true,
480 | autoplay: audio["autoplay"] || false,
481 | loop: audio["loop"] || false,
482 | muted: audio["muted"] || false do
483 | tif is_list(audio["source"]) or is_binary(audio["source"]) do
484 | for src <- as_many(audio["source"]) do
485 | source(src: src["src"], type: src["type"])
486 | end
487 | end
488 |
489 | t1if is_binary(audio["value"]) do
490 | source(src: audio["value"])
491 | end
492 | end
493 | end
494 |
495 | def inline_media_into_content(tree, props: props) do
496 | Markup.inline_media_into_content(
497 | tree,
498 | %{
499 | "photo" => &photo_rendered/1,
500 | "video" => &video_rendered/1,
501 | "audio" => &audio_rendered/1
502 | },
503 | %{
504 | "photo" => as_many(props["photo"]),
505 | "video" => as_many(props["video"]),
506 | "audio" => as_many(props["audio"])
507 | }
508 | )
509 | end
510 |
511 | def to_cite(url, posts: posts) when is_binary(url) do
512 | if posts[url] do
513 | posts[url] |> Post.to_map() |> simplify
514 | else
515 | url
516 | end
517 | end
518 |
519 | def to_cite(%Post{} = entry, posts: _), do: Post.to_map(entry) |> simplify
520 |
521 | def to_cite(entry, posts: _) when is_map(entry), do: simplify(entry)
522 |
523 | def author(author, posts: _) when is_map(author) do
524 | use Taggart.HTML
525 |
526 | a href: filter_scheme(author["url"]),
527 | class: "u-author #{if author["name"], do: "h-card", else: ""}" do
528 | author["name"] || author["url"] || ""
529 | end
530 | end
531 |
532 | def author(author, posts: posts) when is_binary(author) do
533 | if posts[author] do
534 | posts[author] |> Post.to_map() |> simplify |> author(posts: posts)
535 | else
536 | author(%{"url" => author}, posts: posts)
537 | end
538 | end
539 |
540 | def home(posts) do
541 | posts["/"] ||
542 | %Post{
543 | url: "/",
544 | props: %{"name" => "Create an entry at the root URL (/)!"}
545 | }
546 | end
547 |
548 | def feed_urls_filter(feed_urls, posts: posts, show_prop: show_prop, order_prop: order_prop) do
549 | feed_urls
550 | |> Stream.filter(fn url ->
551 | try do
552 | Access.get(as_one(posts[url].props["feed-settings"]), show_prop, true)
553 | rescue
554 | _ -> true
555 | end
556 | end)
557 | |> Enum.sort_by(fn url ->
558 | try do
559 | Access.get(as_one(posts[url].props["feed-settings"]), order_prop, 1)
560 | rescue
561 | _ -> 1
562 | end
563 | end)
564 | end
565 |
566 | def filter_scheme("http://" <> _ = x), do: x
567 | def filter_scheme("https://" <> _ = x), do: x
568 | def filter_scheme(_), do: "#non_http_url_found"
569 | end
570 |
defmodule Sweetroll2.Render.LiquidTags.Head do
  # Liquid tag that renders the HTML <head> for the current page.
  alias Sweetroll2.Render

  def parse(%Liquid.Tag{} = tag, %Liquid.Template{} = context), do: {tag, context}

  def render(output, _tag, context) do
    assigns = context.assigns

    {:safe, iodata} =
      Render.head(
        title: Render.home(assigns.posts).props["site-name"],
        cur_url: assigns.page.url,
        csp_nonce: assigns["csp_nonce"]
      )

    # Prepend the rendered head onto the Liquid output accumulator.
    {[IO.iodata_to_binary(iodata) | output], context}
  end
end
589 |
defmodule Sweetroll2.Render.LiquidTags.Header do
  # Liquid tag that renders the site header / navigation.
  alias Sweetroll2.Render

  def parse(%Liquid.Tag{} = tag, %Liquid.Template{} = context), do: {tag, context}

  def render(output, _tag, context) do
    assigns = context.assigns

    {:safe, iodata} =
      Render.header(
        posts: assigns.posts,
        cur_url: assigns.page.url,
        feed_urls: assigns.feed_urls
      )

    # Prepend the rendered header onto the Liquid output accumulator.
    {[IO.iodata_to_binary(iodata) | output], context}
  end
end
608 |
defmodule Sweetroll2.Render.LiquidTags.Footer do
  # Liquid tag that renders the site footer.
  alias Sweetroll2.Render

  def parse(%Liquid.Tag{} = tag, %Liquid.Template{} = context), do: {tag, context}

  def render(output, _tag, context) do
    assigns = context.assigns

    {:safe, iodata} =
      Render.footer(
        logged_in: assigns.logged_in,
        csp_nonce: assigns["csp_nonce"]
      )

    # Prepend the rendered footer onto the Liquid output accumulator.
    {[IO.iodata_to_binary(iodata) | output], context}
  end
end
626 |
defmodule Sweetroll2.Render.LiquidTags.FeedPreview do
  # Liquid tag that inlines a preview (the first few entries) of another feed
  # into the current page. The tag's markup is the feed URL to look up.
  alias Sweetroll2.{Render, Post}

  def parse(%Liquid.Tag{} = tag, %Liquid.Template{} = context) do
    {tag, context}
  end

  def render(output, tag, context) do
    # tag.markup carries the feed URL; lookup also resolves generated
    # (dynamic/paginated) feeds.
    feed =
      Sweetroll2.Post.Generative.lookup(
        tag.markup,
        context.assigns.posts,
        context.assigns.local_urls
      )

    # First five entries, with their comments inlined for rendering.
    children =
      (feed.children || [])
      |> Enum.slice(0, 5)
      |> Enum.map(&Post.Comments.inline_comments(&1, context.assigns.posts))

    # TODO: adjustable number

    # NOTE(review): entries are reversed before being prepended to `output` —
    # presumably the Liquid output accumulator is built in reverse order and
    # flattened later; confirm against the Liquid renderer.
    {(Enum.map(children, fn entry ->
        {:safe, data} =
          Render.entry(
            posts: context.assigns.posts,
            cur_url: context.assigns.page.url,
            logged_in: context.assigns.logged_in,
            entry: entry,
            feed_urls: context.assigns.feed_urls,
            feeds_with_tags: context.assigns.feeds_with_tags,
            local_urls: context.assigns.local_urls,
            expand_comments: false
          )

        IO.iodata_to_binary([~S[], data, ""])
      end)
      |> Enum.reverse()) ++ output, context}
  end
end
667 |
--------------------------------------------------------------------------------
/lib/sweetroll2/serve.ex:
--------------------------------------------------------------------------------
defmodule Sweetroll2.Serve do
  # Main HTTP entry point: a Plug.Router wiring up auth, micropub, incoming
  # webmentions, SSE firehoses, and the catch-all page renderer.

  # Body parsers for Plug.Parsers; multipart capped at ~20 MB for media uploads.
  @parsers [:urlencoded, {:multipart, length: 20_000_000}, :json]

  alias Sweetroll2.{Auth, Post, MediaUpload, Render, Job}

  use Plug.Router

  # In-browser exception pages during development only.
  if Mix.env() == :dev do
    use Plug.Debugger, otp_app: :sweetroll2
  end

  use Plug.ErrorHandler

  # NOTE: plug order matters — profiling and request metadata first, then TLS
  # redirect, host capture and Link headers, before matching/dispatch.
  plug :fprofile
  plug Plug.RequestId
  plug RemoteIp
  plug Timber.Plug.HTTPContext
  plug Timber.Plug.Event
  plug Plug.SSL, rewrite_on: [:x_forwarded_proto]
  plug Plug.Head
  plug :add_host_to_process
  plug :add_links

  # Static assets; ?vsn= requests get immutable far-future caching.
  plug Plug.Static,
    at: "/__as__",
    from: :sweetroll2,
    cache_control_for_vsn_requests: "public, max-age=31536000, immutable",
    gzip: true,
    brotli: true

  plug Plug.MethodOverride
  plug :match
  plug Plug.Parsers, parsers: @parsers, json_decoder: Jason
  plug Auth.Session
  plug :fetch_session
  # CSRF is skipped for anonymous requests and media uploads (see skip_csrf_anon/2).
  plug :skip_csrf_anon
  plug Plug.CSRFProtection
  plug :dispatch

  forward "/__auth__", to: Auth.Serve

  forward "/__micropub__",
    to: PlugMicropub,
    init_opts: [
      handler: Sweetroll2.Micropub,
      json_encoder: Jason
    ]

  # Callback hit by the imgroll media processor when an upload finishes;
  # :token is the path parameter bound by Plug.Router.
  post "/__imgroll_callback__/:token" do
    MediaUpload.fill(token, conn.body_params)
    send_resp(conn, :ok, "ok")
  end

  # Incoming webmention receiver: validates source/target, rate limits by IP,
  # then queues an async fetch job to verify and store the mention.
  post "/__webmention__" do
    sourceu = URI.parse(conn.body_params["source"])
    targetu = URI.parse(conn.body_params["target"])
    posts = %Post.DbAsMap{}

    cond do
      is_nil(conn.body_params["source"]) ->
        send_resp(conn, :bad_request, "No source parameter")

      is_nil(conn.body_params["target"]) ->
        send_resp(conn, :bad_request, "No target parameter")

      sourceu.scheme != "https" and sourceu.scheme != "http" ->
        send_resp(conn, :bad_request, "Non-HTTP(S) source parameter")

      # A mention of ourselves by ourselves is rejected.
      String.starts_with?(conn.body_params["source"], Process.get(:our_home_url)) ->
        send_resp(
          conn,
          :bad_request,
          "Source parameter on our host (must not be on '#{Process.get(:our_home_url)}')"
        )

      !String.starts_with?(conn.body_params["target"], Process.get(:our_home_url)) ->
        send_resp(
          conn,
          :bad_request,
          "Target parameter not on our host (must be on '#{Process.get(:our_home_url)}')"
        )

      # At most 10 webmentions per 10 minutes per client IP.
      match?(
        {:deny, _},
        Hammer.check_rate("wm:#{conn.remote_ip |> :inet.ntoa()}", 10 * 60_000, 10)
      ) ->
        send_resp(conn, :too_many_requests, "Your IP address is rate limited")

      is_nil(posts[targetu.path]) || posts[targetu.path].deleted ->
        send_resp(conn, :bad_request, "Target post does not exist")

      true ->
        # Verification happens asynchronously; respond 202 right away.
        Que.add(Job.Fetch,
          url: conn.body_params["source"],
          check_mention: conn.body_params["target"],
          save_mention: targetu.path,
          notify_update: [targetu.path]
        )

        send_resp(conn, :accepted, "Accepted for processing")
    end
  end

  # Public server-sent-events stream of URL updates.
  get "/__firehose__" do
    SSE.stream(conn, {[:url_updated], %SSE.Chunk{data: ""}})
  end

  # SSE stream of processed media uploads; requires a logged-in session.
  get "/__media_firehose__" do
    logged_in = !is_nil(Auth.Session.current_token(conn))

    if logged_in do
      SSE.stream(conn, {[:upload_processed], %SSE.Chunk{data: ""}})
    else
      send_resp(conn, :unauthorized, "hello :)")
    end
  end

  # Catch-all: render any post/feed page from the database.
  get _ do
    # Security-related response headers for all rendered pages.
    conn =
      conn
      |> put_resp_content_type("text/html")
      |> put_resp_header(
        "Feature-Policy",
        "unsized-media 'none'; sync-xhr 'none'; document-write 'none'"
      )
      |> put_resp_header("Referrer-Policy", "strict-origin")
      |> put_resp_header("X-XSS-Protection", "1; mode=block")
      |> put_resp_header("X-Content-Type-Options", "nosniff")

    url = conn.request_path
    logged_in = !is_nil(Auth.Session.current_token(conn))
    posts = %Post.DbAsMap{}
    # Logged-in users can also see non-public URLs.
    urls_local = if logged_in, do: Post.urls_local(), else: Post.urls_local_public()
    post = Post.Generative.lookup(url, posts, urls_local)

    cond do
      !post ->
        send_resp(conn, 404, "Page not found")

      post.status != :published and not logged_in ->
        send_resp(conn, 401, "Unauthorized")

      post.deleted ->
        send_resp(conn, 410, "Gone")

      true ->
        # NOTE: chunking without special considerations would break CSRF tokens
        {:safe, data} =
          Render.render_post(
            post: post,
            posts: posts,
            local_urls: urls_local,
            logged_in: logged_in
          )

        send_resp(conn, :ok, data)
    end
  end

  # Plug.ErrorHandler callback: never leak exception details to clients.
  def handle_errors(conn, %{kind: _kind, reason: _reason, stack: _stack}) do
    send_resp(conn, 500, "Something went wrong")
  end

  # Static part of the Link header (endpoint discovery), built at compile time.
  @link_header ExHttpLink.generate([
                 {"/__webmention__", {"rel", "webmention"}},
                 {Job.NotifyWebsub.hub(), {"rel", "hub"}},
                 {"/__micropub__", {"rel", "micropub"}},
                 {"/__auth__/authorize", {"rel", "authorization_endpoint"}},
                 {"/__auth__/token", {"rel", "token_endpoint"}}
               ])

  # Appends a per-request rel=self link to the precomputed Link header.
  defp add_links(conn, _opts) do
    put_resp_header(
      conn,
      "link",
      @link_header <>
        ", " <>
        ExHttpLink.generate([
          {"#{Process.get(:our_home_url)}#{conn.request_path}", {"rel", "self"}}
        ])
    )
  end

  defp skip_csrf_anon(conn, _opts) do
    # we don't have anonymous sessions, so we can't exactly store the CSRF token in a session
    # when logged out (this enables the login form to work)
    # also allow media
    if is_nil(Auth.Session.current_token(conn)) or conn.request_path == "/__micropub__/media" do
      put_private(conn, :plug_skip_csrf_protection, true)
    else
      conn
    end
  end

  @doc """
  Puts the request host with scheme and port but without path (not even /) into the process dictionary.

  NOTE: reverse proxies must be configured to preserve Host!
  """
  defp add_host_to_process(conn, _opts) do
    # Omit the port for the default 80/443 ports.
    Process.put(
      :our_home_url,
      if(conn.port != 443 and conn.port != 80,
        do: "#{conn.scheme}://#{conn.host}:#{conn.port}",
        else: "#{conn.scheme}://#{conn.host}"
      )
    )

    conn
  end

  # Starts an fprof trace for the request when ?fprof is passed (non-prod only).
  # NOTE(review): Mix.env() at runtime assumes Mix is available (i.e. not an
  # OTP release) — confirm how this app is deployed.
  defp fprofile(conn, _opts) do
    conn = fetch_query_params(conn)

    if Mix.env() != :prod and Map.has_key?(conn.query_params, "fprof") do
      :fprof.trace(:start)

      register_before_send(conn, fn conn ->
        :fprof.trace(:stop)
        :fprof.profile()
        conn
      end)
    else
      conn
    end
  end
end
228 |
--------------------------------------------------------------------------------
/mix.exs:
--------------------------------------------------------------------------------
defmodule Sweetroll2.MixProject do
  use Mix.Project

  # Standard Mix project definition for the sweetroll2 application.
  def project do
    [
      app: :sweetroll2,
      version: "0.1.0",
      elixir: "~> 1.7",
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end

  # OTP application config; Sweetroll2.Application is the supervision entry point.
  def application do
    [
      extra_applications: [:logger],
      mod: {Sweetroll2.Application, []}
    ]
  end

  defp deps do
    [
      {:ex_early_ret, "~> 0.1"},
      {:timex, "~> 3.5"},
      {:date_time_parser, "~> 1.0.0-rc1"},
      {:jason, "~> 1.1"},
      # Markup / rendering pipeline.
      {:earmark, "~> 1.4"},
      {:phoenix_html, "~> 2.13"},
      {:taggart, "~> 0.1.5"},
      {:floki, "~> 0.23", override: true},
      {:html_sanitize_ex, "~> 1.3"},
      {:rustler, "~> 0.21.0", override: true},
      {:html5ever, "~> 0.7.0"},
      {:rustled_syntect, "~> 0.1"},
      # IndieWeb protocol support (microformats, micropub, link headers).
      {:microformats2, git: "https://github.com/ckruse/microformats2-elixir"},
      {:plug_micropub, git: "https://github.com/bismark/plug_micropub"},
      {:ex_http_link, "~> 0.1.2"},
      {:argon2_elixir, "~> 2.0"},
      {:nanoid, "~> 2.0.1"},
      {:slugger, "~> 0.3.0"},
      {:file_system, git: "https://github.com/falood/file_system"},
      {:debounce, "~> 0.1.0"},
      {:con_cache, "~> 0.13"},
      {:quantum, "~> 2.3"},
      # Storage (Mnesia wrapper) and background jobs.
      {:memento, "~> 0.3.1"},
      {:liquid, "~> 0.9"},
      {:nimble_parsec, "~> 0.5", override: true},
      {:que, git: "https://github.com/sheharyarn/que"},
      {:ssl_verify_fun, "~> 1.1", override: true},
      # HTTP clients, rate limiting, logging.
      {:hackney, "~> 1.15"},
      {:tesla, "~> 1.3"},
      {:hammer, "~> 6.0"},
      {:timber, "~> 3.1"},
      {:timber_plug, "~> 1.0"},
      {:timber_exceptions, "~> 2.0"},
      {:exceptional, "~> 2.1"},
      # S3-compatible storage for backups/media.
      {:ex_aws, "~> 2.1"},
      {:ex_aws_s3, "~> 2.0"},
      {:ex_aws_sts, "~> 2.0"},
      {:sweet_xml, "~> 0.6"},
      # Web server and SSE push.
      {:plug_cowboy, "~> 2.0"},
      {:remote_ip, "~> 0.2"},
      {:sse, "~> 0.4"},
      {:event_bus, ">= 1.6.0"},
      {:observer_cli, "~> 1.5"},
      # Dev-only tooling.
      {:credo, "~> 1.1", only: [:dev], runtime: false},
      {:dialyxir, "~> 1.0.0-rc.7", only: [:dev], runtime: false}
    ]
  end
end
71 |
--------------------------------------------------------------------------------
/mix.lock:
--------------------------------------------------------------------------------
1 | %{
2 | "argon2_elixir": {:hex, :argon2_elixir, "2.3.0", "e251bdafd69308e8c1263e111600e6d68bd44f23d2cccbe43fcb1a417a76bc8e", [:make, :mix], [{:comeonin, "~> 5.3", [hex: :comeonin, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.6", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm"},
3 | "bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm"},
4 | "certifi": {:hex, :certifi, "2.5.2", "b7cfeae9d2ed395695dd8201c57a2d019c0c43ecaf8b8bcb9320b40d6662f340", [:rebar3], [{:parse_trans, "~>3.3", [hex: :parse_trans, repo: "hexpm", optional: false]}], "hexpm"},
5 | "combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm"},
6 | "comeonin": {:hex, :comeonin, "5.3.1", "7fe612b739c78c9c1a75186ef2d322ce4d25032d119823269d0aa1e2f1e20025", [:mix], [], "hexpm"},
7 | "con_cache": {:hex, :con_cache, "0.14.0", "863acb90fa08017be3129074993af944cf7a4b6c3ee7c06c5cd0ed6b94fbc223", [:mix], [], "hexpm"},
8 | "connection": {:hex, :connection, "1.0.4", "a1cae72211f0eef17705aaededacac3eb30e6625b04a6117c1b2db6ace7d5976", [:mix], [], "hexpm"},
9 | "cowboy": {:hex, :cowboy, "2.8.0", "f3dc62e35797ecd9ac1b50db74611193c29815401e53bac9a5c0577bd7bc667d", [:rebar3], [{:cowlib, "~> 2.9.1", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "~> 1.7.1", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm"},
10 | "cowlib": {:hex, :cowlib, "2.9.1", "61a6c7c50cf07fdd24b2f45b89500bb93b6686579b069a89f88cb211e1125c78", [:rebar3], [], "hexpm"},
11 | "credo": {:hex, :credo, "1.4.0", "92339d4cbadd1e88b5ee43d427b639b68a11071b6f73854e33638e30a0ea11f5", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm"},
12 | "crontab": {:hex, :crontab, "1.1.10", "dc9bb1f4299138d47bce38341f5dcbee0aa6c205e864fba7bc847f3b5cb48241", [:mix], [{:ecto, "~> 1.0 or ~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm"},
13 | "date_time_parser": {:hex, :date_time_parser, "1.0.0", "e43c423d98f223b968c22d2110acc0ec8c9408e222a8c25702f6d51a2dd86901", [:mix], [{:nimble_parsec, "~> 0.5.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}, {:timex, "~> 3.2", [hex: :timex, repo: "hexpm", optional: false]}], "hexpm"},
14 | "db_connection": {:hex, :db_connection, "2.0.6", "bde2f85d047969c5b5800cb8f4b3ed6316c8cb11487afedac4aa5f93fd39abfa", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm"},
15 | "debounce": {:hex, :debounce, "0.1.1", "51e76a43b375d925dcdb7f0a1e74536551f5c2f6f5a33041c9443fc17999a760", [:mix], [], "hexpm"},
16 | "decimal": {:hex, :decimal, "1.7.0", "30d6b52c88541f9a66637359ddf85016df9eb266170d53105f02e4a67e00c5aa", [:mix], [], "hexpm"},
17 | "dialyxir": {:hex, :dialyxir, "1.0.0", "6a1fa629f7881a9f5aaf3a78f094b2a51a0357c843871b8bc98824e7342d00a5", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm"},
18 | "earmark": {:hex, :earmark, "1.4.9", "837e4c1c5302b3135e9955f2bbf52c6c52e950c383983942b68b03909356c0d9", [:mix], [{:earmark_parser, ">= 1.4.9", [hex: :earmark_parser, repo: "hexpm", optional: false]}], "hexpm"},
19 | "earmark_parser": {:hex, :earmark_parser, "1.4.9", "819bda2049e6ee1365424e4ced1ba65806eacf0d2867415f19f3f80047f8037b", [:mix], [], "hexpm"},
20 | "elixir_make": {:hex, :elixir_make, "0.6.0", "38349f3e29aff4864352084fc736fa7fa0f2995a819a737554f7ebd28b85aaab", [:mix], [], "hexpm"},
21 | "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm"},
22 | "event_bus": {:hex, :event_bus, "1.6.1", "07331328b67ccc76d14a12872013464106390abaa47ea0d6a7755e3524899964", [:mix], [], "hexpm"},
23 | "ex_aws": {:hex, :ex_aws, "2.1.3", "26b6f036f0127548706aade4a509978fc7c26bd5334b004fba9bfe2687a525df", [:mix], [{:configparser_ex, "~> 4.0", [hex: :configparser_ex, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: true]}, {:jsx, "~> 2.8", [hex: :jsx, repo: "hexpm", optional: true]}, {:sweet_xml, "~> 0.6", [hex: :sweet_xml, repo: "hexpm", optional: true]}], "hexpm"},
24 | "ex_aws_s3": {:hex, :ex_aws_s3, "2.0.2", "c0258bbdfea55de4f98f0b2f0ca61fe402cc696f573815134beb1866e778f47b", [:mix], [{:ex_aws, "~> 2.0", [hex: :ex_aws, repo: "hexpm", optional: false]}, {:sweet_xml, ">= 0.0.0", [hex: :sweet_xml, repo: "hexpm", optional: true]}], "hexpm"},
25 | "ex_aws_sts": {:hex, :ex_aws_sts, "2.1.0", "43886580812a2fd9952b62c14cb55c548c6bf2fa7ed98b6496c929b7fbe46f18", [:mix], [{:ex_aws, "~> 2.0", [hex: :ex_aws, repo: "hexpm", optional: false]}], "hexpm"},
26 | "ex_early_ret": {:hex, :ex_early_ret, "0.1.1", "074df99d21f98ff4fa8c8da27135aae6df2e97ce2aa025a68702392bc301432a", [:mix], [], "hexpm"},
27 | "ex_http_link": {:hex, :ex_http_link, "0.1.2", "65a66617a9834e92d0464adda04d92f80f1c873f691eab8deaa1505c4c3c636e", [:mix], [{:nimble_parsec, "~> 0.5", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm"},
28 | "ex_utils": {:hex, :ex_utils, "0.1.7", "2c133e0bcdc49a858cf8dacf893308ebc05bc5fba501dc3d2935e65365ec0bf3", [:mix], [], "hexpm"},
29 | "exceptional": {:hex, :exceptional, "2.1.3", "cb17cb9b7c4882e763b82db08ba317678157ca95970fae96b31b3c90f5960c3d", [:mix], [], "hexpm"},
30 | "file_system": {:git, "https://github.com/falood/file_system", "b2e4ae37ae3f5799438a9350222bb42f05b9df73", []},
31 | "floki": {:hex, :floki, "0.26.0", "4df88977e2e357c6720e1b650f613444bfb48c5acfc6a0c646ab007d08ad13bf", [:mix], [{:html_entities, "~> 0.5.0", [hex: :html_entities, repo: "hexpm", optional: false]}], "hexpm"},
32 | "gen_stage": {:hex, :gen_stage, "1.0.0", "51c8ae56ff54f9a2a604ca583798c210ad245f415115453b773b621c49776df5", [:mix], [], "hexpm"},
33 | "gen_state_machine": {:hex, :gen_state_machine, "2.1.0", "a38b0e53fad812d29ec149f0d354da5d1bc0d7222c3711f3a0bd5aa608b42992", [:mix], [], "hexpm"},
34 | "gettext": {:hex, :gettext, "0.18.0", "406d6b9e0e3278162c2ae1de0a60270452c553536772167e2d701f028116f870", [:mix], [], "hexpm"},
35 | "hackney": {:hex, :hackney, "1.16.0", "5096ac8e823e3a441477b2d187e30dd3fff1a82991a806b2003845ce72ce2d84", [:rebar3], [{:certifi, "2.5.2", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "6.0.1", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.3.0", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.6", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm"},
36 | "hammer": {:hex, :hammer, "6.0.0", "72ec6fff10e9d63856968988a22ee04c4d6d5248071ddccfbda50aa6c455c1d7", [:mix], [{:poolboy, "~> 1.5", [hex: :poolboy, repo: "hexpm", optional: false]}], "hexpm"},
37 | "html5ever": {:hex, :html5ever, "0.7.0", "9f63ec1c783b2dc9f326840fcc993c01e926dbdef4e51ba1bbe5355993c258b4", [:mix], [{:rustler, "~> 0.18.0", [hex: :rustler, repo: "hexpm", optional: false]}], "hexpm"},
38 | "html_entities": {:hex, :html_entities, "0.5.1", "1c9715058b42c35a2ab65edc5b36d0ea66dd083767bef6e3edb57870ef556549", [:mix], [], "hexpm"},
39 | "html_sanitize_ex": {:hex, :html_sanitize_ex, "1.4.0", "0310d27d7bafb662f30bff22ec732a72414799c83eaf44239781fd23b96216c0", [:mix], [{:mochiweb, "~> 2.15", [hex: :mochiweb, repo: "hexpm", optional: false]}], "hexpm"},
40 | "idna": {:hex, :idna, "6.0.1", "1d038fb2e7668ce41fbf681d2c45902e52b3cb9e9c77b55334353b222c2ee50c", [:rebar3], [{:unicode_util_compat, "0.5.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm"},
41 | "inet_cidr": {:hex, :inet_cidr, "1.0.4", "a05744ab7c221ca8e395c926c3919a821eb512e8f36547c062f62c4ca0cf3d6e", [:mix], [], "hexpm"},
42 | "jason": {:hex, :jason, "1.2.1", "12b22825e22f468c02eb3e4b9985f3d0cb8dc40b9bd704730efa11abd2708c44", [:mix], [{:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm"},
43 | "libring": {:hex, :libring, "1.5.0", "44313eb6862f5c9168594a061e9d5f556a9819da7c6444706a9e2da533396d70", [:mix], [], "hexpm"},
44 | "liquid": {:hex, :liquid, "0.9.1", "eef4797f4b8b1cdd14c44d6dcd52bb49df9caf6bcf5dacfd61911fba8b6a628d", [:mix], [{:timex, "~> 3.0", [hex: :timex, repo: "hexpm", optional: false]}], "hexpm"},
45 | "memento": {:hex, :memento, "0.3.1", "b2909390820550d8b90b68ec96f9e15ff8a45a28b6f97fa4a62ef50e87c2f9d9", [:mix], [], "hexpm"},
46 | "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm"},
47 | "microformats2": {:git, "https://github.com/ckruse/microformats2-elixir", "c0de5bf99e6c6b7a5403d13060131cc1388e0051", []},
48 | "mime": {:hex, :mime, "1.3.1", "30ce04ab3175b6ad0bdce0035cba77bba68b813d523d1aac73d9781b4d193cf8", [:mix], [], "hexpm"},
49 | "mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm"},
50 | "mochiweb": {:hex, :mochiweb, "2.20.1", "e4dbd0ed716f076366ecf62ada5755a844e1d95c781e8c77df1d4114be868cdf", [:rebar3], [], "hexpm"},
51 | "msgpax": {:hex, :msgpax, "2.2.4", "7b3790ef684089076b63c0f08c2f4b079c6311daeb006b69e4ed2bf67518291e", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm"},
52 | "nanoid": {:hex, :nanoid, "2.0.3", "58e9f6e64529ee3b0a5937181e95225ad4ccab0c86f8d6b99b2303422f2db8b6", [:mix], [], "hexpm"},
53 | "nimble_parsec": {:hex, :nimble_parsec, "0.6.0", "32111b3bf39137144abd7ba1cce0914533b2d16ef35e8abc5ec8be6122944263", [:mix], [], "hexpm"},
54 | "observer_cli": {:hex, :observer_cli, "1.5.4", "e8489b3a7c77c2155c0b67fa9f90d9afa76bf15c24fb7b312addcc117771d154", [:mix, :rebar3], [{:recon, "~>2.5.1", [hex: :recon, repo: "hexpm", optional: false]}], "hexpm"},
55 | "parse_trans": {:hex, :parse_trans, "3.3.0", "09765507a3c7590a784615cfd421d101aec25098d50b89d7aa1d66646bc571c1", [:rebar3], [], "hexpm"},
56 | "phoenix_html": {:hex, :phoenix_html, "2.14.2", "b8a3899a72050f3f48a36430da507dd99caf0ac2d06c77529b1646964f3d563e", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
57 | "plug": {:hex, :plug, "1.10.3", "c9cebe917637d8db0e759039cc106adca069874e1a9034fd6e3fdd427fd3c283", [:mix], [{:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm"},
58 | "plug_cowboy": {:hex, :plug_cowboy, "2.3.0", "149a50e05cb73c12aad6506a371cd75750c0b19a32f81866e1a323dda9e0e99d", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm"},
59 | "plug_crypto": {:hex, :plug_crypto, "1.1.2", "bdd187572cc26dbd95b87136290425f2b580a116d3fb1f564216918c9730d227", [:mix], [], "hexpm"},
60 | "plug_micropub": {:git, "https://github.com/bismark/plug_micropub", "c6976ac9510035a78a6b1d1fa2e469cb94aa3248", []},
61 | "poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm"},
62 | "quantum": {:hex, :quantum, "2.4.0", "f2ad4b20988f848455d35ed0e884ba0c7629a27ee86cbec6a6e0fc214b6e69cf", [:mix], [{:calendar, "~> 0.17", [hex: :calendar, repo: "hexpm", optional: true]}, {:crontab, "~> 1.1", [hex: :crontab, repo: "hexpm", optional: false]}, {:gen_stage, "~> 0.12 or ~> 1.0", [hex: :gen_stage, repo: "hexpm", optional: false]}, {:swarm, "~> 3.3", [hex: :swarm, repo: "hexpm", optional: false]}, {:timex, "~> 3.1", [hex: :timex, repo: "hexpm", optional: true]}, {:tzdata, "~> 1.0", [hex: :tzdata, repo: "hexpm", optional: true]}], "hexpm"},
63 | "que": {:git, "https://github.com/sheharyarn/que", "23bb7f855d9b608c3fe865afd1e34f339dd82512", []},
64 | "ranch": {:hex, :ranch, "1.7.1", "6b1fab51b49196860b733a49c07604465a47bdb78aa10c1c16a3d199f7f8c881", [:rebar3], [], "hexpm"},
65 | "recon": {:hex, :recon, "2.5.1", "430ffa60685ac1efdfb1fe4c97b8767c92d0d92e6e7c3e8621559ba77598678a", [:mix, :rebar3], [], "hexpm"},
66 | "remote_ip": {:hex, :remote_ip, "0.2.1", "cd27cd8ea54ecaaf3532776ff4c5e353b3804e710302e88c01eadeaaf42e7e24", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:inet_cidr, "~> 1.0", [hex: :inet_cidr, repo: "hexpm", optional: false]}, {:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
67 | "rustled_syntect": {:hex, :rustled_syntect, "0.1.2", "003c8638f69174cfa1fa91eeb0a0b77bf8fd1429217f174c85d7c86dac3fa917", [:mix], [{:rustler, "~> 0.21.0", [hex: :rustler, repo: "hexpm", optional: false]}], "hexpm"},
68 | "rustler": {:hex, :rustler, "0.21.1", "5299980be32da997c54382e945bacaa015ed97a60745e1e639beaf6a7b278c65", [:mix], [{:toml, "~> 0.5.2", [hex: :toml, repo: "hexpm", optional: false]}], "hexpm"},
69 | "slugger": {:hex, :slugger, "0.3.0", "efc667ab99eee19a48913ccf3d038b1fb9f165fa4fbf093be898b8099e61b6ed", [:mix], [], "hexpm"},
70 | "sse": {:hex, :sse, "0.4.0", "f17affacbc4618bac07590eec7bff849aa27d1f71bb3d41da3fd3cb255d16910", [:mix], [{:event_bus, ">= 1.6.0", [hex: :event_bus, repo: "hexpm", optional: false]}, {:plug, ">= 1.4.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
71 | "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.6", "cf344f5692c82d2cd7554f5ec8fd961548d4fd09e7d22f5b62482e5aeaebd4b0", [:make, :mix, :rebar3], [], "hexpm"},
72 | "swarm": {:hex, :swarm, "3.4.0", "64f8b30055d74640d2186c66354b33b999438692a91be275bb89cdc7e401f448", [:mix], [{:gen_state_machine, "~> 2.0", [hex: :gen_state_machine, repo: "hexpm", optional: false]}, {:libring, "~> 1.0", [hex: :libring, repo: "hexpm", optional: false]}], "hexpm"},
73 | "sweet_xml": {:hex, :sweet_xml, "0.6.6", "fc3e91ec5dd7c787b6195757fbcf0abc670cee1e4172687b45183032221b66b8", [:mix], [], "hexpm"},
74 | "taggart": {:hex, :taggart, "0.1.5", "c0e2a86a08ccebccae7a7d6ae4db1aba89689d1bf1f61d07e88d4f36839cfc44", [:mix], [{:floki, "~> 0.17", [hex: :floki, repo: "hexpm", optional: true]}, {:phoenix_html, "~> 2.10", [hex: :phoenix_html, repo: "hexpm", optional: false]}], "hexpm"},
75 | "telemetry": {:hex, :telemetry, "0.4.2", "2808c992455e08d6177322f14d3bdb6b625fbcfd233a73505870d8738a2f4599", [:rebar3], [], "hexpm"},
76 | "tesla": {:hex, :tesla, "1.3.3", "26ae98627af5c406584aa6755ab5fc96315d70d69a24dd7f8369cfcb75094a45", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, "~> 1.3", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "~> 4.4.0", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm"},
77 | "timber": {:hex, :timber, "3.1.2", "12c3fed5cef790619a89e0d80a24560650c6644b88e0188cca919de5faf5d239", [:mix], [{:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:msgpax, "~> 2.2", [hex: :msgpax, repo: "hexpm", optional: false]}], "hexpm"},
78 | "timber_exceptions": {:hex, :timber_exceptions, "2.1.0", "0048afd74074b1ef7f1ee88d3baf711f16700bf80a79ad13584d0a6beb5cc031", [:mix], [{:timber, "~> 3.1", [hex: :timber, repo: "hexpm", optional: false]}], "hexpm"},
79 | "timber_plug": {:hex, :timber_plug, "1.1.0", "d4c950b943775006e61ca0b6176ef7c056462df281cf48145282989e680018cc", [:mix], [{:plug, "~> 1.2", [hex: :plug, repo: "hexpm", optional: false]}, {:timber, "~> 3.1", [hex: :timber, repo: "hexpm", optional: false]}], "hexpm"},
80 | "timex": {:hex, :timex, "3.6.2", "845cdeb6119e2fef10751c0b247b6c59d86d78554c83f78db612e3290f819bc2", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 0.1.8 or ~> 0.5 or ~> 1.0.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm"},
81 | "toml": {:hex, :toml, "0.5.2", "e471388a8726d1ce51a6b32f864b8228a1eb8edc907a0edf2bb50eab9321b526", [:mix], [], "hexpm"},
82 | "tzdata": {:hex, :tzdata, "1.0.3", "73470ad29dde46e350c60a66e6b360d3b99d2d18b74c4c349dbebbc27a09a3eb", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm"},
83 | "unicode_util_compat": {:hex, :unicode_util_compat, "0.5.0", "8516502659002cec19e244ebd90d312183064be95025a319a6c7e89f4bccd65b", [:rebar3], [], "hexpm"},
84 | }
85 |
--------------------------------------------------------------------------------
/mrb/as.rb:
--------------------------------------------------------------------------------
1 | lambda do |env| # h2o mruby handler run in front of static asset requests
2 |   headers = {}
3 |   if env["QUERY_STRING"].include?("vsn=") # asset URLs carry a ?vsn= version fingerprint
4 |     headers["cache-control"] = "public, max-age=31536000, immutable" # safe to cache for a year: a new asset version gets a new vsn
5 |   end
6 |   return [399, headers, []] # 399 is h2o's "delegate to the next handler" status; our headers are merged into the final response
7 | end
8 |
--------------------------------------------------------------------------------
/mrb/out.rb:
--------------------------------------------------------------------------------
1 | lambda do |env| # h2o mruby handler guarding the statically generated output directory
2 |   if env["h2o.remaining_delegations"] == 5 # NOTE(review): presumably the initial delegation budget, i.e. a direct (non-reproxied) request hitting __out__ -- confirm against h2o.yaml
3 |     return [401, {}, ["Where do you think you're going??"]]
4 |   end
5 |   if File.exist?(File.join(ENV["SR2_STATIC_GEN_OUT_DIR"] || "out/", env["PATH_INFO"], "gone")) # a "gone" marker file next to the page means it was deleted
6 |     # XXX: No way to serve the file still??
7 |     return [410, { # 410 Gone, still advertising the canonical URL
8 |       "link" => "<#{env['rack.url_scheme']}://#{env['SERVER_NAME']}#{env['PATH_INFO']}>; rel=\"self\""
9 |     }, ["Gone"]]
10 |   end
11 |   return [399, { # 399: fall through to the next (file-serving) handler with a rel=self link header
12 |     "link" => "<#{env['rack.url_scheme']}://#{env['SERVER_NAME']}#{env['PATH_INFO']}>; rel=\"self\""
13 |   }, []]
14 | end
15 |
--------------------------------------------------------------------------------
/mrb/root.rb:
--------------------------------------------------------------------------------
1 | lambda do |env| # h2o mruby handler at the site root: canonicalize URLs, then reproxy to pre-generated pages when possible
2 |   if env["PATH_INFO"].end_with?("/") && env["PATH_INFO"] != "/" # redirect /foo/ -> /foo (leave the root "/" alone)
3 |     loc = env["PATH_INFO"]
4 |     while loc.end_with?("/") && loc != "/" # chomp strips one trailing slash per iteration
5 |       loc = loc.chomp("/")
6 |     end
7 |     if env["QUERY_STRING"].length > 0 # carry the query string over to the redirect target
8 |       loc += "?#{env["QUERY_STRING"]}"
9 |     end
10 |     return [301, { "location" => loc }, []]
11 |   end
12 |   if (env["HTTP_COOKIE"].nil? || !env["HTTP_COOKIE"].include?("wheeeee=C")) && # NOTE(review): presumably the logged-in session cookie; sessions bypass the static cache -- confirm
13 |     File.exist?(File.join(ENV["SR2_STATIC_GEN_OUT_DIR"] || "out/", env["PATH_INFO"], "index.html"))
14 |     out = (!env["HTTP_USER_AGENT"].nil? && env["HTTP_USER_AGENT"].include?("AppEngine-Google")) ? "crapp_engine" : "out" # AppEngine's fetcher gets routed to a separate prefix
15 |     return [307, { "x-reproxy-url" => "/__#{out}__" + env["PATH_INFO"] }, []] # x-reproxy-url makes h2o re-route internally instead of redirecting the client
16 |   end
17 |   return [399, {}, []] # no static page: delegate to the next handler (the dynamic app)
18 | end
19 |
--------------------------------------------------------------------------------
/priv/static/fnt/Inter-Regular.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/valpackett/sweetroll2/e4ee6491f9559a4a0b118e3952fa83cb7614e646/priv/static/fnt/Inter-Regular.woff
--------------------------------------------------------------------------------
/priv/static/fnt/Inter-Regular.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/valpackett/sweetroll2/e4ee6491f9559a4a0b118e3952fa83cb7614e646/priv/static/fnt/Inter-Regular.woff2
--------------------------------------------------------------------------------
/priv/static/fnt/Inter-upright.var.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/valpackett/sweetroll2/e4ee6491f9559a4a0b118e3952fa83cb7614e646/priv/static/fnt/Inter-upright.var.woff2
--------------------------------------------------------------------------------
/priv/static/icons.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/priv/static/icons/arrow-down.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/priv/static/icons/arrow-up.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/priv/static/icons/bookmark.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/priv/static/icons/desktop-download.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/priv/static/icons/device-camera.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/priv/static/icons/eye.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/priv/static/icons/info.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/priv/static/icons/link.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/priv/static/icons/lock.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/priv/static/icons/megaphone.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/priv/static/icons/octoface.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/priv/static/icons/paintcan.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/priv/static/icons/quote.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/priv/static/icons/reply.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/priv/static/icons/star.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/priv/static/icons/telescope.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/priv/static/sr2.js:
--------------------------------------------------------------------------------
1 | (function () {
2 | function throttle (timer) {
3 | let queuedCallback
4 | return callback => {
5 | if (!queuedCallback) {
6 | timer(() => {
7 | const cb = queuedCallback
8 | queuedCallback = null
9 | cb()
10 | })
11 | }
12 | queuedCallback = callback
13 | }
14 | }
15 |
16 |
17 | customElements.define('responsive-container', class extends HTMLElement {
18 | alignToVGrid () {
19 | /* Why is there no modulo in CSS calc?!? */
20 | const lh = parseFloat(window.getComputedStyle(document.body).getPropertyValue('line-height'))
21 | console.log(lh)
22 | this.style.marginBottom = `${lh - this.getBoundingClientRect().height % lh}px`
23 | }
24 |
25 | connectedCallback () {
26 | this.onFrame = throttle(requestAnimationFrame)
27 | this.onFrame(() => this.alignToVGrid())
28 | window.addEventListener('resize', e => {
29 | this.onFrame(() => this.alignToVGrid())
30 | })
31 | }
32 | })
33 | })()
34 |
--------------------------------------------------------------------------------
/priv/static/style.css:
--------------------------------------------------------------------------------
1 | /*! https://rsms.me/inter/ | SIL OFL */
2 | @font-face {
3 | font-family: 'Inter';
4 | font-style: normal;
5 | font-weight: 400;
6 | font-display: swap;
7 | src: url("fnt/Inter-Regular.woff2?vsn=3.9") format("woff2"), url("fnt/Inter-Regular.woff?vsn=3.9") format("woff");
8 | }
9 | @font-face {
10 | font-family: 'Inter var';
11 | font-weight: 100 900;
12 | font-style: normal;
13 | font-named-instance: 'Regular';
14 | font-display: swap;
15 | src: url("fnt/Inter-upright.var.woff2?vsn=3.9") format("woff2");
16 | }
17 |
18 | /*! sanitize.css | CC0 License | github.com/csstools/sanitize.css */
19 | *, ::before, ::after { /*! background-repeat: no-repeat; */ box-sizing: border-box; }
20 | ::before, ::after { text-decoration: inherit; vertical-align: inherit; }
21 | html {
22 | cursor: default;
23 | -moz-tab-size: 4; tab-size: 4;
24 | -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;
25 | word-break: break-word;
26 | }
27 | body { margin: 0; }
28 | hr { height: 0; overflow: visible; }
29 | main { display: block; }
30 | nav ol, nav ul { list-style: none; }
31 | a { background-color: transparent; }
32 | abbr[title] { text-decoration: underline; text-decoration: underline dotted; }
33 | b, strong { font-weight: bolder; }
34 | pre, code, kbd, samp { font-family: monospace; font-size: inherit; }
35 | small { font-size: 0.79em; }
36 | ::selection { background-color: #b3d4fc; color: #000; text-shadow: none; }
37 | audio, canvas, iframe, img, svg, video { vertical-align: middle; }
38 | svg:not([fill]) { fill: currentColor; }
39 | table { border-collapse: collapse; }
40 | button, input, select, textarea { background-color: transparent; border: 1px solid; color: inherit; font: inherit; letter-spacing: inherit; }
41 | button, input, select { margin: 0; }
42 | button, select { overflow: visible; text-transform: none; }
43 | button, [type="button"], [type="reset"], [type="submit"] { -webkit-appearance: button; }
44 | fieldset { padding: 0.35em 0.75em 0.625em; }
45 | input { overflow: visible; }
46 | progress { vertical-align: baseline; }
47 | textarea { margin: 0; overflow: auto; resize: vertical; }
48 | [type="search"] { -webkit-appearance: textfield; outline-offset: -2px; }
49 | ::-webkit-inner-spin-button, ::-webkit-outer-spin-button { height: auto; }
50 | ::-webkit-input-placeholder { color: inherit; opacity: 0.54; }
51 | ::-webkit-search-decoration { -webkit-appearance: none; }
52 | ::-webkit-file-upload-button { -webkit-appearance: button; font: inherit; }
53 | ::-moz-focus-inner { border-style: none; padding: 0; }
54 | :-moz-focusring { outline: 1px dotted ButtonText; }
55 | summary { display: list-item; }
56 | a, area, button, input, label, select, summary, textarea, [tabindex] { touch-action: manipulation; }
57 | [aria-busy="true"] { cursor: progress; }
58 | [aria-controls] { cursor: pointer; }
59 | [aria-disabled="true"], [disabled] { cursor: not-allowed; }
60 | [aria-hidden="false"][hidden]:not(:focus) { clip: rect(0, 0, 0, 0); display: inherit; position: absolute; }
61 | iframe, img, input, select, textarea { height: auto; max-width: 100%; }
62 |
63 | /*! normalize-opentype.css | MIT License | kennethormandy.com/journal/normalize-opentype-css */
64 | html, body, table {
65 | font-feature-settings: "kern" 1, "liga" 1, "calt" 1, "pnum" 1, "tnum" 0, "onum" 1, "lnum" 0, "dlig" 0, "case" 1; }
66 | h1, h2, h3 {
67 | font-feature-settings: "kern" 1, "liga" 1, "calt" 1, "pnum" 1, "tnum" 0, "onum" 1, "lnum" 0, "dlig" 1, "case" 1; }
68 | abbr {
69 | font-feature-settings: "kern" 1, "liga" 1, "calt" 1, "pnum" 1, "tnum" 0, "onum" 1, "lnum" 0, "smcp" 1, "c2sc" 1, "case" 1; }
70 | time {
71 | font-feature-settings: "kern" 1, "liga" 1, "calt" 1, "pnum" 1, "tnum" 0, "onum" 1, "lnum" 0; }
72 | pre, kbd, samp, code {
73 | font-feature-settings: "kern" 0, "liga" 0, "calt" 1, "dlig" 0, "pnum" 0, "tnum" 1, "onum" 0, "lnum" 1, "zero" 1; }
74 | sup {
75 | font-feature-settings: "kern" 1, "liga" 1, "calt" 1, "pnum" 1, "tnum" 0, "onum" 1, "lnum" 0, "dlig" 0, "sups" 1; }
76 | sub {
77 | font-feature-settings: "kern" 1, "liga" 1, "calt" 1, "pnum" 1, "tnum" 0, "onum" 1, "lnum" 0, "dlig" 0, "subs" 1; }
78 |
79 | /*! sweetroll 2 | UNLICENSE | unrelenting.technology */
80 | html {
81 | font: 14px/22px "Inter", system-ui, sans-serif, "Noto Color Emoji", "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";
82 | font-weight: 400;
83 | background: #33414C;
84 | color: #fefefe;
85 | background-blend-mode: overlay;
86 | hyphens: auto; word-wrap: break-word;
87 | }
88 | *:focus { outline-color: #fff; }
89 | @supports (font-variation-settings: normal) {
90 | html { font-family: 'Inter var', system-ui, sans-serif, "Noto Color Emoji", "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol"; }
91 | * { font-synthesis: none; }
92 | }
93 | :root {
94 | --fntsz: 14px;
95 | --lineh: calc(var(--fntsz) * 1.5);
96 | --hspac: calc(var(--lineh) * 0.889);
97 | font-size: var(--fntsz);
98 | line-height: var(--lineh);
99 | }
100 | @media (min-width: 600px) and (max-width: 1300px) {
101 | :root { --fntsz: calc(14px + (16 - 14) * ((100vw - 600px) / (1300 - 600))); }
102 | }
103 | @media (min-width: 1300px) {
104 | :root { --fntsz: 16px; }
105 | }
106 | code { word-wrap: normal; }
107 | pre, code { white-space: pre-wrap; }
108 | input, abbr, acronym, blockquote, code, kbd, q, samp, var, #site-h1 { hyphens: none; }
109 | abbr[title]:hover { cursor: help; }
110 | audio, video { width: 100%; }
111 | responsive-container { display: block; background-size: cover !important; background-repeat: no-repeat !important; width: 100%; position: relative; }
112 | responsive-container.has-pad > *, responsive-container.has-pad picture img { position: absolute; top: 0; left: 0; width: 100%; height: 100%; }
113 | .icon { height: 1em; width: 1em; margin: 0 0.1em 0 0.01em; vertical-align: middle; shape-rendering: geometricprecision; fill: currentColor; }
114 | blockquote { border-left: 0.35rem solid #ddd; padding: 0 1rem; }
115 | audio[controls] { height: calc(var(--lineh) * 2); }
116 |
117 | main {
118 | margin-top: calc(var(--lineh) * 2);
119 | letter-spacing: -0.004em;
120 | }
121 | body { min-height: 100vh; }
122 | a { color: #f7fdfe; transition: color ease 0.2s; }
123 | a:hover, a:focus { color: #fff; }
124 |
125 | p, pre, figure { margin: var(--lineh) 0; overflow: hidden; }
126 | h1, h2, h3, h4 { font-weight: 550; margin: var(--lineh) 0; }
127 | h1 { font-size: 1.602rem; line-height: calc(var(--lineh) * 2); }
128 | h2 { font-size: 1.424rem; }
129 | h3 { font-size: 1.266rem; }
130 | h4 { font-size: 1.125rem; }
131 |
132 | #site-h1 {
133 | font-size: 2.281rem;
134 | letter-spacing: -0.007em;
135 | font-weight: 900;
136 | line-height: calc(var(--lineh) * 3);
137 | margin: var(--lineh) var(--hspac);
138 | }
139 | #site-h1 a { text-decoration: none; }
140 | #site-h1 a:hover, #site-h1 a:focus { text-decoration: underline; }
141 |
142 | #site-nav {
143 | text-transform: lowercase;
144 | font-size: 1.266rem;
145 | font-weight: 500;
146 | margin: var(--lineh) var(--hspac);
147 | display: grid;
148 | grid-template-columns: repeat(auto-fill, minmax(128px, 1fr));
149 | grid-gap: 0 var(--hspac);
150 | }
151 | #site-nav a {
152 | display: block;
153 | text-decoration: none;
154 | padding-bottom: 0.4rem;
155 | border-bottom: 0.15rem solid;
156 | margin-top: calc(var(--lineh) - 0.55rem);
157 | }
158 | #site-nav a:first-child { color: #96d9f8; }
159 | #site-nav a:nth-child(2) { color: #b3ec88; }
160 | #site-nav a:nth-child(3) { color: #d3aff4; }
161 | #site-nav a:nth-child(4) { color: #f2abe8; }
162 | #site-nav a:nth-child(5) { color: #e9f2a6; }
163 | #site-nav a:nth-child(6) { color: #f0b794; }
164 | #site-nav a:nth-child(7) { color: #a2f1ec; }
165 | #site-nav a:nth-child(8) { color: #b7a2f1; }
166 | #site-nav a:nth-child(9) { color: #8683e7; }
167 | #site-nav a:hover, #site-nav a:focus { color: #fff; }
168 |
169 | article, .block-thingy {
170 | background: #fbfbfb; color: #444;
171 | overflow: hidden;
172 | margin-bottom: calc(var(--lineh) * 2);
173 | position: relative;
174 | }
175 | article *:focus, .block-thingy *:focus { outline-color: #333; }
176 | article a { color: #555; }
177 | article a:hover, article a:focus { color: #66b; }
178 | article > figure { margin: 0; }
179 | .entry-txt { padding: 0 var(--hspac); }
180 | .entry-info, .entry-ctx, .entry-photo-meta { background: #eaf4f6; text-shadow: #fff 0 1px 0; font-size: 0.889rem; padding: calc(var(--lineh) / 2) var(--hspac); }
181 | .entry-info a, #site-footer a { display: inline-block; }
182 | .entry-type-icon { position: absolute; right: -0.2rem; top: -0.1rem; width: 2.5rem; height: 2.5rem; border-radius: 4px; transform: rotateZ(8deg); opacity: 0.5; }
183 | .entry-type-icon-reply { background: #32E0F0; color: white; padding: 0.15rem 0.1rem 0 0; }
184 | .entry-type-icon-like { background: #ff91ed; color: white; padding: 0.1rem; }
185 | .entry-type-icon-repost { background: #f56060; color: white; padding: 0.25rem; }
186 | .entry-type-icon-bookmark { background: #607cf5; color: white; padding: 0.25rem 0.5rem; }
187 | .entry-ctx > blockquote.e-content { margin: 0; }
188 |
189 | .syntect .comment { color: #666; }
190 | .syntect .function, .syntect .tag, .syntect .object { color: #10a080; }
191 | .syntect .modifier { color: #bb8080; }
192 | .syntect .keyword, .syntect .property-name, .syntect .attribute-name { color: #3030bf; }
193 | .syntect .string { color: #e110ab; }
194 | .syntect .punctuation { color: #a04040; }
195 |
196 | .entry-facepile {
197 | margin: 0.5rem 0.5rem 0.5rem 0;
198 | display: inline-block;
199 | }
200 | .entry-facepile a { text-decoration: none; }
201 | .entry-facepile img {
202 | display: inline-block;
203 | margin-right: 0.1rem;
204 | width: 32px;
205 | height: 32px;
206 | min-width: 32px;
207 | border-radius: 4px;
208 | font-size: 0.5rem;
209 | background: linear-gradient(to bottom, #f678a0, #f83ce2);
210 | color: white;
211 | overflow: hidden;
212 | }
213 |
214 | #site-footer, .feed-nav { padding: 0 var(--hspac); }
215 | #site-footer { font-size: 0.889rem; margin: calc(var(--lineh) * 2) 0 var(--lineh); }
216 |
217 | .big-form { padding: var(--lineh) var(--hspac); }
218 | .big-form input, .big-form button { width: 100%; padding: 0 var(--hspac); margin: var(--lineh) 0; line-height: calc(var(--lineh) * 2); }
219 |
220 | @media screen and (min-width:40rem) {
221 | body {
222 | display: grid;
223 | grid-template-rows: min-content auto min-content;
224 | grid-gap: 0 var(--lineh);
225 | justify-content: center;
226 | grid-template-columns: auto calc(var(--lineh) * 2.25) calc(80% - 5vw) calc(var(--lineh) * 2.25) auto;
227 | grid-template-areas: ". . header . ." ". left main . ." ". . footer . .";
228 | }
229 | #site-h1 { grid-area: left; line-height: var(--lineh); margin: var(--lineh) 0 0 0; writing-mode: vertical-rl; transform: rotate(180deg); text-align: right; }
230 | @supports (writing-mode: sideways-lr) {
231 | #site-h1 { writing-mode: sideways-lr; transform: none; }
232 | }
233 | #site-nav { grid-area: header; }
234 | main { grid-area: main; margin-top: var(--lineh); }
235 | #site-footer { grid-area: footer; }
236 | }
237 |
238 | @media screen and (min-width:75rem) {
239 | body { grid-template-columns: auto calc(var(--lineh) * 2.25) calc(80% - 10vw) calc(var(--lineh) * 2.25) auto; }
240 | }
241 |
242 | @media screen and (min-width:85rem) {
243 | body { grid-template-columns: auto calc(var(--lineh) * 2.25) calc(80% - 15vw) calc(var(--lineh) * 2.25) auto; }
244 | }
245 |
246 | @media screen and (min-width:95rem) {
247 | body { grid-template-columns: auto calc(var(--lineh) * 2.25) calc(80% - 20vw) calc(var(--lineh) * 2.25) auto; }
248 | }
249 |
250 | @media screen and (min-width:105rem) {
251 | body { grid-template-columns: auto calc(var(--lineh) * 2.25) calc(80% - 25vw) calc(var(--lineh) * 2.25) auto; }
252 | }
253 |
254 | /*body {position: relative;}
255 | body::after { content: ' '; background: url(https://basehold.it/i/25.5); background-repeat: repeat; background-size: 4px var(--lineh); z-index: 666; display: block; width: auto; height: auto; pointer-events: none; position: absolute; top:0;bottom:0;left:0;right:0; opacity: 0.1;}*/
256 |
--------------------------------------------------------------------------------
/test/sweetroll2/markup_test.exs:
--------------------------------------------------------------------------------
1 | defmodule Sweetroll2.MarkupTest do
2 | use ExUnit.Case, async: true
3 | import Sweetroll2.Markup
4 | doctest Sweetroll2.Markup
5 |
6 | defp s_t(html),
7 | do: html |> html_part_to_tree |> sanitize_tree |> render_tree
8 |
9 | describe "sanitize_tree" do
10 | test "removes scripts but not text formatting" do
11 | # https://github.com/swisskyrepo/PayloadsAllTheThings/tree/master/XSS%20Injection
12 | assert s_t("hi ") == "hi alert('XSS')"
13 |
14 | assert s_t("\">
") ==
15 | ~S[">
]
16 |
17 | assert s_t("