├── .drone.yml
├── .gitignore
├── .isort.cfg
├── .readthedocs.yml
├── LICENSE
├── MANIFEST.in
├── README.md
├── dev-requirements.txt
├── docs
├── Makefile
├── _templates
│ ├── sidebar_badges.html
│ └── sidebar_end.html
├── conf.py
└── index.rst
├── little_boxes
├── __init__.py
├── __version__.py
├── activitypub.py
├── backend.py
├── collection.py
├── content_helper.py
├── errors.py
├── httpsig.py
├── key.py
├── linked_data_sig.py
├── urlutils.py
└── webfinger.py
├── requirements.txt
├── setup.cfg
├── setup.py
└── tests
├── test_backend.py
├── test_collection.py
├── test_content_helper.py
├── test_httpsig.py
├── test_key.py
├── test_linked_data_sig.py
├── test_urlutils.py
└── test_webfinger.py
/.drone.yml:
--------------------------------------------------------------------------------
1 | ---
2 | kind: pipeline
3 | name: default
4 | steps:
5 | - name : lint
6 | image: python:3
7 | commands:
8 | - pip install -r requirements.txt
9 | - pip install black flake8 mypy
10 | - black --check .
11 | - flake8 little_boxes
12 | - mypy --ignore-missing-imports little_boxes
13 |
14 | - name: test
15 | image: python:3
16 | commands:
17 | - pip install -r requirements.txt
18 | - pip install -r dev-requirements.txt
19 | - python -m pytest -vv --cov=little_boxes
20 | - codecov
21 | ---
22 | kind: signature
23 | hmac: 1a911726b099fc76fc3ea1181f5c7b0c0e4d8d4b1b314204def5da7f21fb299b
24 |
25 | ...
26 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.sw[op]
2 | key_*.pem
3 |
4 | .coverage
5 | coverage.xml
6 | *.egg-info
7 | dist/
8 | .pytest_cache
9 | .mypy_cache/
10 | __pycache__/
11 | docs/_build
12 |
--------------------------------------------------------------------------------
/.isort.cfg:
--------------------------------------------------------------------------------
1 | [settings]
2 | line_length=120
3 | force_single_line=true
4 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | build:
2 | image: latest
3 |
4 | python:
5 | version: 3.6
6 | setup_py_install: true
7 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | ISC License
2 |
3 | Copyright (c) 2018, Thomas Sileo
4 |
5 | Permission to use, copy, modify, and/or distribute this software for any
6 | purpose with or without fee is hereby granted, provided that the above
7 | copyright notice and this permission notice appear in all copies.
8 |
9 | THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
10 | WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
11 | MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
12 | ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
13 | WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
14 | ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
15 | OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
16 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include README.md LICENSE
2 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Little Boxes
2 |
3 |
4 |
5 |
6 |
7 |
8 | Tiny [ActivityPub](https://activitypub.rocks/) framework written in Python, both database and server agnostic.
9 |
10 | **Still in early development, and not published on PyPI yet.**
11 |
12 | Until a first version is released, the main goal of this framework is to power the [microblog.pub microblog engine](http://github.com/tsileo/microblog.pub).
13 |
14 |
15 | ## Features
16 |
17 | - Database and server agnostic
 18 | - You need to implement a backend that responds to activity side-effects
 19 | - This also means you're responsible for serving the activities/collections and receiving them
20 | - ActivityStreams helper classes
21 | - with Outbox/Inbox abstractions
22 | - Content helper using Markdown
23 | - with helpers for parsing hashtags and linkify content
24 | - Key (RSA) helper
25 | - HTTP signature helper
26 | - JSON-LD signature helper
27 | - Webfinger helper
28 |
29 |
30 | ## Getting Started
31 |
32 | ```python
33 | from little_boxes import activitypub as ap
34 |
35 | from mydb import db_client
36 |
37 |
38 | class MyBackend(ap.Backend):
39 |
40 | def __init__(self, db_connection):
41 | self.db_connection = db_connection
42 |
43 | def inbox_new(self, as_actor: ap.Person, activity: ap.Activity) -> None:
44 | # Save activity as "as_actor"
45 | # [...]
46 |
47 | def post_to_remote_inbox(self, as_actor: ap.Person, payload: ap.ObjectType, recipient: str) -> None:
48 | # Send the activity to the remote actor
49 | # [...]
50 |
51 |
52 | db_con = db_client()
53 | my_backend = MyBackend(db_con)
54 |
55 | ap.use_backend(my_backend)
56 |
57 | me = ap.Person({}) # Init an actor
58 | outbox = ap.Outbox(me)
59 |
60 | follow = ap.Follow(actor=me.id, object='http://iri-i-want-follow')
61 | outbox.post(follow)
62 | ```
63 |
64 |
65 | ## Projects using Little Boxes
66 |
67 | - [microblog.pub](http://github.com/tsileo/microblog.pub) (using MongoDB as a backend)
68 | - [pubgate](https://github.com/autogestion/pubgate)
69 |
70 |
71 | ## Contributions
72 |
73 | TODO: document Mypy, flake8 and black.
74 |
 75 | PRs are welcome, please open an issue to start a discussion before you start any work.
76 |
77 |
78 | ## License
79 |
80 | ISC, see the LICENSE file.
81 |
--------------------------------------------------------------------------------
/dev-requirements.txt:
--------------------------------------------------------------------------------
1 | pytest
2 | pytest-cov
3 | httpretty
4 | black
5 | codecov
6 | flake8
7 | mypy
8 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SPHINXPROJ = LittleBoxes
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/docs/_templates/sidebar_badges.html:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
--------------------------------------------------------------------------------
/docs/_templates/sidebar_end.html:
--------------------------------------------------------------------------------
1 | Resources
2 |
3 |
9 |
10 |
12 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Configuration file for the Sphinx documentation builder.
4 | #
5 | # This file does only contain a selection of the most common options. For a
6 | # full list see the documentation:
7 | # http://www.sphinx-doc.org/en/master/config
8 |
9 | # -- Path setup --------------------------------------------------------------
10 |
11 | # If extensions (or modules to document with autodoc) are in another directory,
12 | # add these directories to sys.path here. If the directory is relative to the
13 | # documentation root, use os.path.abspath to make it absolute, like shown here.
14 | #
15 | # import os
16 | # import sys
17 | # sys.path.insert(0, os.path.abspath('.'))
18 |
19 |
20 | # -- Project information -----------------------------------------------------
21 |
22 | project = "Little Boxes"
23 | copyright = "2018, Thomas Sileo"
24 | author = "Thomas Sileo"
25 |
26 | # The short X.Y version
27 | version = ""
28 | # The full version, including alpha/beta/rc tags
29 | release = ""
30 |
31 |
32 | # -- General configuration ---------------------------------------------------
33 |
34 | # If your documentation needs a minimal Sphinx version, state it here.
35 | #
36 | # needs_sphinx = '1.0'
37 |
38 | # Add any Sphinx extension module names here, as strings. They can be
39 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
40 | # ones.
41 | extensions = ["sphinx.ext.autodoc", "sphinx.ext.viewcode"]
42 |
43 | # Add any paths that contain templates here, relative to this directory.
44 | templates_path = ["_templates"]
45 |
46 | # The suffix(es) of source filenames.
47 | # You can specify multiple suffix as a list of string:
48 | #
49 | # source_suffix = ['.rst', '.md']
50 | source_suffix = ".rst"
51 |
52 | # The master toctree document.
53 | master_doc = "index"
54 |
55 | # The language for content autogenerated by Sphinx. Refer to documentation
56 | # for a list of supported languages.
57 | #
58 | # This is also used if you do content translation via gettext catalogs.
59 | # Usually you set "language" from the command line for these cases.
60 | language = None
61 |
62 | # List of patterns, relative to source directory, that match files and
63 | # directories to ignore when looking for source files.
64 | # This pattern also affects html_static_path and html_extra_path .
65 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
66 |
67 | # The name of the Pygments (syntax highlighting) style to use.
68 | pygments_style = "sphinx"
69 |
70 |
71 | # -- Options for HTML output -------------------------------------------------
72 |
73 | # The theme to use for HTML and HTML Help pages. See the documentation for
74 | # a list of builtin themes.
75 | #
76 | html_theme = "alabaster"
77 |
78 | # Theme options are theme-specific and customize the look and feel of a theme
79 | # further. For a list of options available for each theme, see the
80 | # documentation.
81 | #
82 | # html_theme_options = {}
83 |
84 | # Add any paths that contain custom static files (such as style sheets) here,
85 | # relative to this directory. They are copied after the builtin static files,
86 | # so a file named "default.css" will overwrite the builtin "default.css".
87 | html_static_path = ["_static"]
88 |
89 | # Custom sidebar templates, must be a dictionary that maps document names
90 | # to template names.
91 | #
92 | # The default sidebars (for documents that don't match any pattern) are
93 | # defined by theme itself. Builtin themes are using these templates by
94 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
95 | # 'searchbox.html']``.
96 | #
97 | html_sidebars = {
98 | "**": [
99 | "about.html",
100 | "sidebar_badges.html",
101 | "navigation.html",
102 | "searchbox.html",
103 | "sidebar_end.html",
104 | ]
105 | }
106 |
107 |
108 | # -- Options for HTMLHelp output ---------------------------------------------
109 |
110 | # Output file base name for HTML help builder.
111 | htmlhelp_basename = "LittleBoxesdoc"
112 |
113 |
114 | # -- Options for LaTeX output ------------------------------------------------
115 |
116 | latex_elements = {
117 | # The paper size ('letterpaper' or 'a4paper').
118 | #
119 | # 'papersize': 'letterpaper',
120 | # The font size ('10pt', '11pt' or '12pt').
121 | #
122 | # 'pointsize': '10pt',
123 | # Additional stuff for the LaTeX preamble.
124 | #
125 | # 'preamble': '',
126 | # Latex figure (float) alignment
127 | #
128 | # 'figure_align': 'htbp',
129 | }
130 |
131 | # Grouping the document tree into LaTeX files. List of tuples
132 | # (source start file, target name, title,
133 | # author, documentclass [howto, manual, or own class]).
134 | latex_documents = [
135 | (
136 | master_doc,
137 | "LittleBoxes.tex",
138 | "Little Boxes Documentation",
139 | "Thomas Sileo",
140 | "manual",
141 | )
142 | ]
143 |
144 |
145 | # -- Options for manual page output ------------------------------------------
146 |
147 | # One entry per manual page. List of tuples
148 | # (source start file, name, description, authors, manual section).
149 | man_pages = [(master_doc, "littleboxes", "Little Boxes Documentation", [author], 1)]
150 |
151 |
152 | # -- Options for Texinfo output ----------------------------------------------
153 |
154 | # Grouping the document tree into Texinfo files. List of tuples
155 | # (source start file, target name, title, author,
156 | # dir menu entry, description, category)
157 | texinfo_documents = [
158 | (
159 | master_doc,
160 | "LittleBoxes",
161 | "Little Boxes Documentation",
162 | author,
163 | "LittleBoxes",
164 | "One line description of project.",
165 | "Miscellaneous",
166 | )
167 | ]
168 |
169 |
170 | # -- Extension configuration -------------------------------------------------
171 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. Little Boxes documentation master file, created by
2 | sphinx-quickstart on Sat Jun 16 00:44:45 2018.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | Little Boxes
7 | ============
8 |
9 | .. image:: https://img.shields.io/travis/tsileo/little-boxes.svg
10 | :target: https://travis-ci.org/tsileo/little-boxes
11 |
12 | .. image:: https://codecov.io/gh/tsileo/little-boxes/branch/master/graph/badge.svg
13 | :target: https://codecov.io/gh/tsileo/little-boxes
14 |
15 | .. image:: https://img.shields.io/pypi/v/little-boxes.svg
16 | :target: https://pypi.org/project/little-boxes
17 |
18 | .. image:: https://img.shields.io/pypi/pyversions/little-boxes.svg
19 | :target: https://pypi.org/project/little-boxes
20 |
21 | .. image:: https://img.shields.io/pypi/l/little-boxes.svg
22 | :target: https://github.com/tsileo/little-boxes
23 |
24 |
 25 | Tiny `ActivityPub <https://activitypub.rocks/>`_ framework written in Python, both database and server agnostic.
26 |
27 |
28 | Features
29 | --------
30 |
31 | * Database and server agnostic
 32 | * You need to implement a backend that responds to activity side-effects
 33 | * This also means you're responsible for serving the activities/collections and receiving them
34 | * ActivityStreams helper classes
35 | * with Outbox/Inbox abstractions
 36 | * Content helper using Markdown
37 | * with helpers for parsing hashtags and linkify content
38 | * Key (RSA) helper
39 | * HTTP signature helper
40 | * JSON-LD signature helper
41 | * Webfinger helper
42 |
43 |
 44 | Projects using Little Boxes
 45 | ---------------------------
46 |
 47 | * `microblog.pub <http://github.com/tsileo/microblog.pub>`_
48 |
49 |
50 | Documentation
51 | -------------
52 |
53 | .. toctree::
54 | :maxdepth: 2
55 | :caption: Contents:
56 |
57 |
58 | Indices and tables
59 | ------------------
60 |
61 | * :ref:`genindex`
62 | * :ref:`modindex`
63 | * :ref:`search`
64 |
--------------------------------------------------------------------------------
/little_boxes/__init__.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | logger = logging.getLogger(__name__)
4 |
5 |
def strtobool(s: str) -> bool:  # pragma: no cover
    """Convert a truthy/falsy string literal to a bool.

    Raises:
        ValueError: if *s* is not one of the recognized literals.
    """
    truthy = ("y", "yes", "true", "on", "1")
    falsy = ("n", "no", "false", "off", "0")
    if s in truthy:
        return True
    if s in falsy:
        return False
    raise ValueError(f"cannot convert {s} to bool")
13 |
--------------------------------------------------------------------------------
/little_boxes/__version__.py:
--------------------------------------------------------------------------------
# Package version as a tuple of ints (major, minor, patch).
VERSION = (0, 1, 0)  # pragma: no cover

# Dotted human-readable version string derived from VERSION.
__version__ = ".".join(str(part) for part in VERSION)  # pragma: no cover
4 |
--------------------------------------------------------------------------------
/little_boxes/activitypub.py:
--------------------------------------------------------------------------------
1 | """Core ActivityPub classes."""
2 | import logging
3 | import weakref
4 | from datetime import datetime
5 | from datetime import timezone
6 | from enum import Enum
7 | from typing import Any
8 | from typing import Dict
9 | from typing import List
10 | from typing import Optional
11 | from typing import Type
12 | from typing import Union
13 |
14 | from .backend import Backend
15 | from .errors import ActivityGoneError
16 | from .errors import ActivityNotFoundError
17 | from .errors import ActivityUnavailableError
18 | from .errors import BadActivityError
19 | from .errors import NotAnActivityError
20 | from .errors import Error
21 | from .errors import UnexpectedActivityTypeError
22 | from .key import Key
23 |
24 | logger = logging.getLogger(__name__)
25 |
# Singleton error instance raised whenever the process-wide backend has not
# been configured via use_backend() yet.
UninitializedBackendError = Error("a backend must be initialized")

# Helper/shortcut for typing
ObjectType = Dict[str, Any]
ActorType = Union["Person", "Application", "Group", "Organization", "Service"]
ObjectOrIDType = Union[str, ObjectType]

# Core JSON-LD context IRIs used in outgoing activities.
CTX_AS = "https://www.w3.org/ns/activitystreams"
CTX_SECURITY = "https://w3id.org/security/v1"
AS_PUBLIC = "https://www.w3.org/ns/activitystreams#Public"

# Default @context payload: the two core contexts plus extension terms.
DEFAULT_CTX = COLLECTION_CTX = [
    "https://www.w3.org/ns/activitystreams",
    "https://w3id.org/security/v1",
    {
        # AS ext
        "Hashtag": "as:Hashtag",
        "sensitive": "as:sensitive",
        "manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
        # toot
        "toot": "http://joinmastodon.org/ns#",
        "featured": "toot:featured",
        # schema
        "schema": "http://schema.org#",
        "PropertyValue": "schema:PropertyValue",
        "value": "schema:value",
    },
]

# Will be used to keep track of all the defined activities
# (populated automatically by _ActivityMeta, consumed by parse_activity).
_ACTIVITY_CLS: Dict["ActivityType", Type["BaseActivity"]] = {}

# Process-wide backend instance, installed via use_backend().
BACKEND: Optional[Backend] = None
59 |
60 |
def get_backend() -> Backend:
    """Return the globally configured backend.

    Raises the module-level ``UninitializedBackendError`` when
    ``use_backend()`` has not been called yet.
    """
    backend = BACKEND
    if backend is None:
        raise UninitializedBackendError
    return backend
65 |
66 |
def use_backend(backend_instance):
    """Install *backend_instance* as the process-wide backend used by all activities."""
    global BACKEND
    BACKEND = backend_instance
70 |
71 |
def format_datetime(dt: datetime) -> str:
    """Render a timezone-aware datetime as an ISO-8601 UTC string ending in "Z".

    Microseconds are dropped and the "+00:00" offset is replaced by "Z".

    Raises:
        ValueError: if *dt* is naive (has no tzinfo).
    """
    if dt.tzinfo is None:
        raise ValueError("datetime must be tz aware")

    as_utc = dt.astimezone(timezone.utc).replace(microsecond=0)
    return as_utc.isoformat().replace("+00:00", "Z")
82 |
83 |
class ActivityType(Enum):
    """Supported activity `type`.

    Each member's value is the literal string used in the ActivityStreams
    "type" field of a payload.
    """

    ANNOUNCE = "Announce"
    BLOCK = "Block"
    LIKE = "Like"
    CREATE = "Create"
    UPDATE = "Update"

    # Collection types
    ORDERED_COLLECTION = "OrderedCollection"
    ORDERED_COLLECTION_PAGE = "OrderedCollectionPage"
    COLLECTION_PAGE = "CollectionPage"
    COLLECTION = "Collection"

    # Object/content types
    NOTE = "Note"
    ARTICLE = "Article"
    VIDEO = "Video"
    AUDIO = "Audio"
    DOCUMENT = "Document"

    ACCEPT = "Accept"
    REJECT = "Reject"
    FOLLOW = "Follow"

    DELETE = "Delete"
    UNDO = "Undo"

    IMAGE = "Image"
    TOMBSTONE = "Tombstone"

    # Actor types
    PERSON = "Person"
    APPLICATION = "Application"
    GROUP = "Group"
    ORGANIZATION = "Organization"
    SERVICE = "Service"

    # Others
    MENTION = "Mention"

    # Mastodon specific?
    QUESTION = "Question"

    # Used by Prismo
    PAGE = "Page"

    # Misskey uses standalone Key object
    KEY = "Key"
132 |
133 |
# Types accepted wherever an actor is expected.
ACTOR_TYPES = [
    ActivityType.PERSON,
    ActivityType.APPLICATION,
    ActivityType.GROUP,
    ActivityType.ORGANIZATION,
    ActivityType.SERVICE,
    ActivityType.QUESTION,  # Mastodon notifies of the end of a Question with an Update from that Question
]

# Object types that may appear wrapped in a Create activity.
CREATE_TYPES = [
    ActivityType.NOTE,
    ActivityType.ARTICLE,
    ActivityType.VIDEO,
    ActivityType.AUDIO,
    ActivityType.QUESTION,
    ActivityType.DOCUMENT,
    ActivityType.PAGE,
]

# The two top-level collection container types.
COLLECTION_TYPES = [ActivityType.COLLECTION, ActivityType.ORDERED_COLLECTION]
154 |
155 |
def parse_activity(
    payload: ObjectType, expected: Optional[ActivityType] = None
) -> "BaseActivity":
    """Parse a raw activity payload into the matching BaseActivity subclass.

    Args:
        payload: raw ActivityStreams dict; must carry a "type" key.
        expected: when given, the payload's type must equal it.

    Raises:
        BadActivityError: if the payload has no type, or its type is not
            registered in the _ACTIVITY_CLS registry.
        UnexpectedActivityTypeError: if `expected` is set and does not match.
    """
    if "type" not in payload:
        raise BadActivityError(f"the payload has no type: {payload!r}")

    # "type" may be a list; only the first entry is considered.
    t = ActivityType(_to_list(payload["type"])[0])

    if expected and t != expected:
        raise UnexpectedActivityTypeError(
            f'expected a {expected.name} activity, got a {payload["type"]}: {payload}'
        )

    if t not in _ACTIVITY_CLS:
        raise BadActivityError(
            f'unsupported activity type {payload["type"]}: {payload}'
        )

    # Instantiate the registered class with the raw payload as kwargs.
    activity = _ACTIVITY_CLS[t](**payload)

    return activity
177 |
178 |
def _to_list(data: Union[List[Any], Any]) -> List[Any]:
    """Normalize a JSON-LD value to a list (single values get wrapped)."""
    return data if isinstance(data, list) else [data]
185 |
186 |
def clean_activity(activity: ObjectType) -> Dict[str, Any]:
    """Clean the activity before rendering it.

    - Remove the hidden bto and bcc fields (and the Markdown "source")
      from the activity and, for a Create, from its embedded object.

    The activity dict is mutated in place and also returned.
    """
    for field in ["bto", "bcc", "source"]:
        if field in activity:
            del activity[field]
        # Only scrub the embedded object when it is an actual dict: when the
        # object is just an IRI string, `field in activity["object"]` would be
        # a substring test and the `del` below would raise TypeError.
        if (
            activity["type"] == "Create"
            and isinstance(activity.get("object"), dict)
            and field in activity["object"]
        ):
            del activity["object"][field]
    return activity
197 |
198 |
def _get_actor_id(actor: ObjectOrIDType) -> str:
    """Helper for retrieving an actor `id` from a dict or a bare IRI string."""
    return actor["id"] if isinstance(actor, dict) else actor
204 |
205 |
def _get_id(obj) -> Optional[str]:
    """Extract an IRI from *obj*: None, a str IRI, or a dict with an "id" key.

    Raises:
        ValueError: if a dict has no "id", or *obj* is another type entirely.
    """
    if obj is None:
        return None
    if isinstance(obj, str):
        return obj
    if isinstance(obj, dict):
        if "id" not in obj:
            raise ValueError(f"object is missing ID: {obj!r}")
        return obj["id"]
    raise ValueError(f"unexpected object: {obj!r}")
218 |
219 |
def _has_type(
    obj_type: Union[str, List[str]],
    _types: Union[ActivityType, str, List[Union[ActivityType, str]]],
):
    """Returns `True` if one of `obj_type` equals one of `_types`."""
    # Normalize the wanted types into a set of plain strings first.
    wanted = {
        t.value if isinstance(t, ActivityType) else t for t in _to_list(_types)
    }
    return any(candidate in wanted for candidate in _to_list(obj_type))
233 |
234 |
class _ActivityMeta(type):
    """Metaclass for keeping track of subclass.

    Every class created with this metaclass is recorded in the module-level
    _ACTIVITY_CLS registry, which parse_activity uses for dispatch.
    """

    def __new__(meta, name, bases, class_dict):
        cls = type.__new__(meta, name, bases, class_dict)

        # Ensure the class has an activity type defined
        if name != "BaseActivity" and not cls.ACTIVITY_TYPE:
            raise ValueError(f"class {name} has no ACTIVITY_TYPE")

        # Register it
        # (NOTE: BaseActivity itself is registered under the key None.)
        _ACTIVITY_CLS[cls.ACTIVITY_TYPE] = cls
        return cls
248 |
249 |
class BaseActivity(object, metaclass=_ActivityMeta):
    """Base class for ActivityPub activities."""

    # Configuration knobs overridden by each subclass:
    ACTIVITY_TYPE: Optional[
        ActivityType
    ] = None  # the ActivityTypeEnum the class will represent
    OBJECT_REQUIRED = False  # Whether the object field is required or not
    ALLOWED_OBJECT_TYPES: List[ActivityType] = []
    ACTOR_REQUIRED = (
        True
    )  # Most of the objects require an actor, so this flag is on by default
261 |
    def __init__(self, **kwargs) -> None:  # noqa: C901
        """Build the underlying activity dict from keyword arguments.

        Validates "type", "actor" and "object" against the subclass
        configuration (ACTIVITY_TYPE, ACTOR_REQUIRED, OBJECT_REQUIRED,
        ALLOWED_OBJECT_TYPES) and normalizes "@context".
        """
        if not self.ACTIVITY_TYPE:
            raise Error("should never happen")

        # Initialize the dict that will contains all the activity fields
        self._data: Dict[str, Any] = {}

        if not kwargs.get("type"):
            self._data["type"] = self.ACTIVITY_TYPE.value
        else:
            # An explicit "type" must be (or contain) this class's type.
            atype = kwargs.pop("type")
            if self.ACTIVITY_TYPE.value not in _to_list(atype):
                raise UnexpectedActivityTypeError(
                    f"Expect the type to be {self.ACTIVITY_TYPE.value!r}"
                )
            self._data["type"] = atype

        logger.debug(f"initializing a {self.ACTIVITY_TYPE.value} activity: {kwargs!r}")

        # A place to set ephemeral data
        self.__ctx: Any = {}

        # Caches for get_object()/get_actor()
        self.__obj: Optional["BaseActivity"] = None
        self.__actor: Optional[List[ActorType]] = None

        # The id may not be present for new activities
        if "id" in kwargs:
            self._data["id"] = kwargs.pop("id")

        # Actor validation is skipped for actor objects themselves and for
        # classes that set ACTOR_REQUIRED = False.
        if self.ACTIVITY_TYPE not in ACTOR_TYPES and self.ACTOR_REQUIRED:
            actor = kwargs.get("actor")
            if actor:
                kwargs.pop("actor")
                actor = self._validate_actor(actor)
                self._data["actor"] = actor
            elif self.ACTIVITY_TYPE in CREATE_TYPES:
                # Create-style objects carry "attributedTo" instead of "actor".
                if "attributedTo" not in kwargs:
                    raise BadActivityError(f"Note is missing attributedTo")
            else:
                raise BadActivityError("missing actor")

        if self.OBJECT_REQUIRED and "object" in kwargs:
            obj = kwargs.pop("object")
            if isinstance(obj, str):
                # The object is a just a reference the its ID/IRI
                # FIXME(tsileo): fetch the ref
                self._data["object"] = obj
            elif isinstance(obj, dict):
                if not self.ALLOWED_OBJECT_TYPES:
                    raise UnexpectedActivityTypeError("unexpected object")
                # An embedded object needs a type, and (except for Create) an id.
                if "type" not in obj or (
                    self.ACTIVITY_TYPE != ActivityType.CREATE and "id" not in obj
                ):
                    raise BadActivityError("invalid object, missing type")
                if not _has_type(  # type: ignore # XXX too complicated
                    obj["type"], self.ALLOWED_OBJECT_TYPES
                ):
                    raise UnexpectedActivityTypeError(
                        f'unexpected object type {obj["type"]} (allowed={self.ALLOWED_OBJECT_TYPES!r})'
                    )
                self._data["object"] = obj
            else:
                raise BadActivityError(
                    f"invalid object type ({type(obj).__qualname__}): {obj!r}"
                )

        if "@context" not in kwargs:
            self._data["@context"] = CTX_AS
        else:
            self._data["@context"] = kwargs.pop("@context")

        # @context check: force a list form and make sure the security
        # context and the extension terms are always present.
        if not isinstance(self._data["@context"], list):
            self._data["@context"] = [self._data["@context"]]
        if CTX_SECURITY not in self._data["@context"]:
            self._data["@context"].append(CTX_SECURITY)
        if isinstance(self._data["@context"][-1], dict):
            self._data["@context"][-1]["Hashtag"] = "as:Hashtag"
            self._data["@context"][-1]["sensitive"] = "as:sensitive"
            self._data["@context"][-1]["toot"] = "http://joinmastodon.org/ns#"
            self._data["@context"][-1]["featured"] = "toot:featured"
        else:
            self._data["@context"].append(
                {
                    "Hashtag": "as:Hashtag",
                    "sensitive": "as:sensitive",
                    "toot": "http://joinmastodon.org/ns#",
                    "featured": "toot:featured",
                }
            )

        # Remove keys with `None` value
        valid_kwargs = {}
        for k, v in kwargs.items():
            if v is None:
                continue
            valid_kwargs[k] = v
        self._data.update(**valid_kwargs)

        # Optional subclass hook; NotImplementedError means "no hook".
        try:
            self._init()
        except NotImplementedError:
            pass
365 |
    def _init(self) -> None:
        """Optional init callback; subclasses override it (the NotImplementedError is swallowed by __init__)."""
        raise NotImplementedError
369 |
    def has_type(
        self, _types: Union[ActivityType, str, List[Union[ActivityType, str]]]
    ):
        """Return True if the activity has the given type."""
        # Delegates to the module-level _has_type helper.
        return _has_type(self._data["type"], _types)
375 |
376 | def get_url(self, preferred_mimetype: str = "text/html") -> str:
377 | """Returns the url attributes as a str.
378 |
379 | Returns the URL if it's a str, or the href of the first link.
380 |
381 | """
382 | if isinstance(self.url, str):
383 | return self.url
384 | elif isinstance(self.url, dict):
385 | if self.url.get("type") != "Link":
386 | raise BadActivityError(f"invalid type {self.url}")
387 | return str(self.url.get("href"))
388 | elif isinstance(self.url, list):
389 | last_link = None
390 | for link in self.url:
391 | last_link = link
392 | if link.get("type") != "Link":
393 | raise BadActivityError(f"invalid type {link}")
394 | if link.get("mimeType").startswith(preferred_mimetype):
395 | return link.get("href")
396 | if not last_link:
397 | raise BadActivityError(f"invalid type for {self.url}")
398 | return last_link
399 | else:
400 | raise BadActivityError(f"invalid type for {self.url}")
401 |
    def ctx(self) -> Any:
        """Return the ephemeral context set via set_ctx(), or None when unset."""
        # self.__ctx is a weakref.ref once set; calling it dereferences it
        # (and may yield None if the referent was garbage-collected).
        if self.__ctx:
            return self.__ctx()
405 |
    def set_ctx(self, ctx: Any) -> None:
        """Attach ephemeral context data; only a weak reference is kept."""
        # FIXME(tsileo): does not use the ctx to set the id to the "parent" when building delete
        self.__ctx = weakref.ref(ctx)
409 |
410 | def __repr__(self) -> str:
411 | """Pretty repr."""
412 | return "{}({!r})".format(self.__class__.__qualname__, self._data.get("id"))
413 |
    def __str__(self) -> str:
        """Returns the ID/IRI when casting to str (or a placeholder for new activities)."""
        return str(self._data.get("id", f"[new {self.ACTIVITY_TYPE} activity]"))
417 |
    def __getattr__(self, name: str) -> Any:
        """Allow to access the object field as regular attributes."""
        # NOTE(review): falsy stored values ("", 0, False) fall through the
        # truthiness check and return None here — confirm callers rely on
        # this before changing it.
        if self._data.get(name):
            return self._data.get(name)
422 |
    def _set_id(self, uri: str, obj_id: str) -> None:
        """Optional callback for subclasses to do something with a newly generated ID (for outbox activities)."""
        raise NotImplementedError
426 |
    def set_id(self, uri: str, obj_id: str) -> None:
        """Set the ID for a new activity and invoke the optional _set_id hook."""
        logger.debug(f"setting ID {uri} / {obj_id}")
        self._data["id"] = uri
        # NotImplementedError from _set_id just means the subclass has no hook.
        try:
            self._set_id(uri, obj_id)
        except NotImplementedError:
            pass
435 |
    def _actor_id(self, obj: ObjectOrIDType) -> str:
        """Extract an actor IRI from an embedded actor dict or a bare IRI string.

        Raises:
            BadActivityError: if the dict lacks an "id" or obj is neither an
                actor dict nor a string.
        """
        if isinstance(obj, dict) and _has_type(  # type: ignore
            obj["type"], ACTOR_TYPES
        ):
            obj_id = obj.get("id")
            if not obj_id:
                raise BadActivityError(f"missing object id: {obj!r}")
            return obj_id
        elif isinstance(obj, str):
            return obj
        else:
            raise BadActivityError(f'invalid "actor" field: {obj!r}')
448 |
    def _validate_actor(self, obj: ObjectOrIDType) -> str:
        """Resolve an actor reference through the backend and return its IRI.

        The fetched document must carry an "id" and one of the ACTOR_TYPES.
        """
        if BACKEND is None:
            raise UninitializedBackendError

        obj_id = self._actor_id(obj)
        try:
            actor = BACKEND.fetch_iri(obj_id)
        except (ActivityGoneError, ActivityNotFoundError):
            # Known "missing remote" errors are propagated untouched.
            raise
        except Exception:
            # Any other fetch failure is reported as a bad activity.
            raise BadActivityError(f"failed to validate actor {obj!r}")

        if not actor or "id" not in actor:
            raise BadActivityError(f"invalid actor {actor}")

        if not _has_type(  # type: ignore # XXX: too complicated
            actor["type"], ACTOR_TYPES
        ):
            raise UnexpectedActivityTypeError(f'actor has wrong type {actor["type"]!r}')

        return actor["id"]
470 |
    def get_object_id(self) -> str:
        """Return the IRI of the wrapped object (without fetching it remotely)."""
        if BACKEND is None:
            raise UninitializedBackendError

        # Prefer the cached parsed object from a previous get_object() call.
        if self.__obj:
            return self.__obj.id
        if isinstance(self._data["object"], dict):
            return self._data["object"]["id"]
        elif isinstance(self._data["object"], str):
            return self._data["object"]
        else:
            raise ValueError(f"invalid object {self._data['object']}")
483 |
    def get_object(self) -> "BaseActivity":
        """Returns the object as a BaseActivity instance.

        The result is cached on the instance; an IRI-only object is fetched
        through the backend and type-checked against ALLOWED_OBJECT_TYPES.
        """
        if BACKEND is None:
            raise UninitializedBackendError

        # Cached from a previous call
        if self.__obj:
            return self.__obj
        if isinstance(self._data["object"], dict):
            p = parse_activity(self._data["object"])
        else:
            # The object is only an IRI: fetch it through the backend first.
            obj = BACKEND.fetch_iri(self._data["object"])
            if ActivityType(obj.get("type")) not in self.ALLOWED_OBJECT_TYPES:
                raise UnexpectedActivityTypeError(
                    f'invalid object type {obj.get("type")!r}'
                )
            p = parse_activity(obj)

        # Memoize for subsequent calls (cleared via reset_object_cache()).
        self.__obj = p
        return p
503 |
    def reset_object_cache(self) -> None:
        """Drop the memoized object so the next `get_object()` re-parses it."""
        self.__obj = None
506 |
507 | def to_dict(
508 | self, embed: bool = False, embed_object_id_only: bool = False
509 | ) -> ObjectType:
510 | """Serializes the activity back to a dict, ready to be JSON serialized."""
511 | data = dict(self._data)
512 | if embed:
513 | for k in ["@context", "signature"]:
514 | if k in data:
515 | del data[k]
516 | if (
517 | data.get("object")
518 | and embed_object_id_only
519 | and isinstance(data["object"], dict)
520 | ):
521 | try:
522 | data["object"] = data["object"]["id"]
523 | except KeyError:
524 | raise BadActivityError(
525 | f'embedded object {data["object"]!r} should have an id'
526 | )
527 |
528 | return data
529 |
530 | def get_actor(self) -> ActorType:
531 | if BACKEND is None:
532 | raise UninitializedBackendError
533 |
534 | if self.__actor:
535 | return self.__actor[0]
536 |
537 | actor = self._data.get("actor")
538 | if not actor and self.ACTOR_REQUIRED:
539 | # Quick hack for Note objects
540 | if self.ACTIVITY_TYPE in CREATE_TYPES:
541 | actor = self._data.get("attributedTo")
542 | if not actor:
543 | raise BadActivityError(f"missing attributedTo")
544 | else:
545 | raise BadActivityError(f"failed to fetch actor: {self._data!r}")
546 |
547 | self.__actor: List[ActorType] = []
548 | for item in _to_list(actor):
549 | if not isinstance(item, (str, dict)):
550 | raise BadActivityError(f"invalid actor: {self._data!r}")
551 |
552 | actor_id = self._actor_id(item)
553 |
554 | p = parse_activity(BACKEND.fetch_iri(actor_id))
555 | if not p.has_type(ACTOR_TYPES): # type: ignore
556 | raise UnexpectedActivityTypeError(f"{p!r} is not an actor")
557 | self.__actor.append(p) # type: ignore
558 |
559 | return self.__actor[0]
560 |
    def _recipients(self) -> List[str]:
        # Overridden by subclasses to compute activity-specific recipients;
        # the base activity addresses no one.
        return []
563 |
564 | def recipients(self) -> List[str]: # noqa: C901
565 | if BACKEND is None:
566 | raise UninitializedBackendError
567 |
568 | recipients = self._recipients()
569 | actor_id = self.get_actor().id
570 |
571 | out: List[str] = []
572 | if self.type == ActivityType.CREATE.value:
573 | out = BACKEND.extra_inboxes()
574 |
575 | for recipient in recipients:
576 | if recipient in [actor_id, AS_PUBLIC, None]:
577 | continue
578 |
579 | try:
580 | actor = fetch_remote_activity(recipient)
581 | except (ActivityGoneError, ActivityNotFoundError, NotAnActivityError):
582 | logger.info(f"{recipient} is gone")
583 | continue
584 | except ActivityUnavailableError:
585 | # TODO(tsileo): retry separately?
586 | logger.info(f"failed {recipient} to fetch recipient")
587 | continue
588 |
589 | if actor.ACTIVITY_TYPE in ACTOR_TYPES:
590 | if actor.endpoints:
591 | shared_inbox = actor.endpoints.get("sharedInbox")
592 | if shared_inbox:
593 | if shared_inbox not in out:
594 | out.append(shared_inbox)
595 | continue
596 |
597 | if actor.inbox and actor.inbox not in out:
598 | out.append(actor.inbox)
599 |
600 | # Is the activity a `Collection`/`OrderedCollection`?
601 | elif actor.ACTIVITY_TYPE in COLLECTION_TYPES:
602 | for item in BACKEND.parse_collection(actor.to_dict()):
603 | # XXX(tsileo): is nested collection support needed here?
604 |
605 | if item in [actor_id, AS_PUBLIC]:
606 | continue
607 |
608 | try:
609 | col_actor = fetch_remote_activity(item)
610 | except ActivityUnavailableError:
611 | # TODO(tsileo): retry separately?
612 | logger.info(f"failed {recipient} to fetch recipient")
613 | continue
614 | except (
615 | ActivityGoneError,
616 | ActivityNotFoundError,
617 | NotAnActivityError,
618 | ):
619 | logger.info(f"{item} is gone")
620 | continue
621 |
622 | if col_actor.endpoints:
623 | shared_inbox = col_actor.endpoints.get("sharedInbox")
624 | if shared_inbox:
625 | if shared_inbox not in out:
626 | out.append(shared_inbox)
627 | continue
628 |
629 | if col_actor.inbox and col_actor.inbox not in out:
630 | out.append(col_actor.inbox)
631 | else:
632 | raise BadActivityError(f"failed to parse {recipient}")
633 |
634 | return out
635 |
636 |
class Person(BaseActivity):
    """An ActivityPub actor representing an individual account."""

    ACTIVITY_TYPE = ActivityType.PERSON
    OBJECT_REQUIRED = False
    ACTOR_REQUIRED = False

    def get_key(self) -> Key:
        """Return the actor's public key, built from the `publicKey` field."""
        return Key.from_dict(self.publicKey)
644 |
645 |
class Service(Person):
    """An actor representing an automated service (e.g. a bot)."""

    ACTIVITY_TYPE = ActivityType.SERVICE
648 |
649 |
class Application(Person):
    """An actor representing a software application."""

    ACTIVITY_TYPE = ActivityType.APPLICATION
652 |
653 |
class Group(Person):
    """An actor representing a group of users."""

    ACTIVITY_TYPE = ActivityType.GROUP
656 |
657 |
class Organization(Person):
    """An actor representing an organization."""

    ACTIVITY_TYPE = ActivityType.ORGANIZATION
660 |
661 |
class Block(BaseActivity):
    """Activity signaling that an actor is blocking another actor."""

    ACTIVITY_TYPE = ActivityType.BLOCK
    OBJECT_REQUIRED = True
    ACTOR_REQUIRED = True
666 |
667 |
class Collection(BaseActivity):
    """An unordered collection of objects."""

    ACTIVITY_TYPE = ActivityType.COLLECTION
    OBJECT_REQUIRED = False
    ACTOR_REQUIRED = False
672 |
673 |
class OrderedCollection(BaseActivity):
    """An ordered collection of objects.

    FIX: the class name was misspelled `OerderedCollection`; the misspelled
    name is kept below as an alias for backward compatibility.
    """

    ACTIVITY_TYPE = ActivityType.ORDERED_COLLECTION
    OBJECT_REQUIRED = False
    ACTOR_REQUIRED = False


# Backward-compatible alias for the historical misspelled name.
OerderedCollection = OrderedCollection
678 |
679 |
class Image(BaseActivity):
    """A standalone image object/attachment."""

    ACTIVITY_TYPE = ActivityType.IMAGE
    OBJECT_REQUIRED = False
    ACTOR_REQUIRED = False

    def __repr__(self):
        return f"Image({self._data.get('url')!r})"
687 |
688 |
class Follow(BaseActivity):
    """Activity for subscribing to another actor's activities."""

    ACTIVITY_TYPE = ActivityType.FOLLOW
    ALLOWED_OBJECT_TYPES = ACTOR_TYPES
    OBJECT_REQUIRED = True
    ACTOR_REQUIRED = True

    def _recipients(self) -> List[str]:
        # A follow request is delivered to the followed actor only
        followed = self.get_object()
        return [followed.id]

    def build_undo(self) -> BaseActivity:
        """Build the `Undo` activity that cancels this follow."""
        return Undo(actor=self.get_actor().id, object=self.to_dict(embed=True))
700 |
701 |
class Accept(BaseActivity):
    """Activity accepting a `Follow` request."""

    ACTIVITY_TYPE = ActivityType.ACCEPT
    ALLOWED_OBJECT_TYPES = [ActivityType.FOLLOW]
    OBJECT_REQUIRED = True
    ACTOR_REQUIRED = True

    def _recipients(self) -> List[str]:
        # Notify the actor that sent the follow request
        return [self.get_object().get_actor().id]
710 |
711 |
class Undo(BaseActivity):
    """Activity reverting a previous follow/like/announce/block."""

    ACTIVITY_TYPE = ActivityType.UNDO
    ALLOWED_OBJECT_TYPES = [
        ActivityType.FOLLOW,
        ActivityType.LIKE,
        ActivityType.ANNOUNCE,
        ActivityType.BLOCK,
    ]
    OBJECT_REQUIRED = True
    ACTOR_REQUIRED = True

    def _recipients(self) -> List[str]:
        undone = self.get_object()
        if undone.ACTIVITY_TYPE == ActivityType.FOLLOW:
            # Undoing a follow notifies the followed actor directly
            return [undone.get_object().id]
        # Otherwise notify the author of the liked/announced object
        return [undone.get_object().get_actor().id]
729 |
730 |
class Like(BaseActivity):
    """Activity marking an object as liked."""

    ACTIVITY_TYPE = ActivityType.LIKE
    ALLOWED_OBJECT_TYPES = CREATE_TYPES
    OBJECT_REQUIRED = True
    ACTOR_REQUIRED = True

    def _recipients(self) -> List[str]:
        # Notify the author of the liked object
        liked = self.get_object()
        return [liked.get_actor().id]

    def build_undo(self) -> BaseActivity:
        """Build the `Undo` activity that cancels this like."""
        embedded = self.to_dict(embed=True, embed_object_id_only=True)
        return Undo(actor=self.get_actor().id, object=embedded)
745 |
746 |
class Announce(BaseActivity):
    """Activity for sharing/boosting an object."""

    ACTIVITY_TYPE = ActivityType.ANNOUNCE
    ALLOWED_OBJECT_TYPES = CREATE_TYPES
    OBJECT_REQUIRED = True
    ACTOR_REQUIRED = True

    def _recipients(self) -> List[str]:
        # The author of the announced object, plus everyone addressed (deduped)
        recipients = {self.get_object().get_actor().id}
        for field in ["to", "cc"]:
            if field in self._data:
                recipients.update(_to_list(self._data[field]))

        return list(recipients)

    def build_undo(self) -> BaseActivity:
        """Build the `Undo` activity that cancels this announce."""
        return Undo(actor=self.get_actor().id, object=self.to_dict(embed=True))
764 |
765 |
class Delete(BaseActivity):
    """Activity requesting the deletion of an object."""

    ACTIVITY_TYPE = ActivityType.DELETE
    ALLOWED_OBJECT_TYPES = CREATE_TYPES + ACTOR_TYPES + [ActivityType.TOMBSTONE]
    OBJECT_REQUIRED = True

    def _get_actual_object(self) -> BaseActivity:
        """Resolve the object being deleted, de-referencing tombstones.

        NOTE(review): this relies on `BACKEND.fetch_iri` possibly returning a
        previously stored copy of an already-deleted object — confirm the
        backend's caching semantics.
        """
        if BACKEND is None:
            raise UninitializedBackendError

        # XXX(tsileo): overrides get_object instead?
        obj = self.get_object()
        if (
            obj.id.startswith(BACKEND.base_url())
            and obj.ACTIVITY_TYPE == ActivityType.TOMBSTONE
        ):
            # Local tombstone: re-fetch to try to recover the original object
            obj = parse_activity(BACKEND.fetch_iri(obj.id))
        if obj.ACTIVITY_TYPE == ActivityType.TOMBSTONE:
            # If we already received it, we may be able to get a copy
            better_obj = BACKEND.fetch_iri(obj.id)
            if better_obj:
                return parse_activity(better_obj)
        return obj

    def _recipients(self) -> List[str]:
        # Deliver the deletion to whoever the deleted object was addressed to
        obj = self._get_actual_object()
        return obj._recipients()
792 |
793 |
class Update(BaseActivity):
    """Activity carrying a new version of an existing object."""

    ACTIVITY_TYPE = ActivityType.UPDATE
    ALLOWED_OBJECT_TYPES = CREATE_TYPES + ACTOR_TYPES
    OBJECT_REQUIRED = True
    ACTOR_REQUIRED = True

    def _recipients(self) -> List[str]:
        # TODO(tsileo): audience support?
        recipients: List[str] = [
            r
            for field in ("to", "cc", "bto", "bcc")
            if field in self._data
            for r in _to_list(self._data[field])
        ]
        recipients.extend(self.get_object()._recipients())
        return recipients
810 |
811 |
class Create(BaseActivity):
    """Activity wrapping the creation of a new object (e.g. a `Note`)."""

    ACTIVITY_TYPE = ActivityType.CREATE
    ALLOWED_OBJECT_TYPES = CREATE_TYPES
    OBJECT_REQUIRED = True
    ACTOR_REQUIRED = True

    def is_public(self) -> bool:
        """Returns True if the activity is addressed to the special "public" collection."""
        for field in ["to", "cc", "bto", "bcc"]:
            if field in self._data:
                if AS_PUBLIC in _to_list(self._data[field]):
                    return True

        return False

    def _set_id(self, uri: str, obj_id: str) -> None:
        """Propagate the newly assigned id/url to the embedded object."""
        if BACKEND is None:
            raise UninitializedBackendError

        # FIXME(tsileo): add a BACKEND.note_activity_url, and pass the actor to both
        self._data["object"]["id"] = uri + "/activity"
        if "url" not in self._data["object"]:
            self._data["object"]["url"] = BACKEND.note_url(obj_id)
        if isinstance(self.ctx(), Note):
            try:
                self.ctx().id = self._data["object"]["id"]
            except NotImplementedError:
                pass
        # The embedded object changed, so the parsed copy must be rebuilt
        self.reset_object_cache()

    def _init(self) -> None:
        # Fill in defaults on the embedded object: attribution and timestamps
        obj = self.get_object()
        if not obj.attributedTo:
            self._data["object"]["attributedTo"] = self.get_actor().id
        if not obj.published:
            if self.published:
                self._data["object"]["published"] = self.published
            else:
                now = format_datetime(datetime.now().astimezone())
                self._data["published"] = now
                self._data["object"]["published"] = now

    def _recipients(self) -> List[str]:
        # TODO(tsileo): audience support?
        recipients = []
        for field in ["to", "cc", "bto", "bcc"]:
            if field in self._data:
                recipients.extend(_to_list(self._data[field]))

        recipients.extend(self.get_object()._recipients())

        return recipients

    def get_tombstone(self, deleted: Optional[str] = None) -> BaseActivity:
        """Build the `Tombstone` that replaces this activity once deleted."""
        return Tombstone(
            id=self.id,
            published=self.get_object().published,
            deleted=deleted,
            updated=deleted,
        )
872 |
873 |
class Tombstone(BaseActivity):
    """Placeholder left behind when an object is deleted."""

    ACTIVITY_TYPE = ActivityType.TOMBSTONE
    ACTOR_REQUIRED = False
    OBJECT_REQUIRED = False
878 |
879 |
class Note(BaseActivity):
    """A short piece of content, the main "post" type of the fediverse."""

    ACTIVITY_TYPE = ActivityType.NOTE
    ACTOR_REQUIRED = True
    # FIX: was misspelled `OBJECT_REQURIED`, which silently left the inherited
    # `OBJECT_REQUIRED` value in place instead of overriding it (siblings
    # `Document`/`Audio` spell it correctly).
    OBJECT_REQUIRED = False

    def _init(self) -> None:
        # Default to a non-sensitive note when the flag is absent
        if "sensitive" not in self._data:
            self._data["sensitive"] = False

    def _recipients(self) -> List[str]:
        # TODO(tsileo): audience support?
        recipients: List[str] = []

        for field in ["to", "cc", "bto", "bcc"]:
            if field in self._data:
                recipients.extend(_to_list(self._data[field]))

        return recipients

    def build_create(self) -> BaseActivity:
        """Wraps an activity in a Create activity."""
        create_payload = {
            "object": self.to_dict(embed=True),
            "actor": self.attributedTo,
        }
        # Addressing and publication date are mirrored on the wrapper
        for field in ["published", "to", "bto", "cc", "bcc", "audience"]:
            if field in self._data:
                create_payload[field] = self._data[field]

        create = Create(**create_payload)
        create.set_ctx(self)

        return create

    def build_like(self, as_actor: ActorType) -> BaseActivity:
        """Build a `Like` activity for this note, authored by `as_actor`."""
        return Like(object=self.id, actor=as_actor.id)

    def build_announce(self, as_actor: ActorType) -> BaseActivity:
        """Build a public `Announce` (boost) of this note by `as_actor`."""
        return Announce(
            actor=as_actor.id,
            object=self.id,
            to=[AS_PUBLIC],
            cc=[as_actor.followers, self.attributedTo],
            published=format_datetime(datetime.now().astimezone()),
        )

    def has_mention(self, actor_id: str) -> bool:
        """Return True if this note carries a `Mention` tag for `actor_id`."""
        if self.tag is not None:
            for tag in self.tag:
                try:
                    if tag["type"] == ActivityType.MENTION.value:
                        if tag["href"] == actor_id:
                            return True
                except Exception:
                    # Malformed tags are logged and skipped (best effort)
                    logger.exception(f"invalid tag {tag!r}")

        return False

    def get_in_reply_to(self) -> Optional[str]:
        """Return the IRI of the object this note replies to (if any)."""
        return _get_id(self.inReplyTo)
940 |
941 |
class Question(Note):
    """A poll: a note with a set of possible answers (`oneOf`)."""

    ACTIVITY_TYPE = ActivityType.QUESTION
    ACTOR_REQUIRED = True
    # FIX: was misspelled `OBJECT_REQURIED` (a no-op override)
    OBJECT_REQUIRED = False

    def one_of(self) -> List[Dict[str, Any]]:
        """Return the poll's possible answers (empty when absent)."""
        return self._data.get("oneOf", [])
949 |
950 |
class Article(Note):
    """A long-form text object (blog post, article)."""

    ACTIVITY_TYPE = ActivityType.ARTICLE
    ACTOR_REQUIRED = True
    # FIX: was misspelled `OBJECT_REQURIED` (a no-op override)
    OBJECT_REQUIRED = False
955 |
956 |
class Page(Note):
    """A web page object."""

    ACTIVITY_TYPE = ActivityType.PAGE
    ACTOR_REQUIRED = True
    # FIX: was misspelled `OBJECT_REQURIED` (a no-op override)
    OBJECT_REQUIRED = False
961 |
962 |
class Video(Note):
    """A video object."""

    ACTIVITY_TYPE = ActivityType.VIDEO
    ACTOR_REQUIRED = True
    # FIX: was misspelled `OBJECT_REQURIED` (a no-op override)
    OBJECT_REQUIRED = False
967 |
968 |
class Document(Note):
    """A generic document object."""

    ACTIVITY_TYPE = ActivityType.DOCUMENT
    ACTOR_REQUIRED = True
    OBJECT_REQUIRED = False
973 |
974 |
class Audio(Note):
    """An audio object."""

    ACTIVITY_TYPE = ActivityType.AUDIO
    ACTOR_REQUIRED = True
    OBJECT_REQUIRED = False
979 |
980 |
def fetch_remote_activity(
    iri: str, expected: Optional[ActivityType] = None
) -> BaseActivity:
    """Fetch `iri` via the configured backend and parse it into an activity."""
    payload = get_backend().fetch_iri(iri)
    return parse_activity(payload, expected=expected)
985 |
--------------------------------------------------------------------------------
/little_boxes/backend.py:
--------------------------------------------------------------------------------
1 | import abc
2 | import binascii
3 | import json
4 | import os
5 | import typing
6 | from typing import Any
7 | from typing import Dict
8 | from typing import List
9 | from typing import Optional
10 |
11 | import requests
12 |
13 | from .__version__ import __version__
14 | from .collection import parse_collection
15 | from .errors import ActivityGoneError
16 | from .errors import ActivityNotFoundError
17 | from .errors import ActivityUnavailableError
18 | from .errors import NotAnActivityError
19 | from .urlutils import URLLookupFailedError
20 | from .urlutils import check_url as check_url
21 |
22 | if typing.TYPE_CHECKING:
23 | from little_boxes import activitypub as ap # noqa: type checking
24 |
25 |
class Backend(abc.ABC):
    """Abstract interface between the ActivityPub logic and the application.

    Concrete implementations provide the URL helpers (`base_url`,
    `activity_url`, `note_url`); the base class ships the shared HTTP
    fetching helpers.
    """

    def debug_mode(self) -> bool:
        """Should be overridden to return `True` in order to enable the debug mode."""
        return False

    def check_url(self, url: str) -> None:
        # Validates the URL before fetching (checks are relaxed in debug mode)
        check_url(url, debug=self.debug_mode())

    def user_agent(self) -> str:
        """Return the User-Agent used for all outgoing requests."""
        return (
            f"{requests.utils.default_user_agent()} (Little Boxes/{__version__};"
            " +http://github.com/tsileo/little-boxes)"
        )

    def random_object_id(self) -> str:
        """Generates a random object ID."""
        return binascii.hexlify(os.urandom(8)).decode("utf-8")

    def fetch_json(self, url: str, **kwargs):
        """GET `url` expecting JSON; raises `requests.HTTPError` on bad status."""
        self.check_url(url)
        resp = requests.get(
            url,
            headers={"User-Agent": self.user_agent(), "Accept": "application/json"},
            **kwargs,
            timeout=15,
            allow_redirects=True,
        )

        resp.raise_for_status()

        return resp

    def parse_collection(
        self, payload: Optional[Dict[str, Any]] = None, url: Optional[str] = None
    ) -> List[str]:
        """Resolve a (possibly paged) collection into a flat list of items."""
        return parse_collection(payload=payload, url=url, fetcher=self.fetch_iri)

    def extra_inboxes(self) -> List[str]:
        """Allows to define inboxes that will be part of the recipients for every activity."""
        return []

    def is_from_outbox(
        self, as_actor: "ap.Person", activity: "ap.BaseActivity"
    ) -> bool:
        """Return True when `activity` was authored by `as_actor`."""
        return activity.get_actor().id == as_actor.id

    @abc.abstractmethod
    def base_url(self) -> str:
        # Base URL of the local instance, e.g. "https://example.com"
        pass  # pragma: no cover

    def fetch_iri(self, iri: str, **kwargs) -> "ap.ObjectType":  # pragma: no cover
        """Fetch `iri` as ActivityPub JSON.

        Raises:
            NotAnActivityError: if `iri` is not an HTTP IRI or not JSON.
            ActivityUnavailableError: on lookup failure, timeouts, or 5xx.
            ActivityNotFoundError: on HTTP 404.
            ActivityGoneError: on HTTP 410.
        """
        if not iri.startswith("http"):
            raise NotAnActivityError(f"{iri} is not a valid IRI")

        try:
            self.check_url(iri)
        except URLLookupFailedError:
            # The IRI is inaccessible
            raise ActivityUnavailableError(f"unable to fetch {iri}, url lookup failed")

        try:
            resp = requests.get(
                iri,
                headers={
                    "User-Agent": self.user_agent(),
                    "Accept": "application/activity+json",
                },
                timeout=15,
                allow_redirects=False,
                **kwargs,
            )
        except (
            requests.exceptions.ConnectTimeout,
            requests.exceptions.ReadTimeout,
            requests.exceptions.ConnectionError,
        ):
            raise ActivityUnavailableError(f"unable to fetch {iri}, connection error")
        # Map the relevant HTTP status codes to the package's error types
        if resp.status_code == 404:
            raise ActivityNotFoundError(f"{iri} is not found")
        elif resp.status_code == 410:
            raise ActivityGoneError(f"{iri} is gone")
        elif resp.status_code in [500, 502, 503]:
            raise ActivityUnavailableError(
                f"unable to fetch {iri}, server error ({resp.status_code})"
            )

        resp.raise_for_status()

        try:
            out = resp.json()
        except (json.JSONDecodeError, ValueError):
            # TODO(tsileo): a special error type?
            raise NotAnActivityError(f"{iri} is not JSON")

        return out

    @abc.abstractmethod
    def activity_url(self, obj_id: str) -> str:
        # URL for an activity with the given local object id
        pass  # pragma: no cover

    @abc.abstractmethod
    def note_url(self, obj_id: str) -> str:
        # URL for a note with the given local object id
        pass  # pragma: no cover
129 |
--------------------------------------------------------------------------------
/little_boxes/collection.py:
--------------------------------------------------------------------------------
"""Collection related utils."""
2 | from typing import Any
3 | from typing import Callable
4 | from typing import Dict
5 | from typing import List
6 | from typing import Optional
7 |
8 | from .errors import RecursionLimitExceededError
9 | from .errors import UnexpectedActivityTypeError
10 |
11 |
def parse_collection(  # noqa: C901
    payload: Optional[Dict[str, Any]] = None,
    url: Optional[str] = None,
    level: int = 0,
    fetcher: Optional[Callable[[str], Dict[str, Any]]] = None,
) -> List[Any]:
    """Resolve/fetch a `Collection`/`OrderedCollection`.

    Either `payload` (an already fetched collection document) or `url` must
    be provided, along with the `fetcher` callable used to dereference pages.

    Raises:
        RecursionLimitExceededError: when pages nest more than 3 levels deep.
        UnexpectedActivityTypeError: when a page has an unexpected type.
    """
    if not fetcher:
        raise Exception("must provide a fetcher")
    if level > 3:
        raise RecursionLimitExceededError("recursion limit exceeded")

    # Go through all the pages
    out: List[Any] = []
    if url:
        payload = fetcher(url)
    if not payload:
        # FIX: typo in the error message ("prove" -> "provide")
        raise ValueError("must at least provide a payload or an URL")

    if payload["type"] in ["Collection", "OrderedCollection"]:
        # Inline items win; otherwise walk the "first" page
        if "orderedItems" in payload:
            return payload["orderedItems"]
        if "items" in payload:
            return payload["items"]
        if "first" in payload:
            if isinstance(payload["first"], str):
                out.extend(
                    parse_collection(
                        url=payload["first"], level=level + 1, fetcher=fetcher
                    )
                )
            else:
                if "orderedItems" in payload["first"]:
                    out.extend(payload["first"]["orderedItems"])
                if "items" in payload["first"]:
                    out.extend(payload["first"]["items"])
                n = payload["first"].get("next")
                if n:
                    out.extend(
                        parse_collection(url=n, level=level + 1, fetcher=fetcher)
                    )
        return out

    # Collection pages: follow the "next" links until exhausted
    while payload:
        if payload["type"] in ["CollectionPage", "OrderedCollectionPage"]:
            if "orderedItems" in payload:
                out.extend(payload["orderedItems"])
            if "items" in payload:
                out.extend(payload["items"])
            n = payload.get("next")
            if n is None:
                break
            payload = fetcher(n)
        else:
            raise UnexpectedActivityTypeError(
                "unexpected activity type {}".format(payload["type"])
            )

    return out
71 |
--------------------------------------------------------------------------------
/little_boxes/content_helper.py:
--------------------------------------------------------------------------------
1 | from typing import Dict
2 | from typing import List
3 | from typing import Tuple
4 |
5 | from markdown import markdown
6 |
7 | import regex as re
8 |
9 | from .activitypub import get_backend
10 | from .webfinger import get_actor_url
11 |
12 |
13 | def _set_attrs(attrs, new=False):
14 | attrs[(None, "target")] = "_blank"
15 | attrs[(None, "class")] = "external"
16 | attrs[(None, "rel")] = "noopener"
17 | attrs[(None, "title")] = attrs[(None, "href")]
18 | return attrs
19 |
20 |
# Matches #hashtags (letters, digits and underscore)
HASHTAG_REGEX = re.compile(r"(#[\d\w]+)")
# Matches fediverse-style @user@domain mentions
MENTION_REGEX = re.compile(r"@[\d\w_.+-]+@[\d\w-]+\.[\d\w\-.]+")
23 |
24 |
def hashtagify(content: str) -> Tuple[str, List[Dict[str, str]]]:
    """Replace every #hashtag in `content` with a tag link.

    Returns the rewritten content and the list of `Hashtag` tag dicts.

    NOTE(review): replacing hashtags one-by-one with `str.replace` can also
    match a shorter hashtag inside an already-generated link for a longer
    one sharing the same prefix (e.g. `#ab` inside `#abc`'s link text) —
    verify whether a single-pass regex substitution is needed here.
    """
    base_url = get_backend().base_url()
    tags = []
    hashtags = re.findall(HASHTAG_REGEX, content)
    hashtags = list(set(hashtags))  # unique tags
    hashtags.sort()
    hashtags.reverse()  # replace longest tag first
    for hashtag in hashtags:
        tag = hashtag[1:]
        link = f'#{tag}'
        tags.append(dict(href=f"{base_url}/tags/{tag}", name=hashtag, type="Hashtag"))
        content = content.replace(hashtag, link)
    return content, tags
38 |
39 |
def mentionify(
    content: str, hide_domain: bool = False
) -> Tuple[str, List[Dict[str, str]]]:
    """Turn @user@domain mentions into profile links and `Mention` tags.

    Mentions whose actor URL cannot be resolved via webfinger are skipped.
    When `hide_domain` is set, the rendered mention omits the `@domain` part.
    """
    tags = []
    for mention in re.findall(MENTION_REGEX, content):
        _, username, domain = mention.split("@")
        actor_url = get_actor_url(mention)
        if not actor_url:
            # FIXME(tsileo): raise an error?
            continue
        # Fetch the actor to get its canonical id for the Mention tag
        p = get_backend().fetch_iri(actor_url)
        tags.append(dict(type="Mention", href=p["id"], name=mention))

        d = f"@{domain}"
        if hide_domain:
            d = ""

        link = f'@{username}{d}'
        content = content.replace(mention, link)
    return content, tags
60 |
61 |
def parse_markdown(content: str) -> Tuple[str, List[Dict[str, str]]]:
    """Render markdown `content` to HTML, expanding hashtags and mentions."""
    content, hashtag_tags = hashtagify(content)
    content, mention_tags = mentionify(content)
    tags = [*hashtag_tags, *mention_tags]
    content = markdown(content, extensions=["mdx_linkify"])
    return content, tags
70 |
--------------------------------------------------------------------------------
/little_boxes/errors.py:
--------------------------------------------------------------------------------
1 | """Errors raised by this package."""
2 | from typing import Any
3 | from typing import Dict
4 | from typing import Optional
5 |
6 |
class Error(Exception):
    """Base error for exceptions raised by this package.

    Catching `Error` catches every exception defined in this module.
    """
9 |
10 |
class DropActivityPreProcessError(Error):
    """Raised in `_pre_process_from_inbox` to notify that we don't want to save the message.

    (like when receiving `Announce` with an OStatus link).
    """
16 |
17 |
class ServerError(Error):
    """HTTP-friendly base error, with a status code, a message and an optional payload."""

    # Default status code; subclasses (or the constructor) may override it
    status_code = 400

    def __init__(
        self,
        message: str,
        status_code: Optional[int] = None,
        payload: Optional[Dict[str, Any]] = None,
    ) -> None:
        super().__init__()
        self.message = message
        if status_code is not None:
            self.status_code = status_code
        self.payload = payload

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the error (payload + message) for a JSON response."""
        out = dict(self.payload or {})
        out["message"] = self.message
        return out

    def __repr__(self) -> str:  # pragma: no cover
        return (
            f"{self.__class__.__qualname__}({self.message!r}, "
            f"payload={self.payload!r}, status_code={self.status_code})"
        )

    def __str__(self) -> str:  # pragma: no cover
        return self.__repr__()
48 |
49 |
class ActorBlockedError(ServerError):
    """Raised when an activity from a blocked actor is received."""
52 |
53 |
class NotFromOutboxError(ServerError):
    """Raised when an activity targets an object from the inbox when an object from the outbox was expected."""
56 |
57 |
class ActivityNotFoundError(ServerError):
    """Raised when an activity is not found."""

    # Mirrors HTTP 404 Not Found
    status_code = 404
62 |
63 |
class ActivityGoneError(ServerError):
    """Raised when trying to fetch a remote activity that was deleted."""

    # Mirrors HTTP 410 Gone
    status_code = 410
68 |
69 |
class BadActivityError(ServerError):
    """Raised when an activity could not be parsed/initialized."""
72 |
73 |
class RecursionLimitExceededError(BadActivityError):
    """Raised when the recursion limit for fetching remote object was exceeded (likely a collection)."""
76 |
77 |
class UnexpectedActivityTypeError(BadActivityError):
    """Raised when another activity type was expected."""
80 |
81 |
class ActivityUnavailableError(ServerError):
    """Raised when fetching a remote activity times out."""

    # Mirrors HTTP 503 Service Unavailable
    status_code = 503
86 |
87 |
class NotAnActivityError(ServerError):
    """Raised when no JSON can be decoded.

    Most likely raised when stumbling upon a OStatus notice or failed lookup.
    """
93 |
--------------------------------------------------------------------------------
/little_boxes/httpsig.py:
--------------------------------------------------------------------------------
1 | """Implements HTTP signature for Flask requests.
2 |
3 | Mastodon instances won't accept requests that are not signed using this scheme.
4 |
5 | """
6 | import base64
7 | import hashlib
8 | import logging
9 | from datetime import datetime
10 | from typing import Any
11 | from typing import Dict
12 | from typing import Optional
13 | from urllib.parse import urlparse
14 |
15 | from Crypto.Hash import SHA256
16 | from Crypto.Signature import PKCS1_v1_5
17 | from requests.auth import AuthBase
18 |
19 | from .activitypub import get_backend
20 | from .activitypub import _has_type
21 | from .errors import ActivityNotFoundError
22 | from .errors import ActivityGoneError
23 | from .key import Key
24 |
25 | logger = logging.getLogger(__name__)
26 |
27 |
28 | def _build_signed_string(
29 | signed_headers: str, method: str, path: str, headers: Any, body_digest: str
30 | ) -> str:
31 | out = []
32 | for signed_header in signed_headers.split(" "):
33 | if signed_header == "(request-target)":
34 | out.append("(request-target): " + method.lower() + " " + path)
35 | elif signed_header == "digest":
36 | out.append("digest: " + body_digest)
37 | else:
38 | out.append(signed_header + ": " + headers[signed_header])
39 | return "\n".join(out)
40 |
41 |
42 | def _parse_sig_header(val: Optional[str]) -> Optional[Dict[str, str]]:
43 | if not val:
44 | return None
45 | out = {}
46 | for data in val.split(","):
47 | k, v = data.split("=", 1)
48 | out[k] = v[1 : len(v) - 1] # noqa: black conflict
49 | return out
50 |
51 |
def _verify_h(signed_string, signature, pubkey):
    """Check `signature` against `signed_string` using the RSA public key."""
    digest = SHA256.new(signed_string.encode("utf-8"))
    return PKCS1_v1_5.new(pubkey).verify(digest, signature)
57 |
58 |
59 | def _body_digest(body: str) -> str:
60 | h = hashlib.new("sha256")
61 | h.update(body) # type: ignore
62 | return "SHA-256=" + base64.b64encode(h.digest()).decode("utf-8")
63 |
64 |
def _get_public_key(key_id: str) -> Key:
    """Fetch `key_id` and return the matching public `Key`.

    The IRI may resolve either to a bare `Key` document or to an actor
    embedding a `publicKey`.

    Raises:
        ValueError: if the fetched key's id does not match `key_id`.
    """
    actor = get_backend().fetch_iri(key_id)
    if _has_type(actor["type"], "Key"):
        # The Key is not embedded in the Person
        k = Key(actor["owner"], actor["id"])
        k.load_pub(actor["publicKeyPem"])
    else:
        k = Key(actor["id"], actor["publicKey"]["id"])
        k.load_pub(actor["publicKey"]["publicKeyPem"])

    # Ensure the right key was fetched
    if key_id != k.key_id():
        # FIX: report the id of the key actually built; the previous message
        # accessed actor["publicKey"]["id"], which raises KeyError when the
        # fetched document was a bare Key
        raise ValueError(f"failed to fetch requested key {key_id}: got {k.key_id()}")

    return k
82 |
83 |
def verify_request(method: str, path: str, headers: Any, body: str) -> bool:
    """Verify the HTTP signature of an incoming request.

    Returns False when the signature is missing or the public key cannot be
    fetched; otherwise checks the signature against the rebuilt signed string.
    """
    sig_data = _parse_sig_header(headers.get("Signature"))
    if not sig_data:
        logger.debug("no signature in header")
        return False
    logger.debug(f"hsig={sig_data}")

    signed_string = _build_signed_string(
        sig_data["headers"], method, path, headers, _body_digest(body)
    )

    try:
        key = _get_public_key(sig_data["keyId"])
    except (ActivityGoneError, ActivityNotFoundError):
        logger.debug("cannot get public key")
        return False

    raw_sig = base64.b64decode(sig_data["signature"])
    return _verify_h(signed_string, raw_sig, key.pubkey)
101 |
102 |
class HTTPSigAuth(AuthBase):
    """Requests auth plugin for signing requests on the fly."""

    def __init__(self, key: Key) -> None:
        # Key must hold a loaded private key (`key.privkey`)
        self.key = key

    def __call__(self, r):
        # requests auth hook: sign the outgoing prepared request in place
        logger.info(f"keyid={self.key.key_id()}")
        host = urlparse(r.url).netloc

        # Compute the SHA-256 digest of the body (str bodies are encoded
        # first; bytes bodies fall through the AttributeError)
        bh = hashlib.new("sha256")
        body = r.body
        try:
            body = r.body.encode("utf-8")
        except AttributeError:
            pass
        bh.update(body)
        bodydigest = "SHA-256=" + base64.b64encode(bh.digest()).decode("utf-8")

        date = datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S GMT")

        # These headers must be set before signing since they are covered below
        r.headers.update({"Digest": bodydigest, "Date": date, "Host": host})

        # Headers covered by the signature, in signing order
        sigheaders = "(request-target) user-agent host date digest content-type"

        to_be_signed = _build_signed_string(
            sigheaders, r.method, r.path_url, r.headers, bodydigest
        )
        signer = PKCS1_v1_5.new(self.key.privkey)
        digest = SHA256.new()
        digest.update(to_be_signed.encode("utf-8"))
        sig = base64.b64encode(signer.sign(digest))
        sig = sig.decode("utf-8")

        key_id = self.key.key_id()
        headers = {
            "Signature": f'keyId="{key_id}",algorithm="rsa-sha256",headers="{sigheaders}",signature="{sig}"'
        }
        logger.debug(f"signed request headers={headers}")

        r.headers.update(headers)

        return r
146 |
--------------------------------------------------------------------------------
/little_boxes/key.py:
--------------------------------------------------------------------------------
1 | import base64
2 | from typing import Any
3 | from typing import Dict
4 | from typing import Optional
5 |
6 | from Crypto.PublicKey import RSA
7 | from Crypto.Util import number
8 |
9 |
class Key(object):
    """RSA key pair tied to an ActivityPub actor (`owner`)."""

    DEFAULT_KEY_SIZE = 2048

    def __init__(self, owner: str, id_: Optional[str] = None) -> None:
        self.owner = owner
        self.privkey_pem: Optional[str] = None
        self.pubkey_pem: Optional[str] = None
        self.privkey: Optional[RSA.RsaKey] = None
        self.pubkey: Optional[RSA.RsaKey] = None
        self.id_ = id_

    def load_pub(self, pubkey_pem: str) -> None:
        """Load only the public half from a PEM string."""
        self.pubkey_pem = pubkey_pem
        self.pubkey = RSA.importKey(pubkey_pem)

    def load(self, privkey_pem: str) -> None:
        """Load the private key from a PEM string (derives the public half).

        FIX: also populate `self.pubkey`; previously only `pubkey_pem` was
        derived, leaving `pubkey` as `None` after `load()` (inconsistent with
        `load_pub`).
        """
        self.privkey_pem = privkey_pem
        self.privkey = RSA.importKey(self.privkey_pem)
        self.pubkey = self.privkey.publickey()
        self.pubkey_pem = self.pubkey.exportKey("PEM").decode("utf-8")

    def new(self) -> None:
        """Generate a fresh key pair.

        FIX: also populate `self.pubkey` (see `load`).
        """
        k = RSA.generate(self.DEFAULT_KEY_SIZE)
        self.privkey_pem = k.exportKey("PEM").decode("utf-8")
        self.pubkey = k.publickey()
        self.pubkey_pem = self.pubkey.exportKey("PEM").decode("utf-8")
        self.privkey = k

    def key_id(self) -> str:
        """Return the key IRI (defaults to `<owner>#main-key`)."""
        return self.id_ or f"{self.owner}#main-key"

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the public half as an ActivityPub `Key` document."""
        return {
            "id": self.key_id(),
            "owner": self.owner,
            "publicKeyPem": self.pubkey_pem,
            "type": "Key",
        }

    @classmethod
    def from_dict(cls, data):
        """Build a `Key` from an ActivityPub `Key`/`publicKey` document."""
        try:
            k = cls(data["owner"], data["id"])
            k.load_pub(data["publicKeyPem"])
        except KeyError:
            raise ValueError(f"bad key data {data!r}")
        return k

    def to_magic_key(self) -> str:
        """Export the key in the legacy "magic key" (Salmon/OStatus) format."""
        mod = base64.urlsafe_b64encode(
            number.long_to_bytes(self.privkey.n)  # type: ignore
        ).decode("utf-8")
        pubexp = base64.urlsafe_b64encode(
            number.long_to_bytes(self.privkey.e)  # type: ignore
        ).decode("utf-8")
        return f"data:application/magic-public-key,RSA.{mod}.{pubexp}"
64 |
--------------------------------------------------------------------------------
/little_boxes/linked_data_sig.py:
--------------------------------------------------------------------------------
1 | import base64
2 | import hashlib
3 | import typing
4 | from datetime import datetime
5 | from typing import Any
6 | from typing import Dict
7 |
8 | from Crypto.Hash import SHA256
9 | from Crypto.Signature import PKCS1_v1_5
10 | from pyld import jsonld
11 |
12 | if typing.TYPE_CHECKING:
13 | from .key import Key # noqa: type checking
14 |
15 |
16 | # cache the downloaded "schemas", otherwise the library is super slow
17 | # (https://github.com/digitalbazaar/pyld/issues/70)
18 | _CACHE: Dict[str, Any] = {}
19 | LOADER = jsonld.requests_document_loader()
20 |
21 |
def _caching_document_loader(url: str) -> Any:
    """JSON-LD document loader that memoizes responses in the module cache."""
    try:
        return _CACHE[url]
    except KeyError:
        document = LOADER(url)
        _CACHE[url] = document
        return document


jsonld.set_document_loader(_caching_document_loader)
31 |
32 |
def _options_hash(doc):
    """Return the SHA-256 hex digest of the normalized signature options.

    The `type`, `id` and `signatureValue` members are excluded per the
    RsaSignature2017 scheme.
    """
    options = {
        k: v
        for k, v in doc["signature"].items()
        if k not in ("type", "id", "signatureValue")
    }
    options["@context"] = "https://w3id.org/identity/v1"
    normalized = jsonld.normalize(
        options, {"algorithm": "URDNA2015", "format": "application/nquads"}
    )
    return hashlib.sha256(normalized.encode("utf-8")).hexdigest()
45 |
46 |
def _doc_hash(doc):
    """Return the SHA-256 hex digest of the normalized document, signature excluded."""
    stripped = {k: v for k, v in doc.items() if k != "signature"}
    normalized = jsonld.normalize(
        stripped, {"algorithm": "URDNA2015", "format": "application/nquads"}
    )
    return hashlib.sha256(normalized.encode("utf-8")).hexdigest()
57 |
58 |
def verify_signature(doc, key: "Key"):
    """Check the RsaSignature2017 linked-data signature embedded in `doc`.

    Returns True if `doc["signature"]["signatureValue"]` is a valid
    RSA/SHA-256 signature over the options hash + document hash,
    using `key` (public half preferred, private half as fallback).
    """
    to_be_signed = _options_hash(doc) + _doc_hash(doc)
    signature = doc["signature"]["signatureValue"]
    signer = PKCS1_v1_5.new(key.pubkey or key.privkey)  # type: ignore
    digest = SHA256.new()
    digest.update(to_be_signed.encode("utf-8"))
    return signer.verify(digest, base64.b64decode(signature))  # type: ignore
66 |
67 |
def generate_signature(doc, key: "Key"):
    """Add an RsaSignature2017 linked-data signature to `doc` (in place).

    Raises:
        ValueError: if `key` has no private key loaded.
    """
    # Fail fast: the original checked for the privkey only *after* mutating
    # `doc` and running two expensive JSON-LD normalizations, leaving a
    # half-built "signature" member behind on error.
    if not key.privkey:
        raise ValueError(f"missing privkey on key {key!r}")

    options = {
        "type": "RsaSignature2017",
        "creator": doc["actor"] + "#main-key",
        "created": datetime.utcnow().replace(microsecond=0).isoformat() + "Z",
    }
    doc["signature"] = options
    to_be_signed = _options_hash(doc) + _doc_hash(doc)

    signer = PKCS1_v1_5.new(key.privkey)
    digest = SHA256.new()
    digest.update(to_be_signed.encode("utf-8"))
    sig = base64.b64encode(signer.sign(digest))  # type: ignore
    options["signatureValue"] = sig.decode("utf-8")
84 |
--------------------------------------------------------------------------------
/little_boxes/urlutils.py:
--------------------------------------------------------------------------------
1 | import ipaddress
2 | import logging
3 | import socket
4 | from typing import Dict
5 | from urllib.parse import urlparse
6 |
7 | from .errors import Error
8 | from .errors import ServerError
9 |
10 | logger = logging.getLogger(__name__)
11 |
12 |
13 | _CACHE: Dict[str, bool] = {}
14 |
15 |
class InvalidURLError(ServerError):
    """Raised by `check_url` when an URL is rejected (bad scheme, localhost, private IP...)."""

    pass
18 |
19 |
class URLLookupFailedError(Error):
    """Raised when the DNS lookup for the URL's host fails."""

    pass
22 |
23 |
def is_url_valid(url: str, debug: bool = False) -> bool:
    """Return True if `url` is safe to fetch (basic SSRF protection).

    Rejects non-HTTP(S) schemes, URLs without a hostname, localhost, and
    hosts that are (or resolve to) a private IP address.  Positive results
    are cached per hostname in the module-level `_CACHE`.

    Raises:
        URLLookupFailedError: if the DNS lookup for the host fails.
    """
    parsed = urlparse(url)
    if parsed.scheme not in ["http", "https"]:
        return False

    # XXX in debug mode, we want to allow requests to localhost to test the federation with local instances
    if debug:  # pragma: no cover
        return True

    # A malformed URL (e.g. "http://") has no hostname at all; the original
    # code fell through and tried to resolve `None`.
    if not parsed.hostname or parsed.hostname in ["localhost"]:
        return False

    if _CACHE.get(parsed.hostname, False):
        return True

    try:
        ip_address = ipaddress.ip_address(parsed.hostname)
    except ValueError:
        # Not an IP literal: resolve the hostname.
        try:
            ip_address = socket.getaddrinfo(parsed.hostname, parsed.port or 80)[0][4][0]
            logger.debug(f"dns lookup: {parsed.hostname} -> {ip_address}")
        except socket.gaierror:
            logger.exception(f"failed to lookup url {url}")
            _CACHE[parsed.hostname] = False
            raise URLLookupFailedError(f"failed to lookup url {url}")

    logger.debug(f"{ip_address}")

    if ipaddress.ip_address(ip_address).is_private:
        logger.info(f"rejecting private URL {url}")
        _CACHE[parsed.hostname] = False
        return False

    _CACHE[parsed.hostname] = True
    return True
59 |
60 |
def check_url(url: str, debug: bool = False) -> None:
    """Raise `InvalidURLError` if `url` is not safe to fetch."""
    logger.debug(f"check_url {url} debug={debug}")
    valid = is_url_valid(url, debug=debug)
    if not valid:
        raise InvalidURLError(f'"{url}" is invalid')
67 |
--------------------------------------------------------------------------------
/little_boxes/webfinger.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | from typing import Any
4 | from typing import Dict
5 | from typing import Optional
6 | from urllib.parse import urlparse
7 |
8 | import requests
9 |
10 | from .activitypub import get_backend
11 | from .urlutils import check_url
12 |
13 | logger = logging.getLogger(__name__)
14 |
15 |
def webfinger(
    resource: str, debug: bool = False
) -> Optional[Dict[str, Any]]:  # noqa: C901
    """Mastodon-like WebFinger resolution to retrieve the activity stream Actor URL.

    Tries HTTPS first (unless `resource` is an explicit http:// URL) and
    falls back to HTTP.

    Returns:
        the parsed WebFinger JSON document, or None if the resolution failed.
    """
    logger.info(f"performing webfinger resolution for {resource}")
    protos = ["https", "http"]
    if resource.startswith("http://"):
        protos.reverse()
        host = urlparse(resource).netloc
    elif resource.startswith("https://"):
        host = urlparse(resource).netloc
    else:
        # Normalize "@user@host" / "acct:user@host" to "acct:user@host"
        if resource.startswith("acct:"):
            resource = resource[5:]
        if resource.startswith("@"):
            resource = resource[1:]
        _, host = resource.split("@", 1)
        resource = "acct:" + resource

    # Security check on the url (like not calling localhost)
    check_url(f"https://{host}", debug=debug)

    is_404 = False
    resp = None

    for i, proto in enumerate(protos):
        try:
            url = f"{proto}://{host}/.well-known/webfinger"
            # FIXME(tsileo): BACKEND.fetch_json so we can set a UserAgent
            resp = get_backend().fetch_json(url, params={"resource": resource})
            break
        except requests.ConnectionError:
            logger.exception("req failed")
            # If we tried https first and the domain is "http only"
            if i == 0:
                continue
            break
        except requests.HTTPError as http_error:
            logger.exception("HTTP error")
            if http_error.response.status_code in [403, 404]:
                is_404 = True
                continue
            raise

    # `resp is None` means every attempt failed at the connection level; the
    # original code fell through to `resp.raise_for_status()` and crashed
    # with an UnboundLocalError in that case.
    if is_404 or resp is None:
        return None

    resp.raise_for_status()
    try:
        return resp.json()
    except json.JSONDecodeError:
        return None
65 |
66 |
def get_remote_follow_template(resource: str, debug: bool = False) -> Optional[str]:
    """Return the OStatus "remote follow" URL template for `resource`, if any."""
    data = webfinger(resource, debug=debug)
    if data is None:
        return None
    subscribe_rel = "http://ostatus.org/schema/1.0/subscribe"
    templates = (
        link.get("template")
        for link in data["links"]
        if link.get("rel") == subscribe_rel
    )
    return next(templates, None)
75 |
76 |
def get_actor_url(resource: str, debug: bool = False) -> Optional[str]:
    """Mastodon-like WebFinger resolution to retrieve the activity stream Actor URL.

    Returns:
        the Actor URL or None if the resolution failed.
    """
    data = webfinger(resource, debug=debug)
    if data is None:
        return None
    for link in data["links"]:
        is_self_link = link.get("rel") == "self"
        is_activity_json = link.get("type") == "application/activity+json"
        if is_self_link and is_activity_json:
            return link.get("href")
    return None
93 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | mdx_linkify
2 | bleach
3 | requests
4 | markdown
5 | pyld
6 | pycryptodome
7 | html2text
8 | regex
9 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 120
3 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
"""Packaging script for little_boxes."""

import io
import os

# Use setuptools' setup(): distutils.core.setup silently ignores
# setuptools-only arguments such as `install_requires`,
# `python_requires` and `long_description_content_type`.
from setuptools import find_packages
from setuptools import setup


here = os.path.abspath(os.path.dirname(__file__))


# Package meta-data.
NAME = "little_boxes"
DESCRIPTION = (
    "Tiny ActivityPub framework written in Python, both database and server agnostic."
)
URL = "https://github.com/tsileo/little-boxes"
EMAIL = "t@a4.io"
AUTHOR = "Thomas Sileo"
REQUIRES_PYTHON = ">=3.6.0"
VERSION = None  # resolved from little_boxes/__version__.py below


REQUIRED = [
    "requests",
    "markdown",
    "bleach",
    "pyld",
    "pycryptodome",
    "html2text",
    "mdx_linkify",
    "regex",
]

DEPENDENCY_LINKS = []


# Load the package's __version__.py module as a dictionary.
about = {}
if not VERSION:
    with open(os.path.join(here, NAME, "__version__.py")) as f:
        exec(f.read(), about)
else:
    about["__version__"] = VERSION


# Import the README and use it as the long-description.
with io.open(os.path.join(here, "README.md"), encoding="utf-8") as f:
    long_description = "\n" + f.read()


setup(
    name=NAME,
    version=about["__version__"],
    description=DESCRIPTION,
    long_description=long_description,
    long_description_content_type="text/markdown",
    author=AUTHOR,
    author_email=EMAIL,
    python_requires=REQUIRES_PYTHON,
    url=URL,
    packages=find_packages(),
    install_requires=REQUIRED,
    dependency_links=DEPENDENCY_LINKS,
    license="ISC",
    classifiers=[
        # Trove classifiers
        # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
        "Development Status :: 3 - Alpha",
        "License :: OSI Approved :: ISC License (ISCL)",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
    ],
)
79 |
--------------------------------------------------------------------------------
/tests/test_backend.py:
--------------------------------------------------------------------------------
1 | import json
2 | from typing import List
3 | from typing import Optional
4 |
5 | import little_boxes.activitypub as ap
6 | from little_boxes.backend import Backend
7 |
8 |
def track_call(f):
    """Method decorator used to track the events fired during tests.

    Records `(method_name, positional_args, kwargs)` in the backend's
    `_METHOD_CALLS` log, keyed by the id of the second positional argument
    (the actor/activity the call is about).
    """
    fname = f.__name__

    def wrapper(backend, subject, *rest, **kwargs):
        call_record = (fname, (backend, subject) + rest, kwargs)
        backend._METHOD_CALLS[subject.id].append(call_record)
        return f(backend, subject, *rest, **kwargs)

    return wrapper
18 |
19 |
class InMemBackend(Backend):
    """In-memory backend meant to be used for the test suite."""

    # Class-level state: shared across instances/tests unless reset.
    DB = {}  # actor id -> {"inbox": [...], "outbox": [...]}
    USERS = {}  # preferredUsername -> ap.Person
    FETCH_MOCK = {}  # IRI -> raw dict served by fetch_iri()
    INBOX_IDX = {}  # actor id -> {activity id -> activity}
    OUTBOX_IDX = {}  # actor id -> {activity id -> activity}
    FOLLOWERS = {}  # actor id -> list of follower IRIs
    FOLLOWING = {}  # actor id -> list of followed IRIs

    # For tests purposes only
    _METHOD_CALLS = {}

    def called_methods(self, p: ap.Person) -> List[str]:
        """Return the calls tracked for `p` and reset its log."""
        data = list(self._METHOD_CALLS[p.id])
        self._METHOD_CALLS[p.id] = []
        return data

    def assert_called_methods(self, p: ap.Person, *asserts) -> List[str]:
        """Assert that the tracked calls for `p` match `asserts`, in order.

        Each item of `asserts` is `(error_msg, method_name, *arg_checkers)`;
        every checker is called with the matching positional argument of the
        recorded call (`self` excluded).  Consumes (resets) the call log.
        """
        calls = self.called_methods(p)
        for i, assert_data in enumerate(asserts):
            if len(calls) < i + 1:
                raise ValueError(f"no methods called at step #{i}")
            error_msg, name, *funcs = assert_data
            if name != calls[i][0]:
                raise ValueError(
                    f"expected method {name} to be called at step #{i}, but got {calls[i][0]}"
                )
            if len(funcs) < len(calls[i][1]) - 1:
                raise ValueError(f"args left unchecked for method {name} at step #{i}")
            for z, f in enumerate(funcs):
                if len(calls[i][1]) < z + 2:  # XXX(tsileo): 0 will be self
                    raise ValueError(f"method {name} has no args at index {z}")
                try:
                    f(calls[i][1][z + 1])
                except AssertionError as ae:
                    # Replace the assertion message with the step's error_msg
                    ae.args = ((error_msg),)
                    raise ae

        if len(asserts) < len(calls):
            raise ValueError(
                f"expecting {len(calls)} assertion, only got {len(asserts)},"
                f"leftover: {calls[len(asserts):]!r}"
            )

        return calls

    def debug_mode(self) -> bool:
        # Always debug: allows fetching local/private URLs during tests.
        return True

    def setup_actor(self, name, pusername):
        """Create a new actor in this backend."""
        p = ap.Person(
            name=name,
            preferredUsername=pusername,
            summary="Hello",
            id=f"https://lol.com/{pusername}",
            inbox=f"https://lol.com/{pusername}/inbox",
            followers=f"https://lol.com/{pusername}/followers",
            following=f"https://lol.com/{pusername}/following",
        )

        self.USERS[p.preferredUsername] = p
        self.DB[p.id] = {"inbox": [], "outbox": []}
        self.INBOX_IDX[p.id] = {}
        self.OUTBOX_IDX[p.id] = {}
        self.FOLLOWERS[p.id] = []
        self.FOLLOWING[p.id] = []
        self.FETCH_MOCK[p.id] = p.to_dict()
        self._METHOD_CALLS[p.id] = []
        return p

    def fetch_iri(self, iri: str) -> ap.ObjectType:
        """Serve an IRI from FETCH_MOCK, synthesizing followers/following collections."""
        if iri.endswith("/followers"):
            data = self.FOLLOWERS[iri.replace("/followers", "")]
            return {
                "id": iri,
                "type": ap.ActivityType.ORDERED_COLLECTION.value,
                "totalItems": len(data),
                "orderedItems": data,
            }
        if iri.endswith("/following"):
            data = self.FOLLOWING[iri.replace("/following", "")]
            return {
                "id": iri,
                "type": ap.ActivityType.ORDERED_COLLECTION.value,
                "totalItems": len(data),
                "orderedItems": data,
            }
        return self.FETCH_MOCK[iri]

    def get_user(self, username: str) -> ap.Person:
        """Look up an actor created via `setup_actor` by preferredUsername."""
        if username in self.USERS:
            return self.USERS[username]
        else:
            raise ValueError(f"bad username {username}")

    @track_call
    def outbox_is_blocked(self, as_actor: ap.Person, actor_id: str) -> bool:
        """Returns True if `as_actor` has blocked `actor_id`."""
        # NOTE(review): only checks for the presence of *any* Block activity,
        # not one targeting `actor_id` — good enough for the test suite.
        for activity in self.DB[as_actor.id]["outbox"]:
            if activity.ACTIVITY_TYPE == ap.ActivityType.BLOCK:
                return True
        return False

    def inbox_check_duplicate(
        self, as_actor: ap.Person, iri: str
    ) -> Optional[ap.BaseActivity]:
        """Return the inbox activity with id `iri` if already stored, else None."""
        for activity in self.DB[as_actor.id]["inbox"]:
            if activity.id == iri:
                return activity

        return None

    @track_call
    def inbox_new(self, as_actor: ap.Person, activity: ap.BaseActivity) -> None:
        """Store a new inbox activity (deduplicated by id)."""
        if activity.id in self.INBOX_IDX[as_actor.id]:
            return
        self.DB[as_actor.id]["inbox"].append(activity)
        self.INBOX_IDX[as_actor.id][activity.id] = activity

    def base_url(self) -> str:
        return "https://todo"

    def activity_url(self, obj_id: str) -> str:
        # from the random hex ID
        return f"https://todo/{obj_id}"

    def note_url(self, obj_id: str) -> str:
        # from the random hex ID
        return f"https://todo/note/{obj_id}"

    @track_call
    def outbox_new(self, as_actor: ap.Person, activity: ap.BaseActivity) -> None:
        """Store a new outbox activity and make it (and its object) fetchable."""
        print(f"saving {activity!r} to DB")
        actor_id = activity.get_actor().id
        if activity.id in self.OUTBOX_IDX[actor_id]:
            return
        self.DB[actor_id]["outbox"].append(activity)
        self.OUTBOX_IDX[actor_id][activity.id] = activity
        self.FETCH_MOCK[activity.id] = activity.to_dict()
        if isinstance(activity, ap.Create):
            self.FETCH_MOCK[activity.get_object().id] = activity.get_object().to_dict()

    @track_call
    def new_follower(self, as_actor: ap.Person, follow: ap.Follow) -> None:
        self.FOLLOWERS[follow.get_object().id].append(follow.get_actor().id)

    @track_call
    def undo_new_follower(self, as_actor: ap.Person, follow: ap.Follow) -> None:
        self.FOLLOWERS[follow.get_object().id].remove(follow.get_actor().id)

    @track_call
    def new_following(self, as_actor: ap.Person, follow: ap.Follow) -> None:
        print(f"new following {follow!r}")
        self.FOLLOWING[as_actor.id].append(follow.get_object().id)

    @track_call
    def undo_new_following(self, as_actor: ap.Person, follow: ap.Follow) -> None:
        self.FOLLOWING[as_actor.id].remove(follow.get_object().id)

    def followers(self, as_actor: ap.Person) -> List[str]:
        return self.FOLLOWERS[as_actor.id]

    def following(self, as_actor: ap.Person) -> List[str]:
        return self.FOLLOWING[as_actor.id]

    @track_call
    def post_to_remote_inbox(
        self, as_actor: ap.Person, payload_encoded: str, recp: str
    ) -> None:
        """Simulate remote delivery: process the payload from the recipient's inbox."""
        payload = json.loads(payload_encoded)
        print(f"post_to_remote_inbox {payload} {recp}")
        act = ap.parse_activity(payload)
        as_actor = ap.parse_activity(self.fetch_iri(recp.replace("/inbox", "")))
        act.process_from_inbox(as_actor)

    # The hooks below are no-ops: they only exist so @track_call records
    # that the dispatcher invoked them.

    @track_call
    def inbox_like(self, as_actor: ap.Person, activity: ap.Like) -> None:
        pass

    @track_call
    def inbox_undo_like(self, as_actor: ap.Person, activity: ap.Like) -> None:
        pass

    @track_call
    def outbox_like(self, as_actor: ap.Person, activity: ap.Like) -> None:
        pass

    @track_call
    def outbox_undo_like(self, as_actor: ap.Person, activity: ap.Like) -> None:
        pass

    @track_call
    def inbox_announce(self, as_actor: ap.Person, activity: ap.Announce) -> None:
        pass

    @track_call
    def inbox_undo_announce(self, as_actor: ap.Person, activity: ap.Announce) -> None:
        pass

    @track_call
    def outbox_announce(self, as_actor: ap.Person, activity: ap.Announce) -> None:
        pass

    @track_call
    def outbox_undo_announce(self, as_actor: ap.Person, activity: ap.Announce) -> None:
        pass

    @track_call
    def inbox_delete(self, as_actor: ap.Person, activity: ap.Delete) -> None:
        pass

    @track_call
    def outbox_delete(self, as_actor: ap.Person, activity: ap.Delete) -> None:
        pass

    @track_call
    def inbox_update(self, as_actor: ap.Person, activity: ap.Update) -> None:
        pass

    @track_call
    def outbox_update(self, as_actor: ap.Person, activity: ap.Update) -> None:
        pass

    @track_call
    def inbox_create(self, as_actor: ap.Person, activity: ap.Create) -> None:
        pass

    @track_call
    def outbox_create(self, as_actor: ap.Person, activity: ap.Create) -> None:
        pass
253 |
--------------------------------------------------------------------------------
/tests/test_collection.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | import pytest
4 | from little_boxes import activitypub as ap
5 | from little_boxes.collection import parse_collection
6 | from little_boxes.errors import RecursionLimitExceededError
7 | from little_boxes.errors import UnexpectedActivityTypeError
8 |
9 | from test_backend import InMemBackend
10 |
11 | logging.basicConfig(level=logging.DEBUG)
12 |
13 |
def test_empty_collection():
    """A Collection with no items parses to an empty list."""
    backend = InMemBackend()
    ap.use_backend(backend)

    backend.FETCH_MOCK["https://lol.com"] = {
        "id": "https://lol.com",
        "type": "Collection",
        "items": [],
    }

    assert parse_collection(url="https://lol.com", fetcher=backend.fetch_iri) == []
26 |
27 |
def test_recursive_collection_limit():
    """A self-referencing `first` page triggers the recursion limit."""
    backend = InMemBackend()
    ap.use_backend(backend)

    # The collection declares itself as its own first page.
    backend.FETCH_MOCK["https://lol.com"] = {
        "id": "https://lol.com",
        "type": "Collection",
        "first": "https://lol.com",
    }

    with pytest.raises(RecursionLimitExceededError):
        parse_collection(url="https://lol.com", fetcher=backend.fetch_iri)
40 |
41 |
def test_unexpected_activity_type():
    """Anything that is not a Collection/CollectionPage is rejected."""
    backend = InMemBackend()
    ap.use_backend(backend)

    backend.FETCH_MOCK["https://lol.com"] = {"id": "https://lol.com", "type": "Actor"}

    with pytest.raises(UnexpectedActivityTypeError):
        parse_collection(url="https://lol.com", fetcher=backend.fetch_iri)
50 |
51 |
def test_collection():
    """Items are gathered from the page referenced by `first`."""
    backend = InMemBackend()
    ap.use_backend(backend)

    backend.FETCH_MOCK["https://lol.com"] = {
        "id": "https://lol.com",
        "type": "Collection",
        "first": "https://lol.com/page1",
    }
    backend.FETCH_MOCK["https://lol.com/page1"] = {
        "id": "https://lol.com/page1",
        "type": "CollectionPage",
        "items": [1, 2, 3],
    }

    items = parse_collection(url="https://lol.com", fetcher=backend.fetch_iri)
    assert items == [1, 2, 3]
69 |
70 |
def test_ordered_collection():
    """An inlined first page plus a fetched `next` page are concatenated in order."""
    backend = InMemBackend()
    ap.use_backend(backend)

    backend.FETCH_MOCK["https://lol.com"] = {
        "id": "https://lol.com",
        "type": "OrderedCollection",
        # The first page is inlined and chains to a second, fetched page.
        "first": {
            "id": "https://lol.com/page1",
            "type": "OrderedCollectionPage",
            "orderedItems": [1, 2, 3],
            "next": "https://lol.com/page2",
        },
    }
    backend.FETCH_MOCK["https://lol.com/page2"] = {
        "id": "https://lol.com/page2",
        "type": "OrderedCollectionPage",
        "orderedItems": [4, 5, 6],
    }

    items = parse_collection(url="https://lol.com", fetcher=backend.fetch_iri)
    assert items == [1, 2, 3, 4, 5, 6]
93 |
--------------------------------------------------------------------------------
/tests/test_content_helper.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from unittest import mock
3 |
4 | from little_boxes import activitypub as ap
5 | from little_boxes import content_helper
6 |
7 | from test_backend import InMemBackend
8 |
9 | logging.basicConfig(level=logging.DEBUG)
10 |
11 |
12 | def test_little_content_helper_simple():
13 | back = InMemBackend()
14 | ap.use_backend(back)
15 |
16 | content, tags = content_helper.parse_markdown("hello")
17 | assert content == "hello
"
18 | assert tags == []
19 |
20 |
21 | def test_little_content_helper_linkify():
22 | back = InMemBackend()
23 | ap.use_backend(back)
24 |
25 | content, tags = content_helper.parse_markdown("hello https://google.com")
26 | assert content.startswith("hello hello @dev'
45 | "@microblog.pub
"
46 | )
47 | assert tags == [
48 | {
49 | "href": "https://microblog.pub",
50 | "name": "@dev@microblog.pub",
51 | "type": "Mention",
52 | }
53 | ]
54 |
55 |
56 | @mock.patch(
57 | "little_boxes.content_helper.get_actor_url", return_value="https://microblog.pub"
58 | )
59 | def test_little_content_helper_tag(_):
60 | back = InMemBackend()
61 | ap.use_backend(back)
62 |
63 | content, tags = content_helper.parse_markdown("hello #activitypub")
64 | base_url = back.base_url()
65 | assert content == (
66 | f'hello #'
67 | f"activitypub
"
68 | )
69 | assert tags == [
70 | {
71 | "href": f"{base_url}/tags/activitypub",
72 | "name": "#activitypub",
73 | "type": "Hashtag",
74 | }
75 | ]
76 |
--------------------------------------------------------------------------------
/tests/test_httpsig.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | import requests
4 | from little_boxes import activitypub as ap
5 | from little_boxes import httpsig
6 | from little_boxes.key import Key
7 |
8 | import httpretty
9 | from test_backend import InMemBackend
10 |
11 | logging.basicConfig(level=logging.DEBUG)
12 |
13 |
@httpretty.activate
def test_httpsig():
    """A request signed by HTTPSigAuth must pass verify_request (key embedded in a Person)."""
    backend = InMemBackend()
    ap.use_backend(backend)

    k = Key("https://lol.com", "https://lol.com#lol")
    k.new()
    backend.FETCH_MOCK["https://lol.com#lol"] = {
        "publicKey": k.to_dict(),
        "id": "https://lol.com",
        "type": "Person",
    }

    httpretty.register_uri(httpretty.POST, "https://remote-instance.com", body="ok")

    resp = requests.post(
        "https://remote-instance.com", json={"ok": 1}, auth=httpsig.HTTPSigAuth(k)
    )

    req = resp.request
    assert httpsig.verify_request(req.method, req.path_url, req.headers, req.body)
38 |
39 |
@httpretty.activate
def test_httpsig_key():
    """Verification also works when the key IRI resolves to a bare Key document."""
    backend = InMemBackend()
    ap.use_backend(backend)

    k = Key("https://lol.com", "https://lol.com/key/lol")
    k.new()
    backend.FETCH_MOCK["https://lol.com/key/lol"] = k.to_dict()

    httpretty.register_uri(httpretty.POST, "https://remote-instance.com", body="ok")

    resp = requests.post(
        "https://remote-instance.com", json={"ok": 1}, auth=httpsig.HTTPSigAuth(k)
    )

    req = resp.request
    assert httpsig.verify_request(req.method, req.path_url, req.headers, req.body)
60 |
--------------------------------------------------------------------------------
/tests/test_key.py:
--------------------------------------------------------------------------------
1 | from little_boxes.key import Key
2 |
3 |
def test_key_new_load():
    """A key reloaded from the generated private PEM must serialize identically."""
    owner = "http://lol.com"
    original = Key(owner)
    original.new()

    expected = {
        "id": f"{owner}#main-key",
        "owner": owner,
        "publicKeyPem": original.pubkey_pem,
        "type": "Key",
    }
    assert original.to_dict() == expected

    reloaded = Key(owner)
    reloaded.load(original.privkey_pem)
    assert reloaded.to_dict() == original.to_dict()
20 |
--------------------------------------------------------------------------------
/tests/test_linked_data_sig.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 |
4 | from little_boxes import linked_data_sig
5 | from little_boxes.key import Key
6 |
7 | logging.basicConfig(level=logging.DEBUG)
8 |
9 |
10 | DOC = """{"type": "Create", "actor": "https://microblog.pub", "object": {"type": "Note", "sensitive": false, "cc": ["https://microblog.pub/followers"], "to": ["https://www.w3.org/ns/activitystreams#Public"], "content": "Hello world!
", "tag": [], "source": {"mediaType": "text/markdown", "content": "Hello world!"}, "attributedTo": "https://microblog.pub", "published": "2018-05-21T15:51:59Z", "id": "https://microblog.pub/outbox/988179f13c78b3a7/activity", "url": "https://microblog.pub/note/988179f13c78b3a7", "replies": {"type": "OrderedCollection", "totalItems": 0, "first": "https://microblog.pub/outbox/988179f13c78b3a7/replies?page=first", "id": "https://microblog.pub/outbox/988179f13c78b3a7/replies"}, "likes": {"type": "OrderedCollection", "totalItems": 2, "first": "https://microblog.pub/outbox/988179f13c78b3a7/likes?page=first", "id": "https://microblog.pub/outbox/988179f13c78b3a7/likes"}, "shares": {"type": "OrderedCollection", "totalItems": 3, "first": "https://microblog.pub/outbox/988179f13c78b3a7/shares?page=first", "id": "https://microblog.pub/outbox/988179f13c78b3a7/shares"}}, "@context": ["https://www.w3.org/ns/activitystreams", "https://w3id.org/security/v1", {"Hashtag": "as:Hashtag", "sensitive": "as:sensitive"}], "published": "2018-05-21T15:51:59Z", "to": ["https://www.w3.org/ns/activitystreams#Public"], "cc": ["https://microblog.pub/followers"], "id": "https://microblog.pub/outbox/988179f13c78b3a7"}""" # noqa: E501
11 |
12 |
def test_linked_data_sig():
    """Round-trip: a freshly generated signature must verify with the same key."""
    doc = json.loads(DOC)

    key = Key("https://lol.com")
    key.new()

    linked_data_sig.generate_signature(doc, key)
    assert linked_data_sig.verify_signature(doc, key)
21 |
--------------------------------------------------------------------------------
/tests/test_urlutils.py:
--------------------------------------------------------------------------------
1 | from unittest import mock
2 |
3 | import pytest
4 | from little_boxes import urlutils
5 |
6 |
def test_urlutils_reject_invalid_scheme():
    """Only http(s) URLs are considered valid."""
    assert urlutils.is_url_valid("ftp://localhost:123") is False
9 |
10 |
def test_urlutils_reject_localhost():
    """localhost is always rejected outside of debug mode."""
    assert urlutils.is_url_valid("http://localhost:8000") is False
13 |
14 |
def test_urlutils_reject_private_ip():
    """Literal private IP addresses are rejected."""
    assert urlutils.is_url_valid("http://192.168.1.10:8000") is False
17 |
18 |
@mock.patch("socket.getaddrinfo", return_value=[[0, 1, 2, 3, ["192.168.1.11", None]]])
def test_urlutils_reject_domain_that_resolve_to_private_ip(_):
    """A public-looking domain resolving to a private IP is rejected."""
    assert urlutils.is_url_valid("http://resolve-to-private.com") is False
22 |
23 |
@mock.patch("socket.getaddrinfo", return_value=[[0, 1, 2, 3, ["1.2.3.4", None]]])
def test_urlutils_accept_valid_url(_):
    """A domain resolving to a public IP is accepted."""
    assert urlutils.is_url_valid("https://microblog.pub") is True
27 |
28 |
def test_urlutils_check_url_helper():
    """check_url raises InvalidURLError for rejected URLs."""
    with pytest.raises(urlutils.InvalidURLError):
        urlutils.check_url("http://localhost:5000")
32 |
--------------------------------------------------------------------------------
/tests/test_webfinger.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | from unittest import mock
4 |
5 | import pytest
6 | from little_boxes import urlutils
7 | from little_boxes import webfinger
8 |
9 | import httpretty
10 |
11 | logging.basicConfig(level=logging.DEBUG)
12 |
13 |
# Canned WebFinger response for acct:dev@microblog.pub: a profile-page link,
# the ActivityPub "self" link, and the OStatus remote-follow template.
_WEBFINGER_RESP = {
    "aliases": ["https://microblog.pub"],
    "links": [
        {
            "href": "https://microblog.pub",
            "rel": "http://webfinger.net/rel/profile-page",
            "type": "text/html",
        },
        {
            "href": "https://microblog.pub",
            "rel": "self",
            "type": "application/activity+json",
        },
        {
            "rel": "http://ostatus.org/schema/1.0/subscribe",
            "template": "https://microblog.pub/authorize_follow?profile={uri}",
        },
    ],
    "subject": "acct:dev@microblog.pub",
}
34 |
35 |
@mock.patch("little_boxes.webfinger.check_url", return_value=None)
@mock.patch("little_boxes.backend.check_url", return_value=None)
@httpretty.activate
def test_webfinger(_, _1):
    """End-to-end resolution of an @user@host handle against a mocked server."""
    # FIXME(tsileo): it should try https first
    httpretty.register_uri(
        httpretty.GET,
        "https://microblog.pub/.well-known/webfinger",
        body=json.dumps(_WEBFINGER_RESP),
    )

    resource = "@dev@microblog.pub"
    assert webfinger.webfinger(resource) == _WEBFINGER_RESP
    assert webfinger.get_actor_url(resource) == "https://microblog.pub"
    expected_template = "https://microblog.pub/authorize_follow?profile={uri}"
    assert webfinger.get_remote_follow_template(resource) == expected_template
54 |
55 |
def test_webfinger_invalid_url():
    """Resolution against a rejected host must raise InvalidURLError."""
    with pytest.raises(urlutils.InvalidURLError):
        webfinger.webfinger("@dev@localhost:8080")
59 |
--------------------------------------------------------------------------------