├── core ├── __init__.py ├── jsonld.py ├── remote.py ├── db.py ├── indexes.py ├── feed.py ├── tasks.py ├── meta.py ├── outbox.py ├── notifications.py ├── shared.py ├── gc.py └── inbox.py ├── blueprints ├── __init__.py ├── well_known.py └── indieauth.py ├── data ├── .gitignore ├── mongodb │ └── .gitignore └── poussetaches │ └── .gitignore ├── sass ├── theme.scss ├── dark.scss ├── light.scss └── base_theme.scss ├── setup.cfg ├── static ├── emojis │ └── .gitignore ├── media │ └── .gitignore ├── nopic.png └── favicon.png ├── .dockerignore ├── .env ├── tests ├── fixtures │ ├── instance1 │ │ └── config │ │ │ ├── .gitignore │ │ │ └── me.yml │ ├── instance2 │ │ └── config │ │ │ ├── .gitignore │ │ │ └── me.yml │ └── me.yml └── integration_test.py ├── .isort.cfg ├── setup_wizard ├── requirements.txt ├── Dockerfile └── wizard.py ├── .gitignore ├── dev-requirements.txt ├── Dockerfile ├── docs ├── head.html ├── activitypub.md └── api.md ├── run.sh ├── startup.py ├── docker-compose-dev.yml ├── run_dev.sh ├── requirements.txt ├── templates ├── authorize_remote_follow.html ├── stream_debug.html ├── remote_follow.html ├── direct_messages.html ├── error.html ├── about.html ├── admin.html ├── u2f.html ├── login.html ├── followers.html ├── liked.html ├── admin_indieauth.html ├── note_debug.html ├── indieauth_flow.html ├── note.html ├── tags.html ├── header.html ├── lists.html ├── following.html ├── lookup.html ├── layout.html ├── admin_tasks.html ├── index.html ├── new.html └── stream.html ├── ENVVARS.md ├── utils ├── local_actor_cache.py ├── __init__.py ├── highlight.py ├── blacklist.py ├── key.py ├── webmentions.py ├── migrations.py ├── emojis.py ├── lookup.py ├── nodeinfo.py ├── opengraph.py ├── media.py └── template_filters.py ├── config └── me.sample.yml ├── docker-compose.yml ├── Makefile ├── .drone.yml ├── config.py └── README.md /core/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /blueprints/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /data/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | -------------------------------------------------------------------------------- /data/mongodb/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | -------------------------------------------------------------------------------- /sass/theme.scss: -------------------------------------------------------------------------------- 1 | @import 'base_theme.scss' 2 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 120 3 | -------------------------------------------------------------------------------- /static/emojis/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | -------------------------------------------------------------------------------- /static/media/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | -------------------------------------------------------------------------------- /data/poussetaches/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | __pycache__/ 2 | data/ 3 | data2/ 4 | tests/ 5 | 
-------------------------------------------------------------------------------- /.env: -------------------------------------------------------------------------------- 1 | WEB_PORT=5005 2 | CONFIG_DIR=./config 3 | DATA_DIR=./data 4 | -------------------------------------------------------------------------------- /tests/fixtures/instance1/config/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | -------------------------------------------------------------------------------- /tests/fixtures/instance2/config/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | line_length=120 3 | force_single_line=true 4 | -------------------------------------------------------------------------------- /setup_wizard/requirements.txt: -------------------------------------------------------------------------------- 1 | prompt_toolkit 2 | bcrypt 3 | markdown 4 | -------------------------------------------------------------------------------- /static/nopic.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aperezdc/microblog.pub/master/static/nopic.png -------------------------------------------------------------------------------- /static/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aperezdc/microblog.pub/master/static/favicon.png -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.sw[op] 2 | key_*.pem 3 | data/* 4 | config/* 5 | static/media/* 6 | 7 | .mypy_cache/ 8 | __pycache__/ 9 | 
-------------------------------------------------------------------------------- /dev-requirements.txt: -------------------------------------------------------------------------------- 1 | git+https://github.com/tsileo/little-boxes.git 2 | pytest 3 | requests 4 | html2text 5 | pyyaml 6 | flake8 7 | mypy 8 | black 9 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.7 2 | COPY requirements.txt /app/requirements.txt 3 | WORKDIR /app 4 | RUN pip install -r requirements.txt 5 | ADD . /app 6 | ENV FLASK_APP=app.py 7 | CMD ["./run.sh"] 8 | -------------------------------------------------------------------------------- /setup_wizard/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.7 2 | WORKDIR /app 3 | ADD . /app 4 | RUN pip install -r requirements.txt 5 | LABEL maintainer="t@a4.io" 6 | LABEL pub.microblog.oneshot=true 7 | CMD ["python", "wizard.py"] 8 | -------------------------------------------------------------------------------- /sass/dark.scss: -------------------------------------------------------------------------------- 1 | $background-color: #060606; 2 | $background-light: #222; 3 | $color: #808080; 4 | $color-title-link: #fefefe; 5 | $color-summary: #ddd; 6 | $color-light: #bbb; 7 | $color-menu-background: #222; 8 | $color-note-link: #666; 9 | -------------------------------------------------------------------------------- /sass/light.scss: -------------------------------------------------------------------------------- 1 | $background-color: #eee; 2 | $background-light: #ccc; 3 | $color: #111; 4 | $color-title-link: #333; 5 | $color-light: #555; 6 | $color-summary: #111; 7 | $color-note-link: #333; 8 | $color-menu-background: #ddd; 9 | // $primary-color: #1d781d; 10 | -------------------------------------------------------------------------------- 
/tests/fixtures/me.yml: -------------------------------------------------------------------------------- 1 | username: 'ci' 2 | name: 'CI tests' 3 | icon_url: 'https://sos-ch-dk-2.exo.io/microblogpub/microblobpub.png' 4 | domain: 'localhost:5005' 5 | summary: 'test instance summary' 6 | pass: '$2b$12$nEgJMgaYbXSPOvgnqM4jSeYnleKhXqsFgv/o3hg12x79uEdsR4cUy' # hello 7 | https: false 8 | -------------------------------------------------------------------------------- /docs/head.html: -------------------------------------------------------------------------------- 1 | 19 | -------------------------------------------------------------------------------- /tests/fixtures/instance1/config/me.yml: -------------------------------------------------------------------------------- 1 | username: 'instance1' 2 | name: 'Instance 1' 3 | icon_url: 'https://sos-ch-dk-2.exo.io/microblogpub/microblobpub.png' 4 | domain: 'instance1_web:5005' 5 | summary: 'instance1 summary' 6 | pass: '$2b$12$nEgJMgaYbXSPOvgnqM4jSeYnleKhXqsFgv/o3hg12x79uEdsR4cUy' # hello 7 | https: false 8 | hide_following: false 9 | -------------------------------------------------------------------------------- /tests/fixtures/instance2/config/me.yml: -------------------------------------------------------------------------------- 1 | username: 'instance2' 2 | name: 'Instance 2' 3 | icon_url: 'https://sos-ch-dk-2.exo.io/microblogpub/microblobpub.png' 4 | domain: 'instance2_web:5005' 5 | summary: 'instance2 summary' 6 | pass: '$2b$12$nEgJMgaYbXSPOvgnqM4jSeYnleKhXqsFgv/o3hg12x79uEdsR4cUy' # hello 7 | https: false 8 | hide_following: false 9 | -------------------------------------------------------------------------------- /docs/activitypub.md: -------------------------------------------------------------------------------- 1 | ## ActivityPub 2 | 3 | _microblog.pub_ implements an [ActivityPub](http://activitypub.rocks/) server, it implements both the client to server API and the federated server to server API. 
#!/bin/bash
# Container entrypoint: run DB migrations, ensure MongoDB indexes exist,
# announce actor-profile changes (startup.py), resume the poussetaches task
# queue, then start the WSGI server.
python -c "import logging; logging.basicConfig(level=logging.DEBUG); from core import migrations; migrations.perform()"
python -c "from core import indexes; indexes.create_indexes()"
python startup.py
# Fix: the auth key env var was misspelled ($POUSETACHES_AUTH_KEY), so the
# /resume request was always sent with an empty password and the task queue
# never resumed. Variables are also quoted to survive special characters.
(sleep 5 && curl -X POST -u ":$POUSSETACHES_AUTH_KEY" "$MICROBLOGPUB_POUSSETACHES_HOST/resume")&
gunicorn -t 600 -w 5 -b 0.0.0.0:5005 --log-level debug app:app
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | DEV_POUSSETACHES_AUTH_KEY="1234567890" 3 | MICROBLOGPUB_INTERNAL_HOST="http://host.docker.internal:5005" 4 | 5 | 6 | env POUSSETACHES_AUTH_KEY=${DEV_POUSSETACHES_AUTH_KEY} docker-compose -f docker-compose-dev.yml up -d 7 | FLASK_DEBUG=1 MICROBLOGPUB_DEBUG=1 FLASK_APP=app.py POUSSETACHES_AUTH_KEY=${DEV_POUSSETACHES_AUTH_KEY} MICROBLOGPUB_INTERNAL_HOST=${MICROBLOGPUB_INTERNAL_HOST} flask run -p 5005 --with-threads 8 | docker-compose down 9 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | cachetools 2 | poussetaches 3 | python-dateutil 4 | libsass 5 | tornado<6.0.0 6 | gunicorn 7 | piexif 8 | requests 9 | python-u2flib-server 10 | Flask 11 | Flask-WTF 12 | pymongo 13 | timeago 14 | bleach 15 | html2text 16 | feedgen 17 | itsdangerous 18 | bcrypt 19 | mf2py 20 | passlib 21 | git+https://github.com/erikriver/opengraph.git#egg=opengraph 22 | git+https://github.com/tsileo/little-boxes.git@litepub#egg=little_boxes 23 | pyyaml 24 | pillow 25 | emoji-unicode 26 | html5lib 27 | Pygments 28 | -------------------------------------------------------------------------------- /templates/authorize_remote_follow.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block header %} 4 | {% endblock %} 5 | {% block content %} 6 |
7 |

You're about to follow {{ profile}}

8 | 9 |
10 | 11 | 12 | 13 |
14 | 15 |
16 | {% endblock %} 17 | -------------------------------------------------------------------------------- /ENVVARS.md: -------------------------------------------------------------------------------- 1 | | var | default | 2 | |----------------------------------|-------------------------| 3 | | POUSSETACHES_AUTH_KEY | | 4 | | FLASK_DEBUG | 0 | 5 | | MICROBLOGPUB_DEBUG | "false" | 6 | | MICROBLOGPUB_INTERNAL_HOST | "http://localhost:5000" | 7 | | MICROBLOGPUB_MONGODB_HOST | "localhost:27017" | 8 | | MICROBLOGPUB_POUSSETACHES_HOST | "http://localhost:7991" | 9 | | MICROBLOGPUB_WIZARD_PROJECT_NAME | "microblogpub" | 10 | -------------------------------------------------------------------------------- /utils/local_actor_cache.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | _CACHE_FILE = Path(__file__).parent.absolute() / ".." / "config" / "local_actor_hash" 4 | 5 | 6 | def is_actor_updated(actor_hash: str) -> bool: 7 | actor_updated = False 8 | cache_exists = _CACHE_FILE.exists() 9 | if cache_exists: 10 | current_hash = _CACHE_FILE.read_text() 11 | if actor_hash != current_hash: 12 | actor_updated = True 13 | 14 | if actor_updated or not cache_exists: 15 | with _CACHE_FILE.open("w") as f: 16 | f.write(actor_hash) 17 | 18 | return actor_updated 19 | -------------------------------------------------------------------------------- /config/me.sample.yml: -------------------------------------------------------------------------------- 1 | username: 'username' 2 | name: 'You Name' 3 | domain: 'your-domain.tld' 4 | https: true 5 | summary: 'your summary' 6 | icon_url: 'https://you-avatar-url' 7 | pass: '' 8 | #favicon_url: '/static/favicon.png' 9 | #profile_metadata: 10 | # name1: 'value' 11 | # name2: 'value2' 12 | #hide_following: true 13 | #manually_approves_followers: false 14 | #aliases: 15 | # - "http://example.com/users/name" 16 | # - ... 
17 | #theme: 18 | # style: "light" #or "dark" 19 | # color: "#1d781d" # (green, default for light) 20 | # "#33ff00" (purple, default for dark) 21 | -------------------------------------------------------------------------------- /templates/stream_debug.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block title %}{% if request.path == url_for('admin_stream') %}Stream{% else %}Notifications{% endif %} - {{ config.NAME }}{% endblock %} 4 | {% block content %} 5 |
6 | {% include "header.html" %} 7 |
8 | 9 |
10 | {% for item in inbox_data %} 11 |
{{ item |remove_mongo_id|tojson(indent=4) }}
12 | {% endfor %} 13 | 14 | {{ utils.display_pagination(older_than, newer_than) }} 15 |
16 |
17 | 18 |
19 | {% endblock %} 20 | -------------------------------------------------------------------------------- /templates/remote_follow.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block header %} 4 | {% endblock %} 5 | {% block content %} 6 |
7 | {% include "header.html" %} 8 |
9 |

Remote follow @{{ config.USERNAME }}@{{ config.DOMAIN }}

10 | 11 |
12 | 13 | 14 | 15 |
16 |
17 |
18 | {% endblock %} 19 | -------------------------------------------------------------------------------- /core/jsonld.py: -------------------------------------------------------------------------------- 1 | MICROBLOGPUB = { 2 | "@context": [ 3 | "https://www.w3.org/ns/activitystreams", 4 | "https://w3id.org/security/v1", 5 | { 6 | "Hashtag": "as:Hashtag", 7 | "PropertyValue": "schema:PropertyValue", 8 | "manuallyApprovesFollowers": "as:manuallyApprovesFollowers", 9 | "ostatus": "http://ostatus.org#", 10 | "schema": "http://schema.org", 11 | "sensitive": "as:sensitive", 12 | "toot": "http://joinmastodon.org/ns#", 13 | "totalItems": "as:totalItems", 14 | "value": "schema:value", 15 | "Emoji": "toot:Emoji", 16 | }, 17 | ] 18 | } 19 | -------------------------------------------------------------------------------- /templates/direct_messages.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block title %}DMs - {{ config.NAME }}{% endblock %} 4 | {% block header %} 5 | 6 | {% endblock %} 7 | {% block content %} 8 |
9 |
10 | 11 | {% for thread in threads %} 12 |

With {{ ", ".join(thread.participants) }} 13 | {{ thread.len }} message{% if thread.len > 1 %}s{% endif %} 14 |

15 | {{ utils.display_note(thread.last_reply.activity.object, meta=thread.last_reply.meta) }} 16 | {% endfor %} 17 | 18 |
19 |
20 | {% endblock %} 21 | -------------------------------------------------------------------------------- /templates/error.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block title %}{{code}} {{status_text}} - {{ config.NAME }}{% endblock %} 4 | {% block header %} 5 | {% endblock %} 6 | {% block content %} 7 |
8 | {% if not request.path.startswith('/admin') %} 9 | {% include "header.html" %} 10 | {% endif %} 11 |
12 |

{{code}} {{status_text}}

13 |

Something went wrong :(

14 | {% if tb %} 15 |
16 |

Please consider opening an issue on GitHub.
Here is the traceback:

17 |
18 | {{ tb }}
19 | 
20 |
21 | {% endif %} 22 |
23 |
24 | {% endblock %} 25 | -------------------------------------------------------------------------------- /templates/about.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block header %} 4 | {% endblock %} 5 | {% block content %} 6 |
7 | 8 | {% include "header.html" %} 9 | 10 |
11 |
12 | 13 |
14 | 15 |
16 | {{ text | safe }} 17 |
{{ me.summary }}
18 |
19 |
20 | 21 |
22 | {% endblock %} 23 | -------------------------------------------------------------------------------- /templates/admin.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block title %}Admin - {{ config.NAME }}{% endblock %} 4 | {% block content %} 5 |
6 | {% include "header.html" %} 7 |
8 |

Admin

9 |

Links

10 | 14 |

DB

15 |
    16 |
  • Inbox size: {{ inbox_size }}
  • 17 |
  • Outbox size: {{ outbox_size }}
  • 18 |
19 |

Collections

20 |
    21 |
  • followers: {{ col_followers }}
  • 22 |
  • following: {{ col_following }}
  • 23 |
  • liked: {{col_liked }}
  • 24 |
25 |
26 | 27 |
28 | {% endblock %} 29 | -------------------------------------------------------------------------------- /utils/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from datetime import datetime 3 | from datetime import timezone 4 | 5 | from dateutil import parser 6 | from little_boxes import activitypub as ap 7 | 8 | logger = logging.getLogger(__name__) 9 | 10 | 11 | def strtobool(s: str) -> bool: 12 | if s in ["y", "yes", "true", "on", "1"]: 13 | return True 14 | if s in ["n", "no", "false", "off", "0"]: 15 | return False 16 | 17 | raise ValueError(f"cannot convert {s} to bool") 18 | 19 | 20 | def parse_datetime(s: str) -> datetime: 21 | # Parses the datetime with dateutil 22 | dt = parser.parse(s) 23 | 24 | # If no TZ is set, assumes it's UTC 25 | if not dt.tzinfo: 26 | dt = dt.replace(tzinfo=timezone.utc) 27 | 28 | return dt 29 | 30 | 31 | def now() -> str: 32 | return ap.format_datetime(datetime.now(timezone.utc)) 33 | -------------------------------------------------------------------------------- /templates/u2f.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block header %} 4 | {% endblock %} 5 | {% block content %} 6 |
7 | {% if session.logged_in %}logged{% else%}not logged{%endif%} 8 | 9 |
10 | 11 | 12 |
13 | 14 |
import os

import pytest
import requests
from html2text import html2text


@pytest.fixture
def config():
    """Return the current instance config (config/me.yml) as a dict."""
    import yaml

    with open(
        os.path.join(os.path.dirname(__file__), "..", "config/me.yml"), "rb"
    ) as f:
        # Fix: yaml.load() without an explicit Loader is deprecated and can
        # execute arbitrary constructors; the config is plain data, so
        # safe_load is the right call.
        yield yaml.safe_load(f)


def resp2plaintext(resp):
    """Convert the body of a requests response to plain text in order to make basic assertions."""
    return html2text(resp.text)


def test_ping_homepage(config):
    """Ensure the homepage is accessible and shows the configured identity."""
    resp = requests.get("http://localhost:5005")
    resp.raise_for_status()
    assert resp.status_code == 200
    body = resp2plaintext(resp)
    assert config["name"] in body
    assert f"@{config['username']}@{config['domain']}" in body
from functools import lru_cache

from bs4 import BeautifulSoup
from pygments import highlight as phighlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import guess_lexer

from config import THEME_STYLE
from config import ThemeStyle

# Pick a Pygments style matching the configured theme (dark themes get "vim").
_FORMATTER = HtmlFormatter(
    style="default" if THEME_STYLE == ThemeStyle.LIGHT else "vim"
)

# CSS rules for the formatter above, meant to be inlined in the page head.
HIGHLIGHT_CSS = _FORMATTER.get_style_defs()


@lru_cache(512)
def highlight(html: str) -> str:
    """Syntax-highlight every <pre><code> block found in *html*.

    The lexer is guessed from the code contents; results are memoized
    because the same note HTML is rendered repeatedly.
    """
    soup = BeautifulSoup(html, "html5lib")
    for code_tag in soup.find_all("code"):
        # Only fenced blocks (<pre><code>) are highlighted, not inline code.
        if code_tag.parent.name != "pre":
            continue
        rendered = phighlight(code_tag.text, guess_lexer(code_tag.text), _FORMATTER)
        highlighted = BeautifulSoup(rendered, "html5lib").body.next
        code_tag.parent.replaceWith(highlighted)

    root = soup.body
    root.name = "div"
    return str(root)
{ 24 | "$inc": {"successful_send": 1}, 25 | "$set": { 26 | "last_successful_contact": now_, 27 | "last_successful_send": now_, 28 | "last_contact": now_, 29 | }, 30 | }, 31 | ) 32 | return None 33 | 34 | 35 | def track_failed_send(url: str) -> None: 36 | now_ = now() 37 | _update(url, {"$inc": {"failed_send": 1}, "$set": {"last_contact": now_}}) 38 | return None 39 | -------------------------------------------------------------------------------- /core/db.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from enum import unique 3 | from typing import Any 4 | from typing import Dict 5 | from typing import Iterable 6 | from typing import Optional 7 | 8 | from config import DB 9 | 10 | _Q = Dict[str, Any] 11 | _D = Dict[str, Any] 12 | _Doc = Optional[_D] 13 | 14 | 15 | @unique 16 | class CollectionName(Enum): 17 | ACTIVITIES = "activities" 18 | REMOTE = "remote" 19 | 20 | 21 | def find_one_activity(q: _Q) -> _Doc: 22 | return DB[CollectionName.ACTIVITIES.value].find_one(q) 23 | 24 | 25 | def find_activities(q: _Q) -> Iterable[_D]: 26 | return DB[CollectionName.ACTIVITIES.value].find(q) 27 | 28 | 29 | def update_one_activity(q: _Q, update: _Q) -> bool: 30 | return DB[CollectionName.ACTIVITIES.value].update_one(q, update).matched_count == 1 31 | 32 | 33 | def update_many_activities(q: _Q, update: _Q) -> None: 34 | DB[CollectionName.ACTIVITIES.value].update_many(q, update) 35 | 36 | 37 | def update_one_remote(filter_: _Q, update: _Q, upsert: bool = False) -> None: 38 | DB[CollectionName.REMOTE.value].update_one(filter_, update, upsert) 39 | -------------------------------------------------------------------------------- /utils/blacklist.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from typing import Any 3 | from typing import Dict 4 | from urllib.parse import urlparse 5 | 6 | import config 7 | 8 | logger = logging.getLogger(__name__) 9 | 10 | 11 
import logging
from typing import Any
from typing import Dict
from urllib.parse import urlparse

import config

logger = logging.getLogger(__name__)


def is_url_blacklisted(url: str) -> bool:
    """Return True if *url* points at a blacklisted host (best-effort)."""
    try:
        return urlparse(url).netloc in config.BLACKLIST
    except Exception:
        # Never let a malformed URL break activity processing.
        logger.exception(f"failed to blacklist for {url}")
        return False


def is_blacklisted(data: Dict[str, Any]) -> bool:
    """Returns True if the activity is coming/or referencing a blacklisted host."""
    # The activity's own id.
    if "id" in data and is_url_blacklisted(data["id"]):
        return True

    obj = data.get("object")

    # An embedded object carrying its own id.
    if isinstance(obj, dict) and "id" in obj and is_url_blacklisted(obj["id"]):
        return True

    # An object given by reference (a bare IRI).
    if isinstance(obj, str) and is_url_blacklisted(obj):
        return True

    return False
18 | 19 |
20 | 21 | 22 | {% if u2f_enabled %} 23 | 24 | {% else %} 25 | {% endif %} 26 | 27 |
28 | 29 |
30 | {% if u2f_enabled %} 31 | 45 | {% endif %} 46 | {% endblock %} 47 | -------------------------------------------------------------------------------- /templates/followers.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block title %}Followers - {{ config.NAME }}{% endblock %} 4 | {% block header %} 5 | {% endblock %} 6 | {% block content %} 7 |
8 | {% include "header.html" %} 9 | 10 |
11 |
12 | {% for meta in followers_data %} 13 | {% set follower = meta.actor %} 14 | {% if session.logged_in %} 15 |
16 | profile 17 | 18 | {% if meta.notification_follows_back %} 19 | following 20 | {% endif %} 21 | 22 |
23 | {% endif %} 24 |
25 | {{ utils.display_actor_inline(follower, size=80) }} 26 |
27 | {% endfor %} 28 | {{ utils.display_pagination(older_than, newer_than) }} 29 |
30 | 31 |
32 |
33 | {% endblock %} 34 | {% block links %} 35 | 36 | {{ utils.display_pagination_links(older_than, newer_than) }}{% endblock %} 37 | -------------------------------------------------------------------------------- /templates/liked.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block content %} 4 |
5 | 6 | 7 | {% include "header.html" %} 8 | 9 |
10 | {% for item in liked %} 11 | {% if session.logged_in %} 12 |
13 |
14 | 15 | 16 | 17 | 18 |
19 |
20 | 21 | {% endif %} 22 | {% if item.meta.object %} 23 | {{ utils.display_note(item.meta.object, meta=item.meta) }} 24 | {% endif %} 25 | {% endfor %} 26 | 27 | {{ utils.display_pagination(older_than, newer_than) }} 28 |
29 | 30 |
31 | {% endblock %} 32 | {% block links %} 33 | 34 | {{ utils.display_pagination_links(older_than, newer_than) }}{% endblock %} 35 | -------------------------------------------------------------------------------- /templates/admin_indieauth.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block title %}IndieAuth logs - {{ config.NAME }}{% endblock %} 4 | {% block content %} 5 |
6 | {% include "header.html" %} 7 |
8 |

IndieAuth logs

9 |
    10 | {% for action in indieauth_actions %} 11 |
  • {{action.ts|format_ts}} 12 | {% if action.verified_by == "id" %}Authentication {% else %}Authorization {% endif %} 13 | request by {{ action.client_id }} 14 | / {{action.ip_address}} {% if action.geoip %}({{action.geoip}}){% endif %} 15 | as {{action.me}} 16 | ({% if action.scope %}scope={{action.scope}},{% endif %}redirect_uri={{action.redirect_uri}}). 17 | {% if action.token_expires %} 18 |
    19 | The token {{action.token[:20]}}... 20 | {% if action.token_expires|gt_ts%}has expired on{% else %}expires{% endif %} {{ action.token_expires|format_ts }} 21 | {% endif %} 22 | 23 |
  • 24 | {% endfor %} 25 |
26 |
27 | 28 |
29 | {% endblock %} 30 | -------------------------------------------------------------------------------- /utils/key.py: -------------------------------------------------------------------------------- 1 | import binascii 2 | import os 3 | from typing import Callable 4 | 5 | from little_boxes.key import Key 6 | 7 | KEY_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "config") 8 | 9 | 10 | def _new_key() -> str: 11 | return binascii.hexlify(os.urandom(32)).decode("utf-8") 12 | 13 | 14 | def get_secret_key(name: str, new_key: Callable[[], str] = _new_key) -> str: 15 | """Loads or generates a cryptographic key.""" 16 | key_path = os.path.join(KEY_DIR, f"{name}.key") 17 | if not os.path.exists(key_path): 18 | k = new_key() 19 | with open(key_path, "w+") as f: 20 | f.write(k) 21 | return k 22 | 23 | with open(key_path) as f: 24 | return f.read() 25 | 26 | 27 | def get_key(owner: str, _id: str, user: str, domain: str) -> Key: 28 | """"Loads or generates an RSA key.""" 29 | k = Key(owner, _id) 30 | user = user.replace(".", "_") 31 | domain = domain.replace(".", "_") 32 | key_path = os.path.join(KEY_DIR, f"key_{user}_{domain}.pem") 33 | if os.path.isfile(key_path): 34 | with open(key_path) as f: 35 | privkey_pem = f.read() 36 | k.load(privkey_pem) 37 | else: 38 | k.new() 39 | with open(key_path, "w") as f: 40 | f.write(k.privkey_pem) 41 | 42 | return k 43 | -------------------------------------------------------------------------------- /templates/note_debug.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block title %}{{ config.NAME }}: "{{ note.activity.object.content | html2plaintext | truncate(50) }}"{% endblock %} 4 | {% block header %} 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | {% endblock %} 16 | {% block content %} 17 |
18 | {% include "header.html" %} 19 |
{{ thread | remove_mongo_id | tojson(indent=4) }}
20 |
21 | {% endblock %} 22 | {% block links %}{% endblock %} 23 | -------------------------------------------------------------------------------- /templates/indieauth_flow.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block header %} 4 | {% endblock %} 5 | {% block content %} 6 |
7 | 8 |
9 | {% if client.logo %} 10 |
11 | 12 |
13 | {% endif %} 14 |
15 |
16 | {{ client.name }} 17 |

wants you to login as {{ me }}

18 |
19 |
20 |
21 | 22 |
23 | {% if scopes %} 24 |

Scopes

25 |
    26 | {% for scope in scopes %} 27 |
  • 28 |
  • 29 | {% endfor %} 30 |
31 | {% endif %} 32 | 33 | 34 | 35 | 36 | 37 | 38 |
39 | 40 |
41 | {% endblock %} 42 | -------------------------------------------------------------------------------- /templates/note.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block title %}{{ config.NAME }}{{ note.activity.object | get_text | html2plaintext | trim | truncate(50) }}"{% endblock %} 4 | {% block header %} 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | {% endblock %} 17 | {% block content %} 18 |
19 | {% if request.path != "/admin/thread" %} 20 | {% include "header.html" %} 21 | {% endif %} 22 |
23 | {{ utils.display_thread(thread, likes=likes, shares=shares) }} 24 |
25 |
"""Webmention endpoint discovery (https://www.w3.org/TR/webmention/)."""
import logging
from typing import Optional
from urllib.parse import urlparse

import requests
from bs4 import BeautifulSoup
from little_boxes.urlutils import is_url_valid

logger = logging.getLogger(__name__)


def _make_abs(url: Optional[str], parent: str) -> Optional[str]:
    """Resolve `url` against `parent` if it is not already absolute."""
    if url is None:
        return None

    if url.startswith("http"):
        # Already absolute
        return url

    return (
        urlparse(parent)._replace(path=url, params="", query="", fragment="").geturl()
    )


def _discover_webmention_endpoint(url: str) -> Optional[str]:
    """Fetch `url` and look for a Webmention endpoint.

    Checks the HTTP `Link` headers first, then <link>/<a> elements with
    rel="webmention" in the HTML body. Returns None when unreachable or
    when no endpoint is advertised.
    """
    try:
        resp = requests.get(url, timeout=3)
    except Exception:
        # Unreachable page: treat as "no endpoint"
        return None

    # 1. HTTP `Link` headers (fixed: use the unpacked value instead of re-indexing)
    for rel, link in resp.links.items():
        if "webmention" in rel:
            return _make_abs(link.get("url"), url)

    # 2. rel="webmention" elements in the HTML body
    soup = BeautifulSoup(resp.text, "html5lib")
    for wlink in soup.find_all(["link", "a"], attrs={"rel": "webmention"}):
        if "href" in wlink.attrs:
            return _make_abs(wlink.attrs["href"], url)

    return None


def discover_webmention_endpoint(url: str) -> Optional[str]:
    """Discover the Webmention endpoint of a given URL, if any.

    Passes all the tests at https://webmention.rocks!
    """
    wurl = _discover_webmention_endpoint(url)
    if wurl is None:
        return None
    if not is_url_valid(wurl):
        return None
    return wurl
20 | 21 | 22 | {% include "header.html" %} 23 |
24 |

#{{ tag }}

25 |
26 | {% for item in outbox_data %} 27 | {{ utils.display_note(item.activity.object, meta=item.meta) }} 28 | {% endfor %} 29 |
30 |
31 |
"""Automatic migration tools for the data stored in MongoDB."""
import logging
from abc import ABC
from abc import abstractmethod
from typing import List
from typing import Type

from config import DB

logger = logging.getLogger(__name__)

# Registry of every defined migration, in definition order
_MIGRATIONS: List[Type["Migration"]] = []


def perform() -> None:
    """Run every registered migration (already-applied ones are skipped)."""
    for migration_cls in _MIGRATIONS:
        migration_cls().perform()


class Migration(ABC):
    """Base class for migrations; subclassing auto-registers the migration."""

    def __init__(self) -> None:
        self.name = self.__class__.__qualname__
        self._col = DB.migrations

    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        # Every concrete subclass is tracked so `perform()` can find it
        _MIGRATIONS.append(cls)

    def _apply(self) -> None:
        """Record this migration as applied."""
        self._col.insert_one({"name": self.name})

    def _reset(self) -> None:
        """Forget that this migration was ever applied."""
        self._col.delete_one({"name": self.name})

    def _is_applied(self) -> bool:
        """Check whether this migration already ran."""
        return bool(self._col.find_one({"name": self.name}))

    @abstractmethod
    def migrate(self) -> None:
        """Expected to be implemented by actual migrations."""
        pass

    def perform(self) -> None:
        """Run the migration once, marking it applied afterwards."""
        if self._is_applied():
            logger.info(f"Skipping migration {self.name} (already applied)")
            return

        logger.info(f"Performing migration {self.name}...")
        self.migrate()

        self._apply()
        logger.info("Done")
"""Custom emoji support: index images on disk and build AP Emoji tag objects."""
import mimetypes
import re
from datetime import datetime
from pathlib import Path
from typing import Any
from typing import Dict
from typing import List
from typing import Set

from little_boxes import activitypub as ap

# Matches ":shortcode:" style custom emojis
EMOJI_REGEX = re.compile(r"(:[\d\w]+:)")

# Both indexes are filled once by `_load_emojis`
EMOJIS: Dict[str, Dict[str, Any]] = {}  # keyed by filename
EMOJIS_BY_NAME: Dict[str, Dict[str, Any]] = {}  # keyed by ":name:"


def _load_emojis(root_dir: Path, base_url: str) -> None:
    """Scan `static/emojis` and index every image file as an AP Emoji.

    Idempotent: does nothing if the indexes are already populated.
    """
    if EMOJIS:
        return
    for emoji in (root_dir / "static" / "emojis").iterdir():
        mt = mimetypes.guess_type(emoji.name)[0]
        if not (mt and mt.startswith("image/")):
            # Skip non-image files (e.g. the .gitignore)
            continue
        name = emoji.name.split(".")[0]
        ap_emoji: Dict[str, Any] = {
            "type": ap.ActivityType.EMOJI.value,
            "name": f":{name}:",
            # Fixed epoch timestamp so the `updated` field is stable across restarts
            "updated": ap.format_datetime(datetime.fromtimestamp(0.0).astimezone()),
            "id": f"{base_url}/emoji/{name}",
            "icon": {
                "mediaType": mt,
                "type": ap.ActivityType.IMAGE.value,
                "url": f"{base_url}/static/emojis/{emoji.name}",
            },
        }
        EMOJIS[emoji.name] = ap_emoji
        EMOJIS_BY_NAME[ap_emoji["name"]] = ap_emoji


def tags(content: str) -> List[Dict[str, Any]]:
    """Return the AP Emoji tags for every known ":emoji:" used in `content`.

    Each emoji appears at most once, in first-seen order.
    """
    # Renamed locals to stop shadowing this function's own name
    found: List[Dict[str, Any]] = []
    seen: Set[str] = set()
    for shortcode in EMOJI_REGEX.findall(content):
        if shortcode not in seen and shortcode in EMOJIS_BY_NAME:
            found.append(EMOJIS_BY_NAME[shortcode])
            seen.add(shortcode)

    return found
"""Lookup: resolve a URL or @user@domain handle to an ActivityPub object."""
import little_boxes.activitypub as ap
import mf2py
import requests
from little_boxes.errors import NotAnActivityError
from little_boxes.errors import RemoteServerUnavailableError
from little_boxes.webfinger import get_actor_url


def lookup(url: str) -> ap.BaseActivity:
    """Try to find an AP object related to the given URL."""
    try:
        if url.startswith("@"):
            # Looks like a @user@domain handle: try WebFinger first
            actor_url = get_actor_url(url)
            if actor_url:
                return ap.fetch_remote_activity(actor_url)
    except NotAnActivityError:
        pass
    except requests.HTTPError:
        # Some websites may return 404, 503 or others when they don't support webfinger, and we're just taking a guess
        # when performing the lookup.
        pass
    except requests.RequestException as err:
        raise RemoteServerUnavailableError(f"failed to fetch {url}: {err!r}")

    backend = ap.get_backend()
    try:
        # FIX: use GET, not HEAD — the response *body* is parsed below
        # (mf2py alternates + JSON-LD), and a HEAD response has no body,
        # so those branches could never fire.
        resp = requests.get(
            url,
            timeout=10,
            allow_redirects=True,
            headers={"User-Agent": backend.user_agent()},
        )
    except requests.RequestException as err:
        raise RemoteServerUnavailableError(f"failed to GET {url}: {err!r}")

    try:
        resp.raise_for_status()
    except Exception:
        # Error page: retry with content negotiation (AP Accept header)
        return ap.fetch_remote_activity(url)

    # If the page is HTML, maybe it contains an alternate link pointing to an AP object
    for alternate in mf2py.parse(resp.text).get("alternates", []):
        if alternate.get("type") == "application/activity+json":
            return ap.fetch_remote_activity(alternate["url"])

    try:
        # Maybe the page was JSON-LD?
        data = resp.json()
        return ap.parse_activity(data)
    except Exception:
        pass

    # Try content negotiation (retry with the AP Accept header)
    return ap.fetch_remote_activity(url)
9 | 10 |
11 |

Lists and their members are private.

12 |

New List

13 |
14 | 15 | 16 | 17 | 18 |
19 | 20 |

Lists

21 |

Manage list members in the Following section

22 | 23 |
    24 | {% for l in lists %} 25 |
  • {{ l.name }}
  • 26 | {% endfor %} 27 |
28 | 29 |

Manage lists

30 | {% for l in lists %} 31 |

{{ l.name }} {{ l.members | length }} members

32 |
33 | 34 | 35 | 36 | 37 |
38 | 39 |
40 | 41 | {% for member in l.members %} 42 |
43 |
44 | 45 | 46 | 47 | 48 | 49 |
50 |
51 | 52 |
53 | {{ utils.display_actor_inline(member | get_actor, size=80) }} 54 |
55 | {% endfor %} 56 | 57 | {% endfor %} 58 |
59 |
60 |
61 | {% endblock %} 62 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | SHELL := /bin/bash 2 | PYTHON=python 3 | SETUP_WIZARD_IMAGE=microblogpub-setup-wizard:latest 4 | PWD=$(shell pwd) 5 | 6 | # Build the config (will error if an existing config/me.yml is found) via a Docker container 7 | .PHONY: config 8 | config: 9 | # Build the container for the setup wizard on-the-fly 10 | cd setup_wizard && docker build . -t $(SETUP_WIZARD_IMAGE) 11 | # Run and remove instantly 12 | -docker run -e MICROBLOGPUB_WIZARD_PROJECT_NAME --rm -it --volume $(PWD):/app/out $(SETUP_WIZARD_IMAGE) 13 | # Finally, remove the tagged image 14 | docker rmi $(SETUP_WIZARD_IMAGE) 15 | 16 | # Reload the federation test instances (for local dev) 17 | .PHONY: reload-fed 18 | reload-fed: 19 | docker build . -t microblogpub:latest 20 | docker-compose -p instance2 -f docker-compose-tests.yml stop 21 | docker-compose -p instance1 -f docker-compose-tests.yml stop 22 | WEB_PORT=5006 CONFIG_DIR=./tests/fixtures/instance1/config docker-compose -p instance1 -f docker-compose-tests.yml up -d --force-recreate --build 23 | WEB_PORT=5007 CONFIG_DIR=./tests/fixtures/instance2/config docker-compose -p instance2 -f docker-compose-tests.yml up -d --force-recreate --build 24 | 25 | # Reload the local dev instance 26 | .PHONY: reload-dev 27 | reload-dev: 28 | docker build . -t microblogpub:latest 29 | docker-compose -f docker-compose-dev.yml up -d --force-recreate 30 | 31 | # Build the microblogpub Docker image 32 | .PHONY: microblogpub 33 | microblogpub: 34 | # Update microblog.pub 35 | git pull 36 | # Rebuild the Docker image 37 | docker build . --no-cache -t microblogpub:latest 38 | 39 | .PHONY: css 40 | css: 41 | # Download pure.css if needed 42 | if [[ ! 
"""Best-effort detection of the software a remote server is running."""
from enum import Enum
from enum import unique
from functools import lru_cache
from typing import Optional

import little_boxes.activitypub as ap
import requests


@unique
class SoftwareName(Enum):
    """Known remote-software identifiers."""

    UNKNOWN = "unknown"
    MASTODON = "mastodon"
    MICROBLOGPUB = "microblogpub"


def _get_nodeinfo_url(server: str) -> Optional[str]:
    """Return the server's NodeInfo document URL, or None if unsupported."""
    backend = ap.get_backend()
    # FIX: use a tuple so HTTPS is always tried first — a set literal has no
    # guaranteed iteration order, so the original could probe plain HTTP first.
    for scheme in ("https", "http"):
        try:
            resp = requests.get(
                f"{scheme}://{server}/.well-known/nodeinfo",
                timeout=10,
                allow_redirects=True,
                headers={"User-Agent": backend.user_agent()},
            )
            resp.raise_for_status()
            data = resp.json()
            # The well-known document advertises the actual NodeInfo URL
            for link in data.get("links", []):
                return link["href"]
        except requests.HTTPError:
            # Server answered but has no NodeInfo: give up
            return None
        except requests.RequestException:
            # Connection-level failure: try the next scheme
            continue

    return None


def _try_mastodon_api(server: str) -> bool:
    """Probe the Mastodon instance API as a fallback detection method."""
    for scheme in ("https", "http"):
        try:
            # timeout added for consistency with the other probes in this module
            resp = requests.get(f"{scheme}://{server}/api/v1/instance", timeout=10)
            resp.raise_for_status()
            if resp.json():
                return True
        except requests.HTTPError:
            return False
        except requests.RequestException:
            continue

    return False


@lru_cache(2048)
def get_software_name(server: str) -> str:
    """Return the software name for `server` ("unknown" when undetectable).

    Results are memoized (bounded LRU) since servers rarely change software.
    """
    backend = ap.get_backend()
    nodeinfo_endpoint = _get_nodeinfo_url(server)
    if nodeinfo_endpoint:
        try:
            resp = requests.get(
                nodeinfo_endpoint,
                timeout=10,
                headers={"User-Agent": backend.user_agent()},
            )
            resp.raise_for_status()
            software_name = resp.json().get("software", {}).get("name")
            if software_name:
                return software_name

            return SoftwareName.UNKNOWN.value
        except requests.RequestException:
            return SoftwareName.UNKNOWN.value

    if _try_mastodon_api(server):
        return SoftwareName.MASTODON.value

    return SoftwareName.UNKNOWN.value
30 | dat["pass"] = bcrypt.hashpw( 31 | prompt("password: ", is_password=True).encode(), bcrypt.gensalt() 32 | ).decode() 33 | dat["name"] = prompt("name (e.g. John Doe): ") 34 | dat["summary"] = markdown( 35 | prompt( 36 | "summary (short description, in markdown, press [ESC] then [ENTER] to submit):\n", 37 | multiline=True, 38 | ) 39 | ) 40 | dat["https"] = True 41 | proto = "https" 42 | yn = "" 43 | while yn not in ["y", "n"]: 44 | yn = prompt("will the site be served via https? (y/n): ", default="y").lower() 45 | if yn == "n": 46 | dat["https"] = False 47 | proto = "http" 48 | 49 | dat["icon_url"] = prompt( 50 | "icon URL: ", default=f'{proto}://{dat["domain"]}/static/nopic.png' 51 | ) 52 | 53 | out = "" 54 | for k, v in dat.items(): 55 | out += f"{k}: {v!r}\n" 56 | 57 | with config_file.open("w") as f: 58 | f.write(out) 59 | 60 | proj_name = os.getenv("MICROBLOGPUB_WIZARD_PROJECT_NAME", "microblogpub") 61 | 62 | env = { 63 | "WEB_PORT": 5005, 64 | "CONFIG_DIR": "./config", 65 | "DATA_DIR": "./data", 66 | "POUSSETACHES_AUTH_KEY": binascii.hexlify(os.urandom(32)).decode(), 67 | "COMPOSE_PROJECT_NAME": proj_name, 68 | } 69 | 70 | out2 = "" 71 | for k, v in env.items(): 72 | out2 += f"{k}={v}\n" 73 | 74 | with env_file.open("w") as f: 75 | f.write(out2) 76 | 77 | print("Done") 78 | sys.exit(0) 79 | 80 | 81 | if __name__ == "__main__": 82 | try: 83 | main() 84 | except KeyboardInterrupt: 85 | print("Aborted") 86 | sys.exit(1) 87 | -------------------------------------------------------------------------------- /templates/following.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block title %}Following - {{ config.NAME }}{% endblock %} 4 | {% block header %} 5 | {% endblock %} 6 | {% block content %} 7 |
8 | {% include "header.html" %} 9 | 10 |
11 | {% for (follow_id, meta) in following_data %} 12 | {% set follow = meta.object %} 13 | {% if session.logged_in %} 14 |
15 | profile 16 |
17 | 18 | 19 | 20 | 21 |
22 | {% if meta.notification_follows_back %} 23 | follows you back 24 | {% endif %} 25 | 26 | {% if lists %} 27 |
28 | 29 | 30 | 31 | 39 | 40 |
41 | {% endif %} 42 | 43 | {% for l in lists %} 44 | {% if follow.id in l.members %} 45 |
46 | 47 | 48 | 49 | 50 |
51 | 52 | 53 | {% endif %} 54 | {% endfor %} 55 | 56 | 57 |
58 | 59 | {% endif %} 60 |
61 | {{ utils.display_actor_inline(follow, size=80) }} 62 |
63 | {% endfor %} 64 | {{ utils.display_pagination(older_than, newer_than) }} 65 |
66 | 67 |
68 | {% endblock %} 69 | {% block links %} 70 | 71 | {{ utils.display_pagination_links(older_than, newer_than) }}{% endblock %} 72 | -------------------------------------------------------------------------------- /templates/lookup.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block title %}Lookup - {{ config.NAME }}{% endblock %} 4 | {% block content %} 5 |
6 |
7 | 8 |

Interact with an ActivityPub object via its URL or look for a user using @user@domain.tld

9 | 10 |
11 | 12 | 13 |
14 | 15 | {% if data %} 16 | {% set data = data.to_dict() %} 17 |
18 | {% if data | has_actor_type %} 19 |
20 | {% if following %} 21 | profile 22 |
23 | 24 | 25 | 26 | 27 |
28 |
29 | 30 | 31 | 32 | 33 |
34 | {% else %} 35 |
36 | 37 | 38 | 39 | 40 |
41 | {% endif %} 42 | {% if follower %}follows you!{% endif %} 43 |
44 | 45 | 46 | {{ utils.display_actor_inline(data, size=80) }} 47 | 48 | {% elif data | has_type('Create') %} 49 | {{ utils.display_note(data.object, meta=meta) }} 50 | {% elif data | has_type(['Note', 'Article', 'Video', 'Audio', 'Page', 'Question']) %} 51 | {{ utils.display_note(data, meta=meta) }} 52 | {% elif data | has_type('Announce') %} 53 | {% set boost_actor = meta.actor %} 54 |

55 | {{ boost_actor.name }} boosted 56 |

57 | {{ utils.display_note(meta.object, meta=meta) }} 58 | 59 | {% endif %} 60 |
61 | {% endif %} 62 | 63 |
64 |
"""Well-known endpoints: WebFinger and NodeInfo discovery/serving."""
import mimetypes
from typing import Any

import flask
from flask import abort
from flask import request
from little_boxes import activitypub as ap

import config
from config import DB
from core.meta import Box
from core.shared import jsonify

blueprint = flask.Blueprint("well_known", __name__)


@blueprint.route("/.well-known/webfinger")
def wellknown_webfinger() -> Any:
    """Exposes/servers WebFinger data."""
    subject = f"acct:{config.USERNAME}@{config.DOMAIN}"
    resource = request.args.get("resource")
    # Only answer for this instance's own identity
    if resource not in [subject, config.ID]:
        abort(404)

    links = [
        {
            "rel": "http://webfinger.net/rel/profile-page",
            "type": "text/html",
            "href": config.ID,
        },
        {"rel": "self", "type": "application/activity+json", "href": config.ID},
        {
            "rel": "http://ostatus.org/schema/1.0/subscribe",
            "template": config.BASE_URL + "/authorize_follow?profile={uri}",
        },
        {"rel": "magic-public-key", "href": config.KEY.to_magic_key()},
        {
            "href": config.ICON_URL,
            "rel": "http://webfinger.net/rel/avatar",
            "type": mimetypes.guess_type(config.ICON_URL)[0],
        },
    ]

    out = {"subject": subject, "aliases": [config.ID], "links": links}
    return jsonify(out, "application/jrd+json; charset=utf-8")


@blueprint.route("/.well-known/nodeinfo")
def wellknown_nodeinfo() -> Any:
    """Exposes the NodeInfo endpoint (http://nodeinfo.diaspora.software/)."""
    discovery = {
        "links": [
            {
                "rel": "http://nodeinfo.diaspora.software/ns/schema/2.1",
                "href": f"{config.ID}/nodeinfo",
            }
        ]
    }
    return jsonify(discovery)


@blueprint.route("/nodeinfo")
def nodeinfo() -> Any:
    """NodeInfo endpoint."""
    # Local posts = non-deleted Create/Announce activities in the outbox
    q = {
        "box": Box.OUTBOX.value,
        "meta.deleted": False,
        "type": {"$in": [ap.ActivityType.CREATE.value, ap.ActivityType.ANNOUNCE.value]},
    }

    software = {
        "name": "microblogpub",
        "version": config.VERSION,
        "repository": "https://github.com/tsileo/microblog.pub",
    }
    metadata = {
        "nodeName": f"@{config.USERNAME}@{config.DOMAIN}",
        "version": config.VERSION,
        "versionDate": config.VERSION_DATE,
    }
    out = {
        "version": "2.1",
        "software": software,
        "protocols": ["activitypub"],
        "services": {"inbound": [], "outbound": []},
        "openRegistrations": False,
        "usage": {"users": {"total": 1}, "localPosts": DB.activities.count(q)},
        "metadata": metadata,
    }

    return jsonify(
        out,
        "application/json; profile=http://nodeinfo.diaspora.software/ns/schema/2.1#",
    )
55 | {% block content %}{% endblock %} 56 |
57 | 62 | 63 | 64 | -------------------------------------------------------------------------------- /templates/admin_tasks.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block title %}Tasks - {{ config.NAME }}{% endblock %} 4 | {% block content %} 5 |
6 | {% include "header.html" %} 7 |
8 | 9 |

Cron

10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | {% for task in cron %} 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | {% endfor %} 33 | 34 |
#URLPayloadScheduleNext runResponse
{{ task.task_id }}{{ task.url }} ({{ task.expected }}){{ task.payload }}{{ task.schedule }}{{ task.next_run }}Tries #{{ task.tries }}: {{ task.last_error_body }} ({{ task.last_error_status_code }})
35 | 36 |

Dead

37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | {% for task in dead %} 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | {% endfor %} 58 | 59 |
#URLPayloadNext runResponse
{{ task.task_id }}{{ task.url }} ({{ task.expected }}){{ task.payload }}{{ task.next_run }}Tries #{{ task.tries }}: {{ task.last_error_body }} ({{ task.last_error_status_code }})
60 | 61 | 62 |

Waiting

63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | {% for task in waiting %} 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | {% endfor %} 84 | 85 |
#URLPayloadNext runResponse
{{ task.task_id }}{{ task.url }} ({{ task.expected }}){{ task.payload }}{{ task.next_run }}Tries #{{ task.tries }}: {{ task.last_error_body }} ({{ task.last_error_status_code }})
86 | 87 |

Success

88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | {% for task in success %} 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | {% endfor %} 109 | 110 |
#URLPayloadNext runResponse
{{ task.task_id }}{{ task.url }} ({{ task.expected }}){{ task.payload }}{{ task.next_run }}Tries #{{ task.tries }}: {{ task.last_error_body }} ({{ task.last_error_status_code }})
111 | 112 | 113 | 114 |
115 |
import pymongo

from config import DB
from config import MEDIA_CACHE
from core.meta import MetaKey
from core.meta import _meta


def create_indexes():
    """Ensure the MongoDB collections/indexes the app relies on exist.

    Also performs opportunistic maintenance (collection compaction) on
    startup; all index creations are idempotent.
    """
    # The trash collection is capped so it can never grow unbounded.
    if "trash" not in DB.collection_names():
        DB.create_collection("trash", capped=True, size=50 << 20)  # 50 MB

    # NOTE(review): collection_names() is deprecated in newer pymongo
    # (list_collection_names()) — verify against the pinned driver version.
    if "activities" in DB.collection_names():
        DB.command("compact", "activities")

    # Compacting GridFS is best-effort: ignore storage engines that
    # don't support the compact command.
    try:
        MEDIA_CACHE.fs._GridFS__database.command("compact", "fs.files")
        MEDIA_CACHE.fs._GridFS__database.command("compact", "fs.chunks")
    except Exception:
        pass

    asc = pymongo.ASCENDING

    # All the indexes needed on the activities collection, grouped by the
    # query they serve.
    activities_indexes = [
        [(_meta(MetaKey.NOTIFICATION), asc)],
        [(_meta(MetaKey.NOTIFICATION_UNREAD), asc)],
        [("remote_id", asc)],
        [("meta.actor_id", asc)],
        [("meta.object_id", asc)],
        [("meta.mentions", asc)],
        [("meta.hashtags", asc)],
        [("meta.thread_root_parent", asc)],
        [("meta.thread_root_parent", asc), ("meta.deleted", asc)],
        [("activity.object.id", asc), ("meta.deleted", asc)],
        [("meta.object_id", asc), ("type", asc)],
        # Index for the block query
        [("box", asc), ("type", asc), ("meta.undo", asc)],
        # Index for count queries
        [("box", asc), ("type", asc), ("meta.undo", asc), ("meta.deleted", asc)],
        [("box", asc)],
        # Outbox query
        [
            ("box", asc),
            ("type", asc),
            ("meta.undo", asc),
            ("meta.deleted", asc),
            ("meta.public", asc),
        ],
        [
            ("type", asc),
            ("activity.object.type", asc),
            ("activity.object.inReplyTo", asc),
            ("meta.deleted", asc),
        ],
    ]
    for keys in activities_indexes:
        DB.activities.create_index(keys)

    # For the is_actor_icon_cached query
    MEDIA_CACHE.fs._GridFS__files.create_index([("url", 1), ("kind", 1)])

    # Replies indexes
    for keys in [
        [("remote_id", asc)],
        [("meta.thread_root_parent", asc)],
        [("meta.thread_root_parent", asc), ("meta.deleted", asc)],
    ]:
        DB.replies.create_index(keys)
15 | - mypy --ignore-missing-imports setup_wizard 16 | 17 | # Build the container images we need for the test suite 18 | - name: build_containers 19 | image: docker:dind 20 | environment: 21 | DOCKER_HOST: tcp://docker:2375 22 | commands: 23 | - apk update && apk upgrade && apk add --no-cache bash git openssh curl 24 | - docker network create fede 25 | - docker pull mongo 26 | - docker pull poussetaches/poussetaches 27 | - docker build . -t microblogpub:latest 28 | 29 | # Run poussetaches (will be shared by the two microblog.pub instances) "in the background" 30 | - name: run_poussetaches 31 | image: docker:dind 32 | detach: true 33 | environment: 34 | DOCKER_HOST: tcp://docker:2375 35 | POUSSETACHES_AUTH_KEY: lol 36 | commands: 37 | - docker run -p 7991:7991 --net fede -e POUSSETACHES_AUTH_KEY --name poussetaches poussetaches/poussetaches 38 | 39 | # Run MongoDB (will be shared by the two microblog.pub instances) "in the background" 40 | - name: run_mongodb 41 | image: docker:dind 42 | detach: true 43 | environment: 44 | DOCKER_HOST: tcp://docker:2375 45 | commands: 46 | - docker run -p 27017:27017 --net fede --name mongo mongo 47 | 48 | # Run a first microblog.pub instance "in the background" 49 | - name: microblogpub_instance1 50 | image: docker:dind 51 | detach: true 52 | environment: 53 | DOCKER_HOST: tcp://docker:2375 54 | MICROBLOGPUB_DEBUG: 1 55 | MICROBLOGPUB_POUSSETACHES_HOST: http://poussetaches:7991 56 | MICROBLOGPUB_INTERNAL_HOST: http://instance1_web:5005 57 | MICROBLOGPUB_MONGODB_HOST: mongo:27017 58 | POUSSETACHES_AUTH_KEY: lol 59 | commands: 60 | - sleep 5 61 | - 'docker run -p 5006:5005 --net fede -v "`pwd`/tests/fixtures/instance1/config:/app/config" -e MICROBLOGPUB_DEBUG -e MICROBLOGPUB_INTERNAL_HOST -e MICROBLOGPUB_MONGODB_HOST -e MICROBLOGPUB_POUSSETACHES_HOST -e POUSSETACHES_AUTH_KEY --name instance1_web microblogpub' 62 | 63 | # Run the second microblog.pub instance "in the background" 64 | - name: microblogpub_instance2 65 | image: 
docker:dind 66 | detach: true 67 | environment: 68 | DOCKER_HOST: tcp://docker:2375 69 | MICROBLOGPUB_DEBUG: 1 70 | MICROBLOGPUB_POUSSETACHES_HOST: http://poussetaches:7991 71 | MICROBLOGPUB_INTERNAL_HOST: http://instance2_web:5005 72 | MICROBLOGPUB_MONGODB_HOST: mongo:27017 73 | POUSSETACHES_AUTH_KEY: lol 74 | commands: 75 | - 'docker run -p 5007:5005 --net fede -v "`pwd`/tests/fixtures/instance2/config:/app/config" -e MICROBLOGPUB_DEBUG -e MICROBLOGPUB_INTERNAL_HOST -e MICROBLOGPUB_MONGODB_HOST -e MICROBLOGPUB_POUSSETACHES_HOST -e POUSSETACHES_AUTH_KEY --name instance2_web microblogpub' 76 | 77 | # Run some tests against the two instances to ensure federation is working 78 | - name: federation_test 79 | image: python:3 80 | commands: 81 | - pip install -U pip 82 | - pip install -r dev-requirements.txt 83 | # Federation tests (with two local instances) 84 | - python -m pytest -v -s --ignore data -k federatio 85 | 86 | # Setup the services needed to do some "Docker in Docker" (or dind) 87 | services: 88 | - name: docker 89 | image: docker:dind 90 | privileged: true 91 | --- 92 | kind: signature 93 | hmac: ae911176117298c18ecfcd95fbdbd62304c5f32462b42f2aefdd5a5b834fed60 94 | 95 | ... 96 | -------------------------------------------------------------------------------- /templates/index.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block header %} 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | {% endblock %} 17 | {% block content %} 18 |
19 | 20 | 21 | {% include "header.html" %} 22 | 23 |
24 | {% for item in pinned %} 25 | {% if item.meta.pinned %} 26 |

27 | pinned 28 |

29 | {% endif %} 30 | 31 | {{ utils.display_note(item.activity.object, meta=item.meta, no_color=True) }} 32 | {% endfor %} 33 | 34 | {% for item in outbox_data %} 35 | 36 | {% if item | has_type('Announce') %} 37 | {% if "actor" in item.meta %} 38 | {% set boost_actor = item.meta.actor %} 39 | {% if session.logged_in %} 40 |
41 |
42 | 43 | 44 | 45 | 46 |
47 | {{ utils.display_actor_box(boost_actor, after="boosted") }} 48 | {{ utils.display_in_reply_to(item.meta, item.activity.object) }} 49 |
50 | {% else %} 51 |

52 | {{ utils.display_actor_box(boost_actor, after="boosted") }} 53 | {{ utils.display_in_reply_to(item.meta, item.activity.object) }} 54 |

55 | {% endif %} 56 | {% endif %} 57 | {% if item.meta.object %} 58 | {{ utils.display_note(item.meta.object, meta=item.meta) }} 59 | {% endif %} 60 | {% elif item | has_type('Create') %} 61 | {% if item.activity.object.inReplyTo %} 62 |

63 | {{ utils.display_in_reply_to(item.meta, item.activity.object) }} 64 |

65 | {% endif %} 66 | {{ utils.display_note(item.activity.object, meta=item.meta, no_color=True) }} 67 | {% endif %} 68 | 69 | {% endfor %} 70 | 71 | {{ utils.display_pagination(older_than, newer_than) }} 72 |
73 | 74 |
from typing import Any
from typing import Dict
from typing import Optional

from feedgen.feed import FeedGenerator
from html2text import html2text
from little_boxes import activitypub as ap

from config import ID
from config import ME
from config import USERNAME
from core.db import DB
from core.meta import Box


def _public_outbox_notes(limit: int = 10):
    """Return a cursor over the latest public Create activities in the outbox."""
    query = {
        "box": Box.OUTBOX.value,
        "type": "Create",
        "meta.deleted": False,
        "meta.public": True,
    }
    return DB.activities.find(query, limit=limit).sort("_id", -1)


def gen_feed():
    """Build the Atom feed document (via feedgen) for the public outbox."""
    fg = FeedGenerator()
    fg.id(f"{ID}")
    fg.title(f"{USERNAME} notes")
    fg.author({"name": USERNAME})
    fg.link(href=ID, rel="alternate")
    fg.description(f"{USERNAME} notes")
    fg.logo(ME.get("icon", {}).get("url"))
    fg.language("en")
    for item in _public_outbox_notes():
        obj = item["activity"]["object"]
        entry = fg.add_entry()
        entry.id(obj.get("url"))
        entry.link(href=obj.get("url"))
        entry.title(obj["content"])
        entry.description(obj["content"])
    return fg


def json_feed(path: str) -> Dict[str, Any]:
    """JSON Feed (https://jsonfeed.org/) document."""
    items = [
        {
            "id": item["activity"]["id"],
            "url": item["activity"]["object"].get("url"),
            "content_html": item["activity"]["object"]["content"],
            "content_text": html2text(item["activity"]["object"]["content"]),
            "date_published": item["activity"]["object"].get("published"),
        }
        for item in _public_outbox_notes()
    ]
    return {
        "version": "https://jsonfeed.org/version/1",
        "user_comment": (
            "This is a microblog feed. You can add this to your feed reader using the following URL: "
            + ID
            + path
        ),
        "title": USERNAME,
        "home_page_url": ID,
        "feed_url": ID + path,
        "author": {
            "name": USERNAME,
            "url": ID,
            "avatar": ME.get("icon", {}).get("url"),
        },
        "items": items,
    }


def build_inbox_json_feed(
    path: str, request_cursor: Optional[str] = None
) -> Dict[str, Any]:
    """Build a JSON feed from the inbox activities."""
    query: Dict[str, Any] = {
        "type": "Create",
        "meta.deleted": False,
        "box": Box.INBOX.value,
    }
    if request_cursor:
        # Paginate by MongoDB _id, descending.
        query["_id"] = {"$lt": request_cursor}

    items = []
    cursor = None
    for item in DB.activities.find(query, limit=50).sort("_id", -1):
        # Fetch the author so each entry can embed name/URL/avatar.
        actor = ap.get_backend().fetch_iri(item["activity"]["actor"])
        obj = item["activity"]["object"]
        items.append(
            {
                "id": item["activity"]["id"],
                "url": obj.get("url"),
                "content_html": obj["content"],
                "content_text": html2text(obj["content"]),
                "date_published": obj.get("published"),
                "author": {
                    "name": actor.get("name", actor.get("preferredUsername")),
                    "url": actor.get("url"),
                    "avatar": actor.get("icon", {}).get("url"),
                },
            }
        )
        cursor = str(item["_id"])

    resp = {
        "version": "https://jsonfeed.org/version/1",
        "title": f"{USERNAME}'s stream",
        "home_page_url": ID,
        "feed_url": ID + path,
        "items": items,
    }
    # Only advertise a next page when the current page was full.
    if cursor and len(items) == 50:
        resp["next_url"] = ID + path + "?cursor=" + cursor

    return resp
import os
from datetime import datetime
from datetime import timezone
from typing import Any
from typing import Dict
from typing import Set

from little_boxes import activitypub as ap
from poussetaches import PousseTaches

from config import DISABLE_WEBMENTIONS
from config import MEDIA_CACHE
from utils import parse_datetime

# Client for the poussetaches task queue; queued payloads are delivered
# back to the internal host as HTTP POSTs on the given /task/* endpoints.
p = PousseTaches(
    os.getenv("MICROBLOGPUB_POUSSETACHES_HOST", "http://localhost:7991"),
    os.getenv("MICROBLOGPUB_INTERNAL_HOST", "http://localhost:5000"),
)


class Tasks:
    """Namespace of helpers that enqueue background tasks."""

    @staticmethod
    def cache_object(iri: str) -> None:
        """Queue caching of the object at `iri`."""
        p.push(iri, "/task/cache_object")

    @staticmethod
    def cache_actor(iri: str, also_cache_attachments: bool = True) -> None:
        """Queue caching of an actor (optionally with its attachments)."""
        p.push(
            {"iri": iri, "also_cache_attachments": also_cache_attachments},
            "/task/cache_actor",
        )

    @staticmethod
    def cache_actor_icon(icon_url: str, actor_iri: str) -> None:
        """Queue caching of an actor's icon unless it is already cached."""
        if MEDIA_CACHE.is_actor_icon_cached(icon_url):
            return None

        p.push({"icon_url": icon_url, "actor_iri": actor_iri}, "/task/cache_actor_icon")

    @staticmethod
    def cache_emoji(url: str, iri: str) -> None:
        """Queue caching of a custom emoji unless it is already cached."""
        if MEDIA_CACHE.is_emoji_cached(iri):
            return None

        p.push({"url": url, "iri": iri}, "/task/cache_emoji")

    @staticmethod
    def send_webmentions(activity: ap.Create, links: Set[str]) -> None:
        """Queue one webmention per outgoing link (no-op when disabled)."""
        if DISABLE_WEBMENTIONS:
            return None

        for link in links:
            p.push(
                {
                    "link": link,
                    "note_url": activity.get_object().get_url(),
                    "remote_id": activity.id,
                },
                "/task/send_webmention",
            )

    @staticmethod
    def cache_emojis(activity: ap.BaseActivity) -> None:
        """Queue caching of every emoji referenced by the activity."""
        for emoji in activity.get_emojis():
            try:
                Tasks.cache_emoji(emoji.get_icon_url(), emoji.id)
            except KeyError:
                # TODO(tsileo): log invalid emoji
                pass

    @staticmethod
    def post_to_remote_inbox(payload: str, recp: str) -> None:
        """Queue delivery of a signed payload to a remote inbox."""
        p.push({"payload": payload, "to": recp}, "/task/post_to_remote_inbox")

    @staticmethod
    def forward_activity(iri: str) -> None:
        """Queue forwarding of an inbox activity to followers."""
        p.push(iri, "/task/forward_activity")

    @staticmethod
    def fetch_og_meta(iri: str) -> None:
        """Queue OpenGraph metadata fetching for the activity's links."""
        p.push(iri, "/task/fetch_og_meta")

    @staticmethod
    def process_reply(iri: str) -> None:
        """Queue reply processing for the given activity."""
        p.push(iri, "/task/process_reply")

    @staticmethod
    def process_new_activity(iri: str) -> None:
        """Queue post-ingestion processing for a new activity."""
        p.push(iri, "/task/process_new_activity")

    @staticmethod
    def cache_attachments(iri: str) -> None:
        """Queue caching of all attachments of the given activity."""
        p.push(iri, "/task/cache_attachments")

    @staticmethod
    def cache_attachment(attachment: Dict[str, Any], iri: str) -> None:
        """Queue caching of a single attachment."""
        p.push({"iri": iri, "attachment": attachment}, "/task/cache_attachment")

    @staticmethod
    def finish_post_to_inbox(iri: str) -> None:
        """Queue the final step of inbox processing."""
        p.push(iri, "/task/finish_post_to_inbox")

    @staticmethod
    def finish_post_to_outbox(iri: str) -> None:
        """Queue the final step of outbox processing."""
        p.push(iri, "/task/finish_post_to_outbox")

    @staticmethod
    def send_actor_update() -> None:
        """Queue a profile Update broadcast (slightly delayed to debounce)."""
        p.push({}, "/task/send_actor_update", delay=2)

    @staticmethod
    def update_question_outbox(iri: str, open_for: int) -> None:
        """Queue a refresh of a local poll once it closes."""
        p.push(
            iri, "/task/update_question", delay=open_for
        )  # XXX: delay expects minutes

    @staticmethod
    def fetch_remote_question(question) -> None:
        """Queue a re-fetch of a remote poll at its closing time."""
        now = datetime.now(timezone.utc)
        closes_at = parse_datetime(question.closed or question.endTime)
        remaining_minutes = int((closes_at - now).total_seconds() / 60)

        if remaining_minutes > 0:
            # Only push the task if the poll is not ended yet
            p.push(
                question.id, "/task/fetch_remote_question", delay=remaining_minutes
            )  # XXX: delay expects minutes
from datetime import datetime
from enum import Enum
from enum import unique
from typing import Any
from typing import Dict
from typing import List
from typing import Union

from little_boxes import activitypub as ap

# A MongoDB sub-query fragment; helpers below compose these with `**`.
_SubQuery = Dict[str, Any]


@unique
class Box(Enum):
    """The three "boxes" an activity can be stored in."""

    INBOX = "inbox"
    OUTBOX = "outbox"
    REPLIES = "replies"


@unique
class FollowStatus(Enum):
    """Lifecycle of an outgoing follow request."""

    WAITING = "waiting"
    ACCEPTED = "accepted"
    REJECTED = "rejected"


@unique
class MetaKey(Enum):
    """Keys of the `meta` sub-document attached to stored activities."""

    NOTIFICATION = "notification"
    NOTIFICATION_UNREAD = "notification_unread"
    NOTIFICATION_FOLLOWS_BACK = "notification_follows_back"
    POLL_ANSWER = "poll_answer"
    POLL_ANSWER_TO = "poll_answer_to"
    STREAM = "stream"
    ACTOR_ID = "actor_id"
    ACTOR = "actor"
    ACTOR_HASH = "actor_hash"
    UNDO = "undo"
    PUBLISHED = "published"
    GC_KEEP = "gc_keep"
    OBJECT = "object"
    OBJECT_ID = "object_id"
    OBJECT_ACTOR = "object_actor"
    OBJECT_ACTOR_ID = "object_actor_id"
    OBJECT_ACTOR_HASH = "object_actor_hash"
    PUBLIC = "public"

    PINNED = "pinned"
    HASHTAGS = "hashtags"
    MENTIONS = "mentions"

    FOLLOW_STATUS = "follow_status"

    THREAD_ROOT_PARENT = "thread_root_parent"

    IN_REPLY_TO = "in_reply_to"
    IN_REPLY_TO_SELF = "in_reply_to_self"
    IN_REPLY_TO_ACTOR = "in_reply_to_actor"

    SERVER = "server"
    VISIBILITY = "visibility"
    OBJECT_VISIBILITY = "object_visibility"

    DELETED = "deleted"
    BOOSTED = "boosted"
    LIKED = "liked"

    COUNT_LIKE = "count_like"
    COUNT_BOOST = "count_boost"
    COUNT_REPLY = "count_reply"

    EMOJI_REACTIONS = "emoji_reactions"


def _meta(mk: MetaKey) -> str:
    """Return the dotted MongoDB field path for a meta key."""
    return f"meta.{mk.value}"


def flag(mk: MetaKey, val: Any) -> _SubQuery:
    """Build a `{meta.<key>: val}` query fragment."""
    return {_meta(mk): val}


def by_remote_id(remote_id: str) -> _SubQuery:
    """Match an activity by its remote (ActivityPub) id."""
    return {"remote_id": remote_id}


def in_inbox() -> _SubQuery:
    """Restrict a query to inbox activities."""
    return {"box": Box.INBOX.value}


def in_outbox() -> _SubQuery:
    """Restrict a query to outbox activities."""
    return {"box": Box.OUTBOX.value}


def by_type(type_: Union[ap.ActivityType, List[ap.ActivityType]]) -> _SubQuery:
    """Match one activity type, or any of a list of types via `$in`."""
    if isinstance(type_, list):
        return {"type": {"$in": [t.value for t in type_]}}

    return {"type": type_.value}


def follow_request_accepted() -> _SubQuery:
    """Match follow requests that were accepted."""
    return flag(MetaKey.FOLLOW_STATUS, FollowStatus.ACCEPTED.value)


def not_poll_answer() -> _SubQuery:
    """Exclude poll-answer activities."""
    return flag(MetaKey.POLL_ANSWER, False)


def not_in_reply_to() -> _SubQuery:
    """Match activities whose object is not a reply."""
    return {"activity.object.inReplyTo": None}


def not_undo() -> _SubQuery:
    """Exclude undone activities."""
    return flag(MetaKey.UNDO, False)


def not_deleted() -> _SubQuery:
    """Exclude deleted activities."""
    return flag(MetaKey.DELETED, False)


def pinned() -> _SubQuery:
    """Match pinned activities."""
    return flag(MetaKey.PINNED, True)


def by_actor(actor: ap.BaseActivity) -> _SubQuery:
    """Match activities by their actor object."""
    return flag(MetaKey.ACTOR_ID, actor.id)


def by_actor_id(actor_id: str) -> _SubQuery:
    """Match activities by actor IRI."""
    return flag(MetaKey.ACTOR_ID, actor_id)


def by_object_id(object_id: str) -> _SubQuery:
    """Match activities by the IRI of their object."""
    return flag(MetaKey.OBJECT_ID, object_id)


def is_public() -> _SubQuery:
    """Match publicly visible activities."""
    return flag(MetaKey.PUBLIC, True)


def by_visibility(vis: ap.Visibility) -> _SubQuery:
    """Match activities by visibility level."""
    return flag(MetaKey.VISIBILITY, vis.name)


def by_object_visibility(vis: ap.Visibility) -> _SubQuery:
    """Match activities by their object's visibility level."""
    return flag(MetaKey.OBJECT_VISIBILITY, vis.name)


def by_hashtag(ht: str) -> _SubQuery:
    """Match activities tagged with the given hashtag."""
    return flag(MetaKey.HASHTAGS, ht)


def inc(mk: MetaKey, val: int) -> _SubQuery:
    """Build a `$inc` update for a meta counter."""
    return {"$inc": flag(mk, val)}


def upsert(data: Dict[MetaKey, Any]) -> _SubQuery:
    """Build a `$set` update from a {MetaKey: value} mapping."""
    return {"$set": {_meta(mk): val for mk, val in data.items()}}


def published_after(dt: datetime) -> _SubQuery:
    """Match activities published strictly after `dt`."""
    return flag(MetaKey.PUBLISHED, {"$gt": ap.format_datetime(dt)})
logger.info(f"skipping media link {l}") 49 | continue 50 | 51 | check_url(l) 52 | 53 | # Remove any AP objects 54 | try: 55 | lookup(l) 56 | continue 57 | except NotAnActivityError: 58 | pass 59 | except Exception: 60 | logger.exception(f"skipping {l} because of issues during AP lookup") 61 | continue 62 | 63 | try: 64 | h = requests.head( 65 | l, headers={"User-Agent": user_agent}, timeout=3, allow_redirects=True 66 | ) 67 | h.raise_for_status() 68 | except requests.HTTPError as http_err: 69 | logger.debug( 70 | f"failed to HEAD {l}, got a {http_err.response.status_code}: {http_err.response.text}" 71 | ) 72 | continue 73 | except requests.RequestException as err: 74 | logger.debug(f"failed to HEAD {l}: {err!r}") 75 | continue 76 | 77 | if h.headers.get("content-type") and not h.headers.get( 78 | "content-type" 79 | ).startswith("text/html"): 80 | logger.debug(f"skipping {l} for bad content type") 81 | continue 82 | 83 | try: 84 | r = requests.get( 85 | l, headers={"User-Agent": user_agent}, timeout=5, allow_redirects=True 86 | ) 87 | r.raise_for_status() 88 | except requests.HTTPError as http_err: 89 | logger.debug( 90 | f"failed to GET {l}, got a {http_err.response.status_code}: {http_err.response.text}" 91 | ) 92 | continue 93 | except requests.RequestException as err: 94 | logger.debug(f"failed to GET {l}: {err!r}") 95 | continue 96 | 97 | # FIXME(tsileo): check mimetype via the URL too (like we do for images) 98 | if not r.headers.get("content-type") or not r.headers.get( 99 | "content-type" 100 | ).startswith("text/html"): 101 | continue 102 | 103 | r.encoding = "UTF-8" 104 | html = r.text 105 | try: 106 | data = dict(opengraph.OpenGraph(html=html)) 107 | except Exception: 108 | logger.exception(f"failed to parse {l}") 109 | continue 110 | 111 | # Keep track of the fetched URL as some crappy websites use relative URLs everywhere 112 | data["_input_url"] = l 113 | u = urlparse(l) 114 | 115 | # If it's a relative URL, build the absolute version 116 | if "image" 
in data and data["image"].startswith("/"): 117 | data["image"] = u._replace( 118 | path=data["image"], params="", query="", fragment="" 119 | ).geturl() 120 | 121 | if "url" in data and data["url"].startswith("/"): 122 | data["url"] = u._replace( 123 | path=data["url"], params="", query="", fragment="" 124 | ).geturl() 125 | 126 | if data.get("url"): 127 | res.append(data) 128 | 129 | return res 130 | -------------------------------------------------------------------------------- /docs/api.md: -------------------------------------------------------------------------------- 1 | ## API 2 | 3 | Your admin API key can be found at `config/admin_api_key.key`. 4 | 5 | ## ActivityPub API 6 | 7 | ### GET / 8 | 9 | Returns the actor profile, with links to all the "standard" collections. 10 | 11 | ### GET /tags/:tag 12 | 13 | Special collection that reference notes with the given tag. 14 | 15 | ### GET /stream 16 | 17 | Special collection that returns the stream/inbox as displayed in the UI. 18 | 19 | ## User API 20 | 21 | The user API is used by the admin UI (and requires a CSRF token when used with a regular user session), but it can also be accessed with an API key. 22 | 23 | All the examples are using [HTTPie](https://httpie.org/). 24 | 25 | ### POST /api/note/delete{?id} 26 | 27 | Deletes the given note `id` (the note must from the instance outbox). 28 | 29 | Answers a **201** (Created) status code. 30 | 31 | You can pass the `id` via JSON, form data or query argument. 32 | 33 | #### Example 34 | 35 | ```shell 36 | $ http POST https://microblog.pub/api/note/delete Authorization:'Bearer ' id=http://microblob.pub/outbox//activity 37 | ``` 38 | 39 | #### Response 40 | 41 | ```json 42 | { 43 | "activity": "https://microblog.pub/outbox/" 44 | } 45 | ``` 46 | 47 | ### POST /api/note/pin{?id} 48 | 49 | Adds the given note `id` (the note must from the instance outbox) to the featured collection (and pins it on the homepage). 50 | 51 | Answers a **201** (Created) status code. 
52 | 53 | You can pass the `id` via JSON, form data or query argument. 54 | 55 | #### Example 56 | 57 | ```shell 58 | $ http POST https://microblog.pub/api/note/pin Authorization:'Bearer ' id=http://microblob.pub/outbox//activity 59 | ``` 60 | 61 | #### Response 62 | 63 | ```json 64 | { 65 | "pinned": true 66 | } 67 | ``` 68 | 69 | ### POST /api/note/unpin{?id} 70 | 71 | Removes the given note `id` (the note must from the instance outbox) from the featured collection (and un-pins it). 72 | 73 | Answers a **201** (Created) status code. 74 | 75 | You can pass the `id` via JSON, form data or query argument. 76 | 77 | #### Example 78 | 79 | ```shell 80 | $ http POST https://microblog.pub/api/note/unpin Authorization:'Bearer ' id=http://microblob.pub/outbox//activity 81 | ``` 82 | 83 | #### Response 84 | 85 | ```json 86 | { 87 | "pinned": false 88 | } 89 | ``` 90 | 91 | ### POST /api/like{?id} 92 | 93 | Likes the given activity. 94 | 95 | Answers a **201** (Created) status code. 96 | 97 | You can pass the `id` via JSON, form data or query argument. 98 | 99 | #### Example 100 | 101 | ```shell 102 | $ http POST https://microblog.pub/api/like Authorization:'Bearer ' id=http://activity-iri.tld 103 | ``` 104 | 105 | #### Response 106 | 107 | ```json 108 | { 109 | "activity": "https://microblog.pub/outbox/" 110 | } 111 | ``` 112 | 113 | ### POST /api/boost{?id} 114 | 115 | Boosts/Announces the given activity. 116 | 117 | Answers a **201** (Created) status code. 118 | 119 | You can pass the `id` via JSON, form data or query argument. 120 | 121 | #### Example 122 | 123 | ```shell 124 | $ http POST https://microblog.pub/api/boost Authorization:'Bearer ' id=http://activity-iri.tld 125 | ``` 126 | 127 | #### Response 128 | 129 | ```json 130 | { 131 | "activity": "https://microblog.pub/outbox/" 132 | } 133 | ``` 134 | 135 | ### POST /api/block{?actor} 136 | 137 | Blocks the given actor, all activities from this actor will be dropped after that. 
138 | 139 | Answers a **201** (Created) status code. 140 | 141 | You can pass the `id` via JSON, form data or query argument. 142 | 143 | #### Example 144 | 145 | ```shell 146 | $ http POST https://microblog.pub/api/block Authorization:'Bearer ' actor=http://actor-iri.tld/ 147 | ``` 148 | 149 | #### Response 150 | 151 | ```json 152 | { 153 | "activity": "https://microblog.pub/outbox/" 154 | } 155 | ``` 156 | 157 | ### POST /api/follow{?actor} 158 | 159 | Follows the given actor. 160 | 161 | Answers a **201** (Created) status code. 162 | 163 | You can pass the `id` via JSON, form data or query argument. 164 | 165 | #### Example 166 | 167 | ```shell 168 | $ http POST https://microblog.pub/api/follow Authorization:'Bearer ' actor=http://actor-iri.tld/ 169 | ``` 170 | 171 | #### Response 172 | 173 | ```json 174 | { 175 | "activity": "https://microblog.pub/outbox/" 176 | } 177 | ``` 178 | 179 | ### POST /api/new_note{?content,reply} 180 | 181 | Creates a new note. `reply` is the IRI of the "replied" note if any. 182 | 183 | Answers a **201** (Created) status code. 184 | 185 | You can pass the `content` and `reply` via JSON, form data or query argument. 
186 | 187 | #### Example 188 | 189 | ```shell 190 | $ http POST https://microblog.pub/api/new_note Authorization:'Bearer ' content=hello 191 | ``` 192 | 193 | #### Response 194 | 195 | ```json 196 | { 197 | "activity": "https://microblog.pub/outbox/" 198 | } 199 | ``` 200 | 201 | 202 | ### GET /api/stream 203 | 204 | 205 | #### Example 206 | 207 | ```shell 208 | $ http GET https://microblog.pub/api/stream Authorization:'Bearer ' 209 | ``` 210 | 211 | #### Response 212 | 213 | ```json 214 | ``` 215 | 216 | 217 | -------------------------------------------------------------------------------- /templates/new.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block title %}New - {{ config.NAME }}{% endblock %} 4 | {% block header %} 5 | 6 | {% endblock %} 7 | {% block content %} 8 |
9 |
10 | {% if thread %} 11 |

Replying to {{ content }}

12 | {{ utils.display_thread(thread) }} 13 | {% else %} 14 |
15 | {% if request.args.get("question") == "1" %} 16 | NoteQuestion 17 | {% else %} 18 | NoteQuestion 19 | {% endif %} 20 |
21 | {% endif %} 22 |
23 | 24 | 25 | 30 | {% if reply %}{% endif %} 31 | 32 |

33 | 34 |

35 | {% for emoji in emojis %} 36 | {{ emoji | emojify | safe }} 37 | {% endfor %} 38 | {% for emoji in custom_emojis %} 39 | {{emoji.name}} 40 | {% endfor %} 41 |

42 | 43 | 44 | 45 |

46 | 47 |

48 |

49 | 50 |

51 | 52 |

53 | 54 | 55 | ask browser for location 56 |

57 |

58 | 59 |

60 | 61 | {% if request.args.get("question") == "1" %} 62 |
63 |

Open for:

74 | 75 |

79 | 80 | {% for i in range(4) %} 81 |

82 | {% endfor %} 83 | 84 |
85 | {% endif %} 86 | 87 | 88 |
89 | 90 | 91 |
92 | 93 | {% endblock %} 139 | -------------------------------------------------------------------------------- /core/outbox.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from datetime import datetime 3 | from functools import singledispatch 4 | from typing import Any 5 | from typing import Dict 6 | 7 | from little_boxes import activitypub as ap 8 | 9 | from core.activitypub import handle_replies 10 | from core.db import find_one_activity 11 | from core.db import update_many_activities 12 | from core.db import update_one_activity 13 | from core.meta import MetaKey 14 | from core.meta import by_object_id 15 | from core.meta import by_type 16 | from core.meta import inc 17 | from core.meta import upsert 18 | from core.tasks import Tasks 19 | 20 | _logger = logging.getLogger(__name__) 21 | 22 | _NewMeta = Dict[str, Any] 23 | 24 | 25 | @singledispatch 26 | def process_outbox(activity: ap.BaseActivity, new_meta: _NewMeta) -> None: 27 | _logger.warning(f"skipping {activity!r}") 28 | return None 29 | 30 | 31 | @process_outbox.register 32 | def _delete_process_outbox(delete: ap.Delete, new_meta: _NewMeta) -> None: 33 | _logger.info(f"process_outbox activity={delete!r}") 34 | obj_id = delete.get_object_id() 35 | 36 | # Flag everything referencing the deleted object as deleted (except the Delete activity itself) 37 | update_many_activities( 38 | {**by_object_id(obj_id), "remote_id": {"$ne": delete.id}}, 39 | upsert({MetaKey.DELETED: True, MetaKey.UNDO: True}), 40 | ) 41 | 42 | # If the deleted activity was in DB, decrease some threads-related counter 43 | data = find_one_activity( 44 | {**by_object_id(obj_id), **by_type(ap.ActivityType.CREATE)} 45 | ) 46 | _logger.info(f"found local copy of deleted activity: {data}") 47 | if data: 48 | obj = ap.parse_activity(data["activity"]).get_object() 49 | _logger.info(f"obj={obj!r}") 50 | in_reply_to = obj.get_in_reply_to() 51 | if in_reply_to: 52 | update_one_activity( 53 | 
{**by_type(ap.ActivityType.CREATE), **by_object_id(in_reply_to)}, 54 | {"$inc": {"meta.count_reply": -1, "meta.count_direct_reply": -1}}, 55 | ) 56 | 57 | 58 | @process_outbox.register 59 | def _update_process_outbox(update: ap.Update, new_meta: _NewMeta) -> None: 60 | _logger.info(f"process_outbox activity={update!r}") 61 | 62 | obj = update._data["object"] 63 | 64 | update_prefix = "activity.object." 65 | to_update: Dict[str, Any] = {"$set": dict(), "$unset": dict()} 66 | to_update["$set"][f"{update_prefix}updated"] = ( 67 | datetime.utcnow().replace(microsecond=0).isoformat() + "Z" 68 | ) 69 | for k, v in obj.items(): 70 | if k in ["id", "type"]: 71 | continue 72 | if v is None: 73 | to_update["$unset"][f"{update_prefix}{k}"] = "" 74 | else: 75 | to_update["$set"][f"{update_prefix}{k}"] = v 76 | 77 | if len(to_update["$unset"]) == 0: 78 | del to_update["$unset"] 79 | 80 | _logger.info(f"updating note from outbox {obj!r} {to_update}") 81 | update_one_activity({"activity.object.id": obj["id"]}, to_update) 82 | # FIXME(tsileo): should send an Update (but not a partial one, to all the note's recipients 83 | # (create a new Update with the result of the update, and send it without saving it?) 
84 | 85 | 86 | @process_outbox.register 87 | def _create_process_outbox(create: ap.Create, new_meta: _NewMeta) -> None: 88 | _logger.info(f"process_outbox activity={create!r}") 89 | handle_replies(create) 90 | 91 | 92 | @process_outbox.register 93 | def _announce_process_outbox(announce: ap.Announce, new_meta: _NewMeta) -> None: 94 | _logger.info(f"process_outbox activity={announce!r}") 95 | 96 | obj = announce.get_object() 97 | if obj.has_type(ap.ActivityType.QUESTION): 98 | Tasks.fetch_remote_question(obj) 99 | 100 | Tasks.cache_object(announce.id) 101 | 102 | update_one_activity( 103 | {**by_object_id(obj.id), **by_type(ap.ActivityType.CREATE)}, 104 | upsert({MetaKey.BOOSTED: announce.id}), 105 | ) 106 | 107 | 108 | @process_outbox.register 109 | def _like_process_outbox(like: ap.Like, new_meta: _NewMeta) -> None: 110 | _logger.info(f"process_outbox activity={like!r}") 111 | 112 | obj = like.get_object() 113 | if obj.has_type(ap.ActivityType.QUESTION): 114 | Tasks.fetch_remote_question(obj) 115 | 116 | # Cache the object for display on the "Liked" public page 117 | Tasks.cache_object(like.id) 118 | 119 | update_one_activity( 120 | {**by_object_id(obj.id), **by_type(ap.ActivityType.CREATE)}, 121 | {**inc(MetaKey.COUNT_LIKE, 1), **upsert({MetaKey.LIKED: like.id})}, 122 | ) 123 | 124 | 125 | @process_outbox.register 126 | def _undo_process_outbox(undo: ap.Undo, new_meta: _NewMeta) -> None: 127 | _logger.info(f"process_outbox activity={undo!r}") 128 | obj = undo.get_object() 129 | update_one_activity({"remote_id": obj.id}, {"$set": {"meta.undo": True}}) 130 | 131 | # Undo Like 132 | if obj.has_type(ap.ActivityType.LIKE): 133 | liked = obj.get_object_id() 134 | update_one_activity( 135 | {**by_object_id(liked), **by_type(ap.ActivityType.CREATE)}, 136 | {**inc(MetaKey.COUNT_LIKE, -1), **upsert({MetaKey.LIKED: False})}, 137 | ) 138 | 139 | elif obj.has_type(ap.ActivityType.ANNOUNCE): 140 | announced = obj.get_object_id() 141 | update_one_activity( 142 | 
{**by_object_id(announced), **by_type(ap.ActivityType.CREATE)}, 143 | upsert({MetaKey.BOOSTED: False}), 144 | ) 145 | 146 | # Undo Follow (undo new following) 147 | elif obj.has_type(ap.ActivityType.FOLLOW): 148 | pass 149 | # do nothing 150 | -------------------------------------------------------------------------------- /config.py: -------------------------------------------------------------------------------- 1 | import mimetypes 2 | import os 3 | import subprocess 4 | from datetime import datetime 5 | from enum import Enum 6 | from pathlib import Path 7 | 8 | import yaml 9 | from bleach import linkify 10 | from itsdangerous import JSONWebSignatureSerializer 11 | from little_boxes import strtobool 12 | from little_boxes.activitypub import CTX_AS as AP_DEFAULT_CTX 13 | from pymongo import MongoClient 14 | 15 | import sass 16 | from utils.emojis import _load_emojis 17 | from utils.key import KEY_DIR 18 | from utils.key import get_key 19 | from utils.key import get_secret_key 20 | from utils.media import MediaCache 21 | 22 | ROOT_DIR = Path(__file__).parent.absolute() 23 | 24 | 25 | class ThemeStyle(Enum): 26 | LIGHT = "light" 27 | DARK = "dark" 28 | 29 | 30 | DEFAULT_THEME_STYLE = ThemeStyle.LIGHT.value 31 | 32 | DEFAULT_THEME_PRIMARY_COLOR = { 33 | ThemeStyle.LIGHT: "#1d781d", # Green 34 | ThemeStyle.DARK: "#33ff00", # Bright green 35 | } 36 | 37 | 38 | VERSION = ( 39 | subprocess.check_output(["git", "describe", "--always"]).split()[0].decode("utf-8") 40 | ) 41 | VERSION_DATE = ( 42 | subprocess.check_output(["git", "show", VERSION]) 43 | .decode() 44 | .splitlines()[2] 45 | .split("Date:")[-1] 46 | .strip() 47 | ) 48 | 49 | DEBUG_MODE = strtobool(os.getenv("MICROBLOGPUB_DEBUG", "false")) 50 | 51 | HEADERS = [ 52 | "application/activity+json", 53 | "application/ld+json;profile=https://www.w3.org/ns/activitystreams", 54 | 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"', 55 | "application/ld+json", 56 | ] 57 | 58 | 59 | with 
open(os.path.join(KEY_DIR, "me.yml")) as f: 60 | conf = yaml.safe_load(f) 61 | 62 | USERNAME = conf["username"] 63 | NAME = conf["name"] 64 | DOMAIN = conf["domain"] 65 | SCHEME = "https" if conf.get("https", True) else "http" 66 | BASE_URL = SCHEME + "://" + DOMAIN 67 | ID = BASE_URL 68 | SUMMARY = conf["summary"] 69 | ICON_URL = conf["icon_url"] 70 | FAVICON_URL = conf.get("favicon_url", "/static/favicon.png") 71 | PASS = conf["pass"] 72 | 73 | PROFILE_METADATA = conf.get("profile_metadata", {}) 74 | HIDE_FOLLOWING = conf.get("hide_following", True) 75 | 76 | ALIASES = conf.get('aliases', []) 77 | 78 | # Theme-related config 79 | theme_conf = conf.get("theme", {}) 80 | THEME_STYLE = ThemeStyle(theme_conf.get("style", DEFAULT_THEME_STYLE)) 81 | THEME_COLOR = theme_conf.get("color", DEFAULT_THEME_PRIMARY_COLOR[THEME_STYLE]) 82 | 83 | 84 | DEFAULT_CTX = [ 85 | AP_DEFAULT_CTX, 86 | f"{BASE_URL}/microblogpub-0.1.jsonld", 87 | {"@language": "und"}, 88 | ] 89 | 90 | SASS_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "sass") 91 | theme_css = f"$primary-color: {THEME_COLOR};\n" 92 | with open(os.path.join(SASS_DIR, f"{THEME_STYLE.value}.scss")) as f: 93 | theme_css += f.read() 94 | theme_css += "\n" 95 | with open(os.path.join(SASS_DIR, "base_theme.scss")) as f: 96 | raw_css = theme_css + f.read() 97 | CSS = sass.compile(string=raw_css, output_style="compressed") 98 | 99 | 100 | USER_AGENT = f"microblog.pub/{VERSION}; +{BASE_URL}" 101 | 102 | mongo_client = MongoClient( 103 | host=[os.getenv("MICROBLOGPUB_MONGODB_HOST", "localhost:27017")] 104 | ) 105 | 106 | DB_NAME = "{}_{}".format(USERNAME, DOMAIN.replace(".", "_")) 107 | DB = mongo_client[DB_NAME] 108 | GRIDFS = mongo_client[f"{DB_NAME}_gridfs"] 109 | MEDIA_CACHE = MediaCache(GRIDFS, USER_AGENT) 110 | 111 | 112 | def _drop_db(): 113 | if not DEBUG_MODE: 114 | return 115 | 116 | mongo_client.drop_database(DB_NAME) 117 | 118 | 119 | KEY = get_key(ID, ID + "#main-key", USERNAME, DOMAIN) 120 | 121 | 122 | 
JWT_SECRET = get_secret_key("jwt") 123 | JWT = JSONWebSignatureSerializer(JWT_SECRET) 124 | 125 | 126 | def _admin_jwt_token() -> str: 127 | return JWT.dumps( # type: ignore 128 | {"me": "ADMIN", "ts": datetime.now().timestamp()} 129 | ).decode( # type: ignore 130 | "utf-8" 131 | ) 132 | 133 | 134 | ADMIN_API_KEY = get_secret_key("admin_api_key", _admin_jwt_token) 135 | 136 | attachments = [] 137 | if PROFILE_METADATA: 138 | for key, value in PROFILE_METADATA.items(): 139 | attachments.append( 140 | {"type": "PropertyValue", "name": key, "value": linkify(value)} 141 | ) 142 | 143 | MANUALLY_APPROVES_FOLLOWERS = bool(conf.get("manually_approves_followers", False)) 144 | 145 | ME = { 146 | "@context": DEFAULT_CTX, 147 | "type": "Person", 148 | "id": ID, 149 | "following": ID + "/following", 150 | "followers": ID + "/followers", 151 | "featured": ID + "/featured", 152 | "inbox": ID + "/inbox", 153 | "outbox": ID + "/outbox", 154 | "preferredUsername": USERNAME, 155 | "name": NAME, 156 | "summary": SUMMARY, 157 | "endpoints": {}, 158 | "url": ID, 159 | "manuallyApprovesFollowers": MANUALLY_APPROVES_FOLLOWERS, 160 | "attachment": attachments, 161 | "icon": { 162 | "mediaType": mimetypes.guess_type(ICON_URL)[0], 163 | "type": "Image", 164 | "url": ICON_URL, 165 | }, 166 | "publicKey": KEY.to_dict(), 167 | "alsoKnownAs": ALIASES, 168 | } 169 | 170 | # Default emojis, space-separated, update `me.yml` to customize emojis 171 | EMOJIS = "😺 😸 😹 😻 😼 😽 🙀 😿 😾" 172 | if conf.get("emojis"): 173 | EMOJIS = conf["emojis"] 174 | 175 | # Emoji template for the FE 176 | EMOJI_TPL = '{raw}' 177 | if conf.get("emoji_tpl"): 178 | EMOJI_TPL = conf["emoji_tpl"] 179 | 180 | # Hosts blacklist 181 | BLACKLIST = conf.get("blacklist", []) 182 | 183 | # Outbound Webmentions support for public posts 184 | DISABLE_WEBMENTIONS = bool(conf.get("disable_webmentions", False)) 185 | 186 | # Whether replies should be displayed in the stream or not 187 | REPLIES_IN_STREAM = 
bool(conf.get("replies_in_stream", False)) 188 | 189 | # By default, we keep 14 of inbox data ; outbox is kept forever (along with bookmarked stuff, outbox replies, liked...) 190 | DAYS_TO_KEEP = int(conf.get("days_to_keep", 14)) 191 | 192 | # Load custom emojis (stored in static/emojis) 193 | _load_emojis(ROOT_DIR, BASE_URL) 194 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # microblog.pub 2 | 3 |

4 | microblog.pub 7 |

8 |

A self-hosted, single-user, ActivityPub powered microblog.

9 |

10 | Build Status 11 | #microblog.pub on Matrix 12 | License 13 | Code style: black 14 |

15 | 16 | **Still in early development/I do not recommend running an instance yet.** 17 | 18 | 19 | 20 | ## Features 21 | 22 | - Implements a basic [ActivityPub](https://activitypub.rocks/) server (with federation) 23 | - S2S (Server to Server) and C2S (Client to Server) protocols 24 | - Compatible with [Mastodon](https://joinmastodon.org/) and others ([Pleroma](https://pleroma.social/), Misskey, Plume, PixelFed, Hubzilla...) 25 | - Exposes your outbox as a basic microblog 26 | - Supports all content types from the Fediverse (`Note`, `Article`, `Page`, `Video`, `Image`, `Question`...) 27 | - Markdown support 28 | - Server-side code syntax highlighting 29 | - Comes with an admin UI with notifications and the stream of people you follow 30 | - Private "bookmark" support 31 | - List support 32 | - Allows you to attach files to your notes 33 | - Custom emojis support 34 | - Cares about your privacy 35 | - The image upload endpoint strips EXIF meta data before storing the file 36 | - Every attachment/media is cached (or proxied) by the server 37 | - No JavaScript, **that's it**. Even the admin UI is pure HTML/CSS 38 | - (well, except for the Emoji picker within the admin, but it's only a few lines of hand-written JavaScript) 39 | - Easy to customize (the theme is written in Sass) 40 | - mobile-friendly theme 41 | - with dark and light versions 42 | - IndieWeb citizen 43 | - Microformats aware (exports `h-feed`, `h-entry`, `h-cards`, ...) 
44 | - Exports a feed in the HTML that is WebSub compatible 45 | - Partial [Micropub](https://www.w3.org/TR/micropub/) support ([implementation report](https://micropub.rocks/implementation-reports/servers/416/s0BDEXZiX805btoa47sz)) 46 | - Implements [IndieAuth](https://indieauth.spec.indieweb.org/) endpoints (authorization and token endpoint) 47 | - You can use your ActivityPub identity to log in to other websites/apps (with U2F support) 48 | - Sends [Webmentions](https://www.w3.org/TR/webmention/) to linked websites (only for public notes) 49 | - Exports RSS/Atom/[JSON](https://jsonfeed.org/) feeds 50 | - Your stream/timeline is also available in an (authenticated) JSON feed 51 | - Comes with a tiny HTTP API to help with posting new content and reading your inbox/notifications 52 | - Deployable with Docker (Docker compose for everything: dev, test and deployment) 53 | - Focused on testing 54 | - Tested against the [official ActivityPub test suite](https://test.activitypub.rocks/), see [the results](https://activitypub.rocks/implementation-report/) 55 | - [CI runs "federation" tests against two instances](https://d.a4.io/tsileo/microblog.pub) 56 | - Project is running 2 up-to-date instances ([here](https://microblog.pub) and [there](https://a4.io)) 57 | - Manually tested against other major platforms 58 | 59 | 60 | ## User Guide 61 | 62 | Remember that _microblog.pub_ is still in early development. 63 | 64 | The easiest and recommended way to run _microblog.pub_ in production is to use the provided docker-compose config. 65 | 66 | First install [Docker](https://docs.docker.com/install/) and [Docker Compose](https://docs.docker.com/compose/install/). 67 | Python is not needed on the host system. 68 | 69 | Note that all the generated data (config included) will be stored on the host (i.e. not only in Docker) in `config/` and `data/`. 
70 | 71 | ### Installation 72 | 73 | ```shell 74 | $ git clone https://github.com/tsileo/microblog.pub 75 | $ cd microblog.pub 76 | $ make config 77 | ``` 78 | 79 | Once the initial configuration is done, you can still tweak the config by editing `config/me.yml` directly. 80 | 81 | 82 | ### Deployment 83 | 84 | To spawn the docker-compose project (running this command will also update _microblog.pub_ to latest and restart everything if it's already running): 85 | 86 | ```shell 87 | $ make run 88 | ``` 89 | 90 | By default, the server will listen on `localhost:5005` (http://localhost:5005 should work if you're running locally). 91 | 92 | For production, you need to set up a reverse proxy (nginx, caddy) to forward your domain to the local server 93 | (and check [certbot](https://certbot.eff.org/) for getting a free TLS certificate). 94 | 95 | ### Backup 96 | 97 | The easiest way to back up all of your data is to back up the `microblog.pub/` directory directly (that's what I do and I have been able to restore super easily). 98 | It should be safe to copy the directory while the Docker compose project is running. 99 | 100 | 101 | ## Development 102 | 103 | The project requires Python3.7+. 104 | 105 | The most convenient way to hack on _microblog.pub_ is to run the Python server on the host directly, and everything else in Docker. 106 | 107 | ```shell 108 | # One-time setup (in a new virtual env) 109 | $ pip install -r requirements.txt 110 | # Start MongoDB and poussetaches 111 | $ make poussetaches 112 | $ env POUSSETACHES_AUTH_KEY="" docker-compose -f docker-compose-dev.yml up -d 113 | # Run the server locally 114 | $ FLASK_DEBUG=1 MICROBLOGPUB_DEBUG=1 FLASK_APP=app.py POUSSETACHES_AUTH_KEY="" flask run -p 5005 --with-threads 115 | ``` 116 | 117 | 118 | ## Contributions 119 | 120 | Contributions/PRs are welcome, please open an issue to start a discussion before you start any work. 
121 | -------------------------------------------------------------------------------- /blueprints/indieauth.py: -------------------------------------------------------------------------------- 1 | import binascii 2 | import os 3 | from datetime import datetime 4 | from datetime import timedelta 5 | from urllib.parse import urlencode 6 | 7 | import flask 8 | import mf2py 9 | from flask import Response 10 | from flask import abort 11 | from flask import redirect 12 | from flask import render_template 13 | from flask import request 14 | from flask import session 15 | from flask import url_for 16 | from itsdangerous import BadSignature 17 | 18 | from config import DB 19 | from config import JWT 20 | from core.shared import _get_ip 21 | from core.shared import htmlify 22 | from core.shared import jsonify 23 | from core.shared import login_required 24 | 25 | blueprint = flask.Blueprint("indieauth", __name__) 26 | 27 | 28 | def build_auth_resp(payload): 29 | if request.headers.get("Accept") == "application/json": 30 | return jsonify(payload) 31 | return Response( 32 | status=200, 33 | headers={"Content-Type": "application/x-www-form-urlencoded"}, 34 | response=urlencode(payload), 35 | ) 36 | 37 | 38 | def _get_prop(props, name, default=None): 39 | if name in props: 40 | items = props.get(name) 41 | if isinstance(items, list): 42 | return items[0] 43 | return items 44 | return default 45 | 46 | 47 | def get_client_id_data(url): 48 | # FIXME(tsileo): ensure not localhost via `little_boxes.urlutils.is_url_valid` 49 | data = mf2py.parse(url=url) 50 | for item in data["items"]: 51 | if "h-x-app" in item["type"] or "h-app" in item["type"]: 52 | props = item.get("properties", {}) 53 | print(props) 54 | return dict( 55 | logo=_get_prop(props, "logo"), 56 | name=_get_prop(props, "name"), 57 | url=_get_prop(props, "url"), 58 | ) 59 | return dict(logo=None, name=url, url=url) 60 | 61 | 62 | @blueprint.route("/indieauth/flow", methods=["POST"]) 63 | @login_required 64 | def 
indieauth_flow(): 65 | auth = dict( 66 | scope=" ".join(request.form.getlist("scopes")), 67 | me=request.form.get("me"), 68 | client_id=request.form.get("client_id"), 69 | state=request.form.get("state"), 70 | redirect_uri=request.form.get("redirect_uri"), 71 | response_type=request.form.get("response_type"), 72 | ts=datetime.now().timestamp(), 73 | code=binascii.hexlify(os.urandom(8)).decode("utf-8"), 74 | verified=False, 75 | ) 76 | 77 | # XXX(tsileo): a whitelist for me values? 78 | 79 | # TODO(tsileo): redirect_uri checks 80 | if not auth["redirect_uri"]: 81 | abort(400) 82 | 83 | DB.indieauth.insert_one(auth) 84 | 85 | # FIXME(tsileo): fetch client ID and validate redirect_uri 86 | red = f'{auth["redirect_uri"]}?code={auth["code"]}&state={auth["state"]}&me={auth["me"]}' 87 | return redirect(red) 88 | 89 | 90 | @blueprint.route("/indieauth", methods=["GET", "POST"]) 91 | def indieauth_endpoint(): 92 | if request.method == "GET": 93 | if not session.get("logged_in"): 94 | return redirect(url_for("admin.admin_login", redirect=request.url)) 95 | 96 | me = request.args.get("me") 97 | # FIXME(tsileo): ensure me == ID 98 | client_id = request.args.get("client_id") 99 | redirect_uri = request.args.get("redirect_uri") 100 | state = request.args.get("state", "") 101 | response_type = request.args.get("response_type", "id") 102 | scope = request.args.get("scope", "").split() 103 | 104 | print("STATE", state) 105 | return htmlify( 106 | render_template( 107 | "indieauth_flow.html", 108 | client=get_client_id_data(client_id), 109 | scopes=scope, 110 | redirect_uri=redirect_uri, 111 | state=state, 112 | response_type=response_type, 113 | client_id=client_id, 114 | me=me, 115 | ) 116 | ) 117 | 118 | # Auth verification via POST 119 | code = request.form.get("code") 120 | redirect_uri = request.form.get("redirect_uri") 121 | client_id = request.form.get("client_id") 122 | 123 | ip, geoip = _get_ip() 124 | 125 | auth = DB.indieauth.find_one_and_update( 126 | { 127 | "code": 
code, 128 | "redirect_uri": redirect_uri, 129 | "client_id": client_id, 130 | "verified": False, 131 | }, 132 | { 133 | "$set": { 134 | "verified": True, 135 | "verified_by": "id", 136 | "verified_at": datetime.now().timestamp(), 137 | "ip_address": ip, 138 | "geoip": geoip, 139 | } 140 | }, 141 | ) 142 | print(auth) 143 | print(code, redirect_uri, client_id) 144 | 145 | # Ensure the code is recent 146 | if (datetime.now() - datetime.fromtimestamp(auth["ts"])) > timedelta(minutes=5): 147 | abort(400) 148 | 149 | if not auth: 150 | abort(403) 151 | return 152 | 153 | session["logged_in"] = True 154 | me = auth["me"] 155 | state = auth["state"] 156 | scope = auth["scope"] 157 | print("STATE", state) 158 | return build_auth_resp({"me": me, "state": state, "scope": scope}) 159 | 160 | 161 | @blueprint.route("/token", methods=["GET", "POST"]) 162 | def token_endpoint(): 163 | # Generate a new token with the returned access code 164 | if request.method == "POST": 165 | code = request.form.get("code") 166 | me = request.form.get("me") 167 | redirect_uri = request.form.get("redirect_uri") 168 | client_id = request.form.get("client_id") 169 | 170 | now = datetime.now() 171 | ip, geoip = _get_ip() 172 | 173 | # This query ensure code, client_id, redirect_uri and me are matching with the code request 174 | auth = DB.indieauth.find_one_and_update( 175 | { 176 | "code": code, 177 | "me": me, 178 | "redirect_uri": redirect_uri, 179 | "client_id": client_id, 180 | "verified": False, 181 | }, 182 | { 183 | "$set": { 184 | "verified": True, 185 | "verified_by": "code", 186 | "verified_at": now.timestamp(), 187 | "ip_address": ip, 188 | "geoip": geoip, 189 | } 190 | }, 191 | ) 192 | 193 | if not auth: 194 | abort(403) 195 | 196 | scope = auth["scope"].split() 197 | 198 | # Ensure there's at least one scope 199 | if not len(scope): 200 | abort(400) 201 | 202 | # Ensure the code is recent 203 | if (now - datetime.fromtimestamp(auth["ts"])) > timedelta(minutes=5): 204 | abort(400) 205 
| 206 | payload = dict(me=me, client_id=client_id, scope=scope, ts=now.timestamp()) 207 | token = JWT.dumps(payload).decode("utf-8") 208 | DB.indieauth.update_one( 209 | {"_id": auth["_id"]}, 210 | { 211 | "$set": { 212 | "token": token, 213 | "token_expires": (now + timedelta(minutes=30)).timestamp(), 214 | } 215 | }, 216 | ) 217 | 218 | return build_auth_resp( 219 | {"me": me, "scope": auth["scope"], "access_token": token} 220 | ) 221 | 222 | # Token verification 223 | token = request.headers.get("Authorization").replace("Bearer ", "") 224 | try: 225 | payload = JWT.loads(token) 226 | except BadSignature: 227 | abort(403) 228 | 229 | # Check the token expritation (valid for 3 hours) 230 | if (datetime.now() - datetime.fromtimestamp(payload["ts"])) > timedelta( 231 | minutes=180 232 | ): 233 | abort(401) 234 | 235 | return build_auth_resp( 236 | { 237 | "me": payload["me"], 238 | "scope": " ".join(payload["scope"]), 239 | "client_id": payload["client_id"], 240 | } 241 | ) 242 | -------------------------------------------------------------------------------- /core/notifications.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from datetime import datetime 3 | from datetime import timedelta 4 | from datetime import timezone 5 | from functools import singledispatch 6 | from typing import Any 7 | from typing import Dict 8 | 9 | from little_boxes import activitypub as ap 10 | 11 | from config import DB 12 | from config import REPLIES_IN_STREAM 13 | from core.activitypub import is_from_outbox 14 | from core.activitypub import is_local_url 15 | from core.db import find_one_activity 16 | from core.meta import MetaKey 17 | from core.meta import _meta 18 | from core.meta import by_actor 19 | from core.meta import by_object_id 20 | from core.meta import by_type 21 | from core.meta import flag 22 | from core.meta import in_inbox 23 | from core.meta import not_undo 24 | from core.meta import published_after 25 | from 
core.tasks import Tasks 26 | 27 | _logger = logging.getLogger(__name__) 28 | 29 | _NewMeta = Dict[str, Any] 30 | 31 | 32 | def _flag_as_notification(activity: ap.BaseActivity, new_meta: _NewMeta) -> None: 33 | new_meta.update( 34 | {_meta(MetaKey.NOTIFICATION): True, _meta(MetaKey.NOTIFICATION_UNREAD): True} 35 | ) 36 | return None 37 | 38 | 39 | def _set_flag(meta: _NewMeta, meta_key: MetaKey, value: Any = True) -> None: 40 | meta.update({_meta(meta_key): value}) 41 | return None 42 | 43 | 44 | @singledispatch 45 | def set_inbox_flags(activity: ap.BaseActivity, new_meta: _NewMeta) -> None: 46 | _logger.warning(f"skipping {activity!r}") 47 | return None 48 | 49 | 50 | @set_inbox_flags.register 51 | def _accept_set_inbox_flags(activity: ap.Accept, new_meta: _NewMeta) -> None: 52 | """Handle notifications for "accepted" following requests.""" 53 | _logger.info(f"set_inbox_flags activity={activity!r}") 54 | # Check if this actor already follow us back 55 | follows_back = False 56 | follow_query = { 57 | **in_inbox(), 58 | **by_type(ap.ActivityType.FOLLOW), 59 | **by_actor(activity.get_actor()), 60 | **not_undo(), 61 | } 62 | raw_follow = DB.activities.find_one(follow_query) 63 | if raw_follow: 64 | follows_back = True 65 | 66 | DB.activities.update_many( 67 | follow_query, {"$set": {_meta(MetaKey.NOTIFICATION_FOLLOWS_BACK): True}} 68 | ) 69 | 70 | # This Accept will be a "You started following $actor" notification 71 | _flag_as_notification(activity, new_meta) 72 | _set_flag(new_meta, MetaKey.GC_KEEP) 73 | _set_flag(new_meta, MetaKey.NOTIFICATION_FOLLOWS_BACK, follows_back) 74 | return None 75 | 76 | 77 | @set_inbox_flags.register 78 | def _reject_set_inbox_flags(activity: ap.Reject, new_meta: _NewMeta) -> None: 79 | """Handle notifications for "rejected" following requests.""" 80 | _logger.info(f"set_inbox_flags activity={activity!r}") 81 | # This Accept will be a "You started following $actor" notification 82 | _flag_as_notification(activity, new_meta) 83 | 
_set_flag(new_meta, MetaKey.GC_KEEP) 84 | return None 85 | 86 | 87 | @set_inbox_flags.register 88 | def _follow_set_inbox_flags(activity: ap.Follow, new_meta: _NewMeta) -> None: 89 | """Handle notification for new followers.""" 90 | _logger.info(f"set_inbox_flags activity={activity!r}") 91 | # Check if we're already following this actor 92 | follows_back = False 93 | accept_query = { 94 | **in_inbox(), 95 | **by_type(ap.ActivityType.ACCEPT), 96 | **by_actor(activity.get_actor()), 97 | **not_undo(), 98 | } 99 | raw_accept = DB.activities.find_one(accept_query) 100 | if raw_accept: 101 | follows_back = True 102 | 103 | DB.activities.update_many( 104 | accept_query, {"$set": {_meta(MetaKey.NOTIFICATION_FOLLOWS_BACK): True}} 105 | ) 106 | 107 | # This Follow will be a "$actor started following you" notification 108 | _flag_as_notification(activity, new_meta) 109 | _set_flag(new_meta, MetaKey.GC_KEEP) 110 | _set_flag(new_meta, MetaKey.NOTIFICATION_FOLLOWS_BACK, follows_back) 111 | return None 112 | 113 | 114 | @set_inbox_flags.register 115 | def _like_set_inbox_flags(activity: ap.Like, new_meta: _NewMeta) -> None: 116 | _logger.info(f"set_inbox_flags activity={activity!r}") 117 | # Is it a Like of local acitivty/from the outbox 118 | if is_from_outbox(activity.get_object()): 119 | # Flag it as a notification 120 | _flag_as_notification(activity, new_meta) 121 | 122 | # Cache the object (for display on the notifcation page) 123 | Tasks.cache_object(activity.id) 124 | 125 | # Also set the "keep mark" for the GC (as we want to keep it forever) 126 | _set_flag(new_meta, MetaKey.GC_KEEP) 127 | 128 | return None 129 | 130 | 131 | @set_inbox_flags.register 132 | def _announce_set_inbox_flags(activity: ap.Announce, new_meta: _NewMeta) -> None: 133 | _logger.info(f"set_inbox_flags activity={activity!r}") 134 | obj = activity.get_object() 135 | # Is it a Annnounce/boost of local acitivty/from the outbox 136 | if is_from_outbox(obj): 137 | # Flag it as a notification 138 | 
_flag_as_notification(activity, new_meta) 139 | 140 | # Also set the "keep mark" for the GC (as we want to keep it forever) 141 | _set_flag(new_meta, MetaKey.GC_KEEP) 142 | 143 | # Dedup boosts (it's annoying to see the same note multipe times on the same page) 144 | if not find_one_activity( 145 | { 146 | **in_inbox(), 147 | **by_type([ap.ActivityType.CREATE, ap.ActivityType.ANNOUNCE]), 148 | **by_object_id(obj.id), 149 | **flag(MetaKey.STREAM, True), 150 | **published_after(datetime.now(timezone.utc) - timedelta(hours=12)), 151 | } 152 | ): 153 | # Display it in the stream only it not there already (only looking at the last 12 hours) 154 | _set_flag(new_meta, MetaKey.STREAM) 155 | 156 | return None 157 | 158 | 159 | @set_inbox_flags.register 160 | def _undo_set_inbox_flags(activity: ap.Undo, new_meta: _NewMeta) -> None: 161 | _logger.info(f"set_inbox_flags activity={activity!r}") 162 | obj = activity.get_object() 163 | 164 | if obj.has_type(ap.ActivityType.FOLLOW): 165 | # Flag it as a noticiation (for the "$actor unfollowed you" 166 | _flag_as_notification(activity, new_meta) 167 | 168 | # Also set the "keep mark" for the GC (as we want to keep it forever) 169 | _set_flag(new_meta, MetaKey.GC_KEEP) 170 | 171 | return None 172 | 173 | 174 | @set_inbox_flags.register 175 | def _create_set_inbox_flags(activity: ap.Create, new_meta: _NewMeta) -> None: 176 | _logger.info(f"set_inbox_flags activity={activity!r}") 177 | obj = activity.get_object() 178 | 179 | _set_flag(new_meta, MetaKey.POLL_ANSWER, False) 180 | 181 | in_reply_to = obj.get_in_reply_to() 182 | 183 | # Check if it's a local reply 184 | if in_reply_to and is_local_url(in_reply_to): 185 | # TODO(tsileo): fetch the reply to check for poll answers more precisely 186 | # reply_of = ap.fetch_remote_activity(in_reply_to) 187 | 188 | # Ensure it's not a poll answer 189 | if obj.name and not obj.content: 190 | _set_flag(new_meta, MetaKey.POLL_ANSWER) 191 | return None 192 | 193 | # Flag it as a notification 194 | 
_flag_as_notification(activity, new_meta) 195 | 196 | # Also set the "keep mark" for the GC (as we want to keep it forever) 197 | _set_flag(new_meta, MetaKey.GC_KEEP) 198 | 199 | return None 200 | 201 | # Check for mention 202 | for mention in obj.get_mentions(): 203 | if mention.href and is_local_url(mention.href): 204 | # Flag it as a notification 205 | _flag_as_notification(activity, new_meta) 206 | 207 | # Also set the "keep mark" for the GC (as we want to keep it forever) 208 | _set_flag(new_meta, MetaKey.GC_KEEP) 209 | 210 | if not in_reply_to or ( 211 | REPLIES_IN_STREAM and obj.get_actor().id in ap.get_backend().following() 212 | ): 213 | # A good candidate for displaying in the stream 214 | _set_flag(new_meta, MetaKey.STREAM) 215 | 216 | return None 217 | -------------------------------------------------------------------------------- /core/shared.py: -------------------------------------------------------------------------------- 1 | import gzip 2 | import json 3 | import os 4 | from functools import lru_cache 5 | from functools import wraps 6 | from typing import Any 7 | 8 | import flask 9 | from bson.objectid import ObjectId 10 | from flask import Response 11 | from flask import current_app as app 12 | from flask import redirect 13 | from flask import request 14 | from flask import session 15 | from flask import url_for 16 | from flask_wtf.csrf import CSRFProtect 17 | from little_boxes import activitypub as ap 18 | from poussetaches import PousseTaches 19 | 20 | import config 21 | from config import DB 22 | from config import ME 23 | from core import activitypub 24 | from core.db import find_activities 25 | from core.meta import MetaKey 26 | from core.meta import by_object_id 27 | from core.meta import by_type 28 | from core.meta import flag 29 | from core.meta import not_deleted 30 | 31 | # _Response = Union[flask.Response, werkzeug.wrappers.Response, str, Any] 32 | _Response = Any 33 | 34 | p = PousseTaches( 35 | 
p = PousseTaches(
    os.getenv("MICROBLOGPUB_POUSSETACHES_HOST", "http://localhost:7991"),
    os.getenv("MICROBLOGPUB_INTERNAL_HOST", "http://localhost:5000"),
)


csrf = CSRFProtect()


back = activitypub.MicroblogPubBackend()
ap.use_backend(back)

MY_PERSON = ap.Person(**ME)


@lru_cache(512)
def _encode_resp(resp: str, use_gzip: bool):
    """Build the (payload, headers) pair for a response body.

    Cached on both the body and the gzip decision, so a client that does
    not accept gzip can never be handed a cached gzipped payload.
    """
    headers = {"Cache-Control": "max-age=0, private, must-revalidate"}
    if use_gzip:
        return (
            gzip.compress(resp.encode(), compresslevel=6),
            {**headers, "Vary": "Accept-Encoding", "Content-Encoding": "gzip"},
        )
    return resp, headers


def build_resp(resp):
    """Encode the response to gzip if supported by the client.

    FIX: this function used to carry the `lru_cache` itself, keyed on
    `resp` alone, while the result depended on the request's
    Accept-Encoding header — whichever encoding the *first* client
    negotiated for a given body was replayed to every later client.
    The encoding decision is now part of the cache key (`_encode_resp`).
    """
    accept_encoding = request.headers.get("Accept-Encoding", "")
    return _encode_resp(resp, "gzip" in accept_encoding.lower())


def jsonify(data, content_type="application/json"):
    """Return a Flask Response with a JSON body (gzipped when supported)."""
    resp, headers = build_resp(json.dumps(data))
    return Response(headers={**headers, "Content-Type": content_type}, response=resp)


def htmlify(data):
    """Return a Flask Response with an HTML body (gzipped when supported)."""
    resp, headers = build_resp(data)
    return Response(
        response=resp, headers={**headers, "Content-Type": "text/html; charset=utf-8"}
    )


def activitypubify(**data):
    """Return an ActivityPub (application/activity+json) response.

    Adds the default JSON-LD @context when the caller did not provide one.
    """
    if "@context" not in data:
        data["@context"] = config.DEFAULT_CTX
    resp, headers = build_resp(json.dumps(data))
    return Response(
        response=resp, headers={**headers, "Content-Type": "application/activity+json"}
    )


def is_api_request():
    """Return True when the Accept header asks for JSON/ActivityPub."""
    h = request.headers.get("Accept")
    if h is None:
        return False
    # Only the first (most-preferred) media type is considered.
    h = h.split(",")[0]
    return h in config.HEADERS or h == "application/json"


def add_response_headers(headers=None):
    """This decorator adds the headers passed in to the response.

    FIX: the default used to be a mutable `{}`; it was never mutated here,
    but a None sentinel removes the shared-mutable-default footgun.
    """

    def decorator(f):
        @wraps(f)
        def decorated_function(*args, **kwargs):
            resp = flask.make_response(f(*args, **kwargs))
            for header, value in (headers or {}).items():
                resp.headers[header] = value
            return resp

        return decorated_function

    return decorator


def noindex(f):
    """This decorator passes X-Robots-Tag: noindex, nofollow"""
    return add_response_headers({"X-Robots-Tag": "noindex, nofollow"})(f)


def login_required(f):
    """Redirect to the admin login page unless the session is logged in."""

    @wraps(f)
    def decorated_function(*args, **kwargs):
        if not session.get("logged_in"):
            return redirect(url_for("admin.admin_login", next=request.url))
        return f(*args, **kwargs)

    return decorated_function


def _get_ip():
    """Guess the IP address from the request. Only used for security purpose (failed logins or bad payload).

    Geoip will be returned if the "broxy" headers are set (it does Geoip
    using an offline database and append these special headers).
    """
    ip = request.headers.get("X-Forwarded-For", request.remote_addr)
    geoip = None
    if request.headers.get("Broxy-Geoip-Country"):
        # FIX: default the region to "" so a missing region header cannot
        # raise a TypeError during concatenation.
        geoip = (
            request.headers.get("Broxy-Geoip-Country")
            + "/"
            + request.headers.get("Broxy-Geoip-Region", "")
        )
    return ip, geoip


def _build_thread(data, include_children=True, query=None):  # noqa: C901
    """Assemble the whole thread around `data` as a flat, depth-annotated list.

    Gathers (1) activities replying directly to the thread root,
    (2) activities sharing the same thread root parent and (3) cached
    remote replies, builds a reply tree keyed on `inReplyTo`, then
    flattens it depth-first, tagging each node with a `_level` marker.
    """
    if query is None:
        query = {}
    data["_requested"] = True
    app.logger.info(f"_build_thread({data!r})")
    root_id = (
        data["meta"].get(MetaKey.THREAD_ROOT_PARENT.value)
        or data["meta"].get(MetaKey.OBJECT_ID.value)
        or data["remote_id"]
    )

    replies = [data]
    for dat in find_activities(
        {
            **by_object_id(root_id),
            **not_deleted(),
            **by_type(ap.ActivityType.CREATE),
            **query,
        }
    ):
        replies.append(dat)

    for dat in find_activities(
        {
            **flag(MetaKey.THREAD_ROOT_PARENT, root_id),
            **not_deleted(),
            **by_type(ap.ActivityType.CREATE),
            **query,
        }
    ):
        replies.append(dat)

    for dat in DB.replies.find(
        {**flag(MetaKey.THREAD_ROOT_PARENT, root_id), **not_deleted(), **query}
    ):
        # Make a Note/Question/... look like a Create so the template code
        # can treat every thread entry uniformly.
        dat["meta"].update(
            {MetaKey.OBJECT_VISIBILITY.value: dat["meta"][MetaKey.VISIBILITY.value]}
        )
        dat = {
            "activity": {"object": dat["activity"]},
            "meta": dat["meta"],
            "_id": dat["_id"],
        }
        replies.append(dat)

    replies = sorted(replies, key=lambda d: d["meta"]["published"])

    # Index all the IDs in order to build a tree (first occurrence wins,
    # duplicates coming from the overlapping queries are dropped)
    idx = {}
    replies2 = []
    for rep in replies:
        rep_id = rep["activity"]["object"]["id"]
        if rep_id in idx:
            continue
        idx[rep_id] = rep.copy()
        idx[rep_id]["_nodes"] = []
        replies2.append(rep)

    # Build the tree
    for rep in replies2:
        rep_id = rep["activity"]["object"]["id"]
        if rep_id == root_id:
            continue
        reply_of = ap._get_id(rep["activity"]["object"].get("inReplyTo"))
        try:
            idx[reply_of]["_nodes"].append(rep)
        except KeyError:
            # Parent not fetched/cached; orphaned replies are dropped.
            app.logger.info(f"{reply_of} is not there! skipping {rep}")

    # Flatten the tree
    thread = []

    def _flatten(node, level=0):
        node["_level"] = level
        thread.append(node)

        for snode in sorted(
            idx[node["activity"]["object"]["id"]]["_nodes"],
            key=lambda d: d["activity"]["object"]["published"],
        ):
            _flatten(snode, level=level + 1)

    try:
        _flatten(idx[root_id])
    except KeyError:
        app.logger.info(f"{root_id} is not there! skipping")

    return thread


def paginated_query(db, q, limit=25, sort_key="_id"):
    """Run `q` against collection `db` with cursor-based pagination.

    Pagination cursors come from the request's `older_than`/`newer_than`
    query-string args (stringified ObjectIds). Returns
    `(items, older_than, newer_than)`, where the cursors are None when
    there is no further page in that direction.
    """
    older_than = newer_than = None
    query_sort = -1
    first_page = not request.args.get("older_than") and not request.args.get(
        "newer_than"
    )

    query_older_than = request.args.get("older_than")
    query_newer_than = request.args.get("newer_than")

    if query_older_than:
        q["_id"] = {"$lt": ObjectId(query_older_than)}
    elif query_newer_than:
        q["_id"] = {"$gt": ObjectId(query_newer_than)}
        query_sort = 1

    # Fetch one extra document to detect whether another page exists.
    outbox_data = list(db.find(q, limit=limit + 1).sort(sort_key, query_sort))
    outbox_len = len(outbox_data)
    outbox_data = sorted(
        outbox_data[:limit], key=lambda x: str(x[sort_key]), reverse=True
    )

    # FIX: guard the unconditional cursor reads — an empty page used to
    # raise IndexError when a stale cursor was supplied.
    if query_older_than:
        if outbox_data:
            newer_than = str(outbox_data[0]["_id"])
        if outbox_len == limit + 1:
            older_than = str(outbox_data[-1]["_id"])
    elif query_newer_than:
        if outbox_data:
            older_than = str(outbox_data[-1]["_id"])
        if outbox_len == limit + 1:
            newer_than = str(outbox_data[0]["_id"])
    elif first_page and outbox_len == limit + 1:
        older_than = str(outbox_data[-1]["_id"])

    return outbox_data, older_than, newer_than
mimetypes.guess_type(filename.lower()) 23 | if mimetype and mimetype.split("/")[0] in ["image"]: 24 | return True 25 | return False 26 | 27 | 28 | @lru_cache(2048) 29 | def is_video(filename): 30 | mimetype, _ = mimetypes.guess_type(filename.lower()) 31 | if mimetype and mimetype.split("/")[0] in ["video"]: 32 | return True 33 | return False 34 | 35 | 36 | def _load(url: str, user_agent: str) -> Tuple[BytesIO, Optional[str]]: 37 | """Initializes a `PIL.Image` from the URL.""" 38 | out = BytesIO() 39 | with requests.get(url, stream=True, headers={"User-Agent": user_agent}) as resp: 40 | resp.raise_for_status() 41 | 42 | resp.raw.decode_content = True 43 | while 1: 44 | buf = resp.raw.read() 45 | if not buf: 46 | break 47 | out.write(buf) 48 | out.seek(0) 49 | return out, resp.headers.get("content-type") 50 | 51 | 52 | def load(url: str, user_agent: str) -> Image: 53 | """Initializes a `PIL.Image` from the URL.""" 54 | out, _ = _load(url, user_agent) 55 | return Image.open(out) 56 | 57 | 58 | def to_data_uri(img: Image) -> str: 59 | out = BytesIO() 60 | img.save(out, format=img.format) 61 | out.seek(0) 62 | data = base64.b64encode(out.read()).decode("utf-8") 63 | return f"data:{img.get_format_mimetype()};base64,{data}" 64 | 65 | 66 | @unique 67 | class Kind(Enum): 68 | ATTACHMENT = "attachment" 69 | ACTOR_ICON = "actor_icon" 70 | UPLOAD = "upload" 71 | OG_IMAGE = "og" 72 | EMOJI = "emoji" 73 | 74 | 75 | class MediaCache(object): 76 | def __init__(self, gridfs_db: str, user_agent: str) -> None: 77 | self.fs = gridfs.GridFS(gridfs_db) 78 | self.user_agent = user_agent 79 | 80 | def cache_og_image(self, url: str, remote_id: str) -> None: 81 | if self.fs.find_one({"url": url, "kind": Kind.OG_IMAGE.value}): 82 | return 83 | i = load(url, self.user_agent) 84 | # Save the original attachment (gzipped) 85 | i.thumbnail((100, 100)) 86 | with BytesIO() as buf: 87 | with GzipFile(mode="wb", fileobj=buf) as f1: 88 | i.save(f1, format=i.format) 89 | buf.seek(0) 90 | self.fs.put( 
91 | buf, 92 | url=url, 93 | size=100, 94 | content_type=i.get_format_mimetype(), 95 | kind=Kind.OG_IMAGE.value, 96 | remote_id=remote_id, 97 | ) 98 | 99 | def cache_attachment(self, attachment: Dict[str, Any], remote_id: str) -> None: 100 | url = attachment["url"] 101 | 102 | # Ensure it's not already there 103 | if self.fs.find_one( 104 | {"url": url, "kind": Kind.ATTACHMENT.value, "remote_id": remote_id} 105 | ): 106 | return 107 | 108 | # If it's an image, make some thumbnails 109 | if ( 110 | _is_img(url) 111 | or attachment.get("mediaType", "").startswith("image/") 112 | or ap._has_type(attachment.get("type"), ap.ActivityType.IMAGE) 113 | ): 114 | try: 115 | i = load(url, self.user_agent) 116 | # Save the original attachment (gzipped) 117 | with BytesIO() as buf: 118 | f1 = GzipFile(mode="wb", fileobj=buf) 119 | i.save(f1, format=i.format) 120 | f1.close() 121 | buf.seek(0) 122 | self.fs.put( 123 | buf, 124 | url=url, 125 | size=None, 126 | content_type=i.get_format_mimetype(), 127 | kind=Kind.ATTACHMENT.value, 128 | remote_id=remote_id, 129 | ) 130 | # Save a thumbnail (gzipped) 131 | i.thumbnail((720, 720)) 132 | with BytesIO() as buf: 133 | with GzipFile(mode="wb", fileobj=buf) as f1: 134 | i.save(f1, format=i.format) 135 | buf.seek(0) 136 | self.fs.put( 137 | buf, 138 | url=url, 139 | size=720, 140 | content_type=i.get_format_mimetype(), 141 | kind=Kind.ATTACHMENT.value, 142 | remote_id=remote_id, 143 | ) 144 | return 145 | except Exception: 146 | # FIXME(tsileo): logging 147 | pass 148 | 149 | # The attachment is not an image, download and save it anyway 150 | with requests.get( 151 | url, stream=True, headers={"User-Agent": self.user_agent} 152 | ) as resp: 153 | resp.raise_for_status() 154 | with BytesIO() as buf: 155 | with GzipFile(mode="wb", fileobj=buf) as f1: 156 | for chunk in resp.iter_content(chunk_size=2 << 20): 157 | if chunk: 158 | print(len(chunk)) 159 | f1.write(chunk) 160 | buf.seek(0) 161 | self.fs.put( 162 | buf, 163 | url=url, 164 | 
size=None, 165 | content_type=mimetypes.guess_type(url)[0], 166 | kind=Kind.ATTACHMENT.value, 167 | remote_id=remote_id, 168 | ) 169 | 170 | def is_actor_icon_cached(self, url: str) -> bool: 171 | return bool(self.fs.find_one({"url": url, "kind": Kind.ACTOR_ICON.value})) 172 | 173 | def cache_actor_icon(self, url: str) -> None: 174 | if self.is_actor_icon_cached(url): 175 | return 176 | i = load(url, self.user_agent) 177 | for size in [50, 80]: 178 | t1 = i.copy() 179 | t1.thumbnail((size, size)) 180 | with BytesIO() as buf: 181 | with GzipFile(mode="wb", fileobj=buf) as f1: 182 | t1.save(f1, format=i.format) 183 | buf.seek(0) 184 | self.fs.put( 185 | buf, 186 | url=url, 187 | size=size, 188 | content_type=i.get_format_mimetype(), 189 | kind=Kind.ACTOR_ICON.value, 190 | ) 191 | 192 | def is_emoji_cached(self, url: str) -> bool: 193 | return bool(self.fs.find_one({"url": url, "kind": Kind.EMOJI.value})) 194 | 195 | def cache_emoji(self, url: str, iri: str) -> None: 196 | if self.is_emoji_cached(url): 197 | return 198 | i = load(url, self.user_agent) 199 | for size in [25]: 200 | t1 = i.copy() 201 | t1.thumbnail((size, size)) 202 | with BytesIO() as buf: 203 | with GzipFile(mode="wb", fileobj=buf) as f1: 204 | t1.save(f1, format=i.format) 205 | buf.seek(0) 206 | self.fs.put( 207 | buf, 208 | url=url, 209 | size=size, 210 | remote_id=iri, 211 | content_type=i.get_format_mimetype(), 212 | kind=Kind.EMOJI.value, 213 | ) 214 | 215 | def save_upload(self, obuf: BytesIO, filename: str) -> str: 216 | # Remove EXIF metadata 217 | if filename.lower().endswith(".jpg") or filename.lower().endswith(".jpeg"): 218 | obuf.seek(0) 219 | with BytesIO() as buf2: 220 | piexif.remove(obuf.getvalue(), buf2) 221 | obuf.truncate(0) 222 | obuf.write(buf2.getvalue()) 223 | 224 | obuf.seek(0) 225 | mtype = mimetypes.guess_type(filename)[0] 226 | with BytesIO() as gbuf: 227 | with GzipFile(mode="wb", fileobj=gbuf) as gzipfile: 228 | gzipfile.write(obuf.getvalue()) 229 | 230 | gbuf.seek(0) 231 
| oid = self.fs.put( 232 | gbuf, 233 | content_type=mtype, 234 | upload_filename=filename, 235 | kind=Kind.UPLOAD.value, 236 | ) 237 | return str(oid) 238 | 239 | def get_actor_icon(self, url: str, size: int) -> Any: 240 | return self.get_file(url, size, Kind.ACTOR_ICON) 241 | 242 | def get_attachment(self, url: str, size: int) -> Any: 243 | return self.get_file(url, size, Kind.ATTACHMENT) 244 | 245 | def get_file(self, url: str, size: int, kind: Kind) -> Any: 246 | return self.fs.find_one({"url": url, "size": size, "kind": kind.value}) 247 | -------------------------------------------------------------------------------- /sass/base_theme.scss: -------------------------------------------------------------------------------- 1 | .note-container p:first-child { 2 | margin-top: 0; 3 | } 4 | html, body { 5 | height: 100%; 6 | } 7 | 8 | @media only screen and (max-width: 480px) { 9 | #menu-item-following { 10 | display: none; 11 | } 12 | } 13 | body { 14 | background-color: $background-color; 15 | color: $color; 16 | display: flex; 17 | flex-direction: column; 18 | } 19 | .base-container { 20 | flex: 1 0 auto; 21 | } 22 | .footer { 23 | flex-shrink: 0; 24 | } 25 | a, h1, h2, h3, h4, h5, h6 { 26 | color: $color-title-link; 27 | } 28 | a { 29 | text-decoration: none; 30 | } 31 | a:hover { 32 | text-decoration: underline; 33 | } 34 | .gold { 35 | color: $primary-color; 36 | } 37 | .pcolor { 38 | color: $primary-color; 39 | } 40 | .lcolor { 41 | color: $color-light; 42 | } 43 | .older-link, .newer-link, .older-link:hover, .newer-link:hover { 44 | text-decoration: none; 45 | padding: 3px; 46 | } 47 | .older-link { float: left } 48 | .newer-link { float: right } 49 | .clear { clear: both; } 50 | .remote-follow-button { 51 | background: $color-menu-background; 52 | color: $color-light; 53 | text-decoration: none; 54 | padding: 5px 8px; 55 | margin-top: 5px; 56 | border-radius: 2px; 57 | } 58 | .remote-follow-button:hover { 59 | text-decoration: none; 60 | background: 
.remote-follow-button:hover {
  text-decoration: none;
  background: $primary-color;
  color: $background-color;
}
#admin-menu-wrapper {
  padding: 10px;
  margin: 0 auto;
  width: 100%;
  background: $color-menu-background;
  max-width: 720px;

  #admin-menu {
    list-style-type: none;
    display: inline;
    padding: 10px;
    color: $color-light;
    /* FIX: `border-radius-bottom-left/right` are not CSS properties and
       were silently ignored; the long-hand forms are
       `border-bottom-{left,right}-radius`. */
    border-bottom-left-radius: 2px;
    border-bottom-right-radius: 2px;
    .left { float: left; }
    .right { float: right; }
    li {
      a { text-decoration: none; }
      .admin-title {
        text-transform: uppercase;
        font-weight: bold;
      }
      padding-right: 10px;
      .selected, a:hover {
        color: $primary-color;
      }
    }
  }
}
header#header {
  margin-bottom: 70px;

  .title {
    font-size: 1.2em;
    padding-right: 15px;
    color: $color-title-link;
  }
  .title:hover {
    text-decoration: none;
  }
  .subtitle-username {
    color: $color;
  }
  .menu {
    clear: both;
    padding: 0 0 10px 0;
    ul {
      display: inline;
      list-style-type: none;
      padding: 0;
      li {
        float: left;
        margin-bottom: 10px;
        margin-right: 10px;
      }
    }
    a {
      padding: 5px 10px;
      small.badge {
        background-color: $color-menu-background;
        color: $color-light;
        border-radius: 2px;
        margin-left: 5px;
        padding: 3px 5px 0px 5px;
        font-weight: bold;
      }
    }
    a.selected {
      background: $primary-color;
      color: $background-color;
      border-radius: 2px;
      .badge {
        color: $primary-color;
        background: $background-color;
      }
    }
    a:hover {
      background: $primary-color;
      color: $background-color;
      text-decoration: none;
      border-radius: 2px;
      .badge {
        color: $primary-color;
        background: $background-color;
      }
    }
  }
}
/* Main content column and the actor/note cards rendered inside it. */
#container {
  width: 90%;
  max-width: 720px;
  margin: 30px auto;
}
#container #notes {
  margin-top: 20px;
}
.actor-box {
  display: block;
  text-decoration: none;
  margin-bottom: 40px;

  .actor-icon {
    width: 120px;
    border-radius: 2px;
  }

  h3 {
    margin: 0;
  }

  .actor-inline {
    text-overflow: ellipsis;
    white-space: nowrap;
    overflow: hidden;
  }
}
.actor-box-big {
  display: block;
  text-decoration: none;

  .actor-box-wrapper {
    margin-bottom: 40px;

    .actor-icon {
      width: 120px;
      border-radius: 2px;
    }

    h3 {
      margin: 0;
    }
  }
}
.note-box {
  margin-bottom: 70px;
}
.note {
  display: flex;

  .l {
    color: $color-note-link;
  }
  .h-card {
    flex: initial;
    width: 50px;
  }
  .u-photo {
    width: 50px;
    border-radius: 2px;
  }
  .note-wrapper {
    flex: 1;
    padding-left: 15px;
    overflow: hidden;
  }
  .bottom-bar {
    margin-top: 10px;
    display: inline-block;
  }
  .img-attachment {
    max-width: 100%;
    border-radius: 2px;
  }
  h3 {
    font-size: 1.1em;
    color: $color-light;
  }
  strong {
    font-weight: 600;
  }
  .note-container {
    clear: right;
    padding: 10px 0;
    word-break: normal;
  }
}
.color-menu-background {
  background: $color-menu-background;
}
.og-link,
.og-link:hover {
  text-decoration: none;
}
.bar-item-no-hover {
  cursor: default;
  background: $color-menu-background;
  padding: 5px;
  color: $color-light;
  margin-right: 10px;
  border-radius: 2px;
  float: left;
}
.bar-item-no-hover:hover {
  cursor: default;
}
.bar-item-no-bg {
  cursor: default;
  padding: 5px;
  color: $color-light;
  margin-right: 10px;
  border-radius: 2px;
  float: left;
}
.bar-item-no-bg:hover {
  cursor: default;
}
.bar-item-pinned {
  cursor: default;
  background: $color-menu-background;
  color: $color-light;
  padding: 5px;
  margin-right: 5px;
  border-radius: 2px;
}
.bar-item-pinned:hover {
  cursor: default;
}
.bar-item {
  background: $color-menu-background;
  padding: 5px;
  color: $color-light;
  margin-right: 10px;
  float: left;
  border-radius: 2px;
}
.bar-item:hover {
  background: $primary-color;
  color: $background-color;
  text-decoration: none;
}
.bar-item-no-border {
  color: $color-light;
  background: inherit;
  cursor: default;
}
.bar-item-no-border:hover {
  color: $color-light;
  background: inherit;
  cursor: default;
}
.bar-item-reverse {
  background: $primary-color;
  color: $background-color;
  padding: 5px;
  margin-right: 10px;
  float: left;
  border-radius: 2px;
  border: 0;
}
.bar-item-reverse:hover {
  background: $color-menu-background;
  color: $color-light;
}

button.bar-item {
  border: 0;
}
form.action-form {
  display: inline;
}
.perma {
  font-size: 1.25em;
}
.bottom-bar .perma-item {
  margin-right: 5px;
}
.bottom-bar a.bar-item:hover {
  text-decoration: none;
}
.footer > div {
  width: 90%;
  max-width: 720px;
  margin: 40px auto;
}
.footer a, .footer a:hover, .footer a:visited {
  text-decoration: underline;
  color: $color;
}
.summary {
  color: $color-summary;
  font-size: 1.1em;
  margin-top: 10px;
  margin-bottom: 30px;
}
/* FIX: the second selector was misspelled `.summay a:hover`, so summary
   links silently lost this styling on hover. */
.summary a, .summary a:hover {
  color: $color-summary;
  text-decoration: underline;
}
#new { 348 | margin-top: 50px; 349 | } 350 | #admin { 351 | margin-top: 50px; 352 | } 353 | .tabbar { 354 | margin-bottom:50px; 355 | } 356 | .tab { 357 | padding: 10px; 358 | text-decoration: none; 359 | } 360 | .tab.selected { 361 | background: $color-menu-background; 362 | color: $primary-color; 363 | border-top: 1px solid $primary-color; 364 | border-right: 1px solid $primary-color; 365 | border-left: 1px solid $primary-color; 366 | padding: 9px; 367 | 368 | } 369 | .tab:hover { 370 | text-decoration: none; 371 | background: $color-menu-background; 372 | color: $color-light; 373 | } 374 | textarea, input, select { 375 | background: $color-menu-background; 376 | padding: 10px; 377 | color: $color-light; 378 | border: 0px; 379 | border-radius: 2px; 380 | } 381 | select { 382 | padding: 4px 10px; 383 | } 384 | input { 385 | padding: 10px; 386 | } 387 | input[type=submit] { 388 | color: $primary-color; 389 | text-transform: uppercase; 390 | } 391 | .note-video { 392 | margin: 30px 0 10px 0; 393 | } 394 | li.answer { 395 | height:30px; 396 | margin-bottom:10px; 397 | position:relative; 398 | } 399 | .answer .answer-bar { 400 | position:absolute; 401 | height:30px; 402 | border-radius:2px; 403 | } 404 | .answer .answer-text { 405 | position:relative; 406 | top:6px; 407 | padding-left:10px; 408 | white-space: nowrap; 409 | overflow: hidden; 410 | text-overflow: ellipsis; 411 | } 412 | .answer .answer-text > span { 413 | width:70px; 414 | display:inline-block; 415 | } 416 | -------------------------------------------------------------------------------- /core/gc.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from datetime import datetime 3 | from datetime import timedelta 4 | from time import perf_counter 5 | from typing import Any 6 | from typing import Dict 7 | from typing import List 8 | 9 | from little_boxes import activitypub as ap 10 | from little_boxes.errors import ActivityGoneError 11 | from 
little_boxes.errors import RemoteServerUnavailableError 12 | 13 | from config import DAYS_TO_KEEP 14 | from config import ID 15 | from config import ME 16 | from config import MEDIA_CACHE 17 | from core import activitypub 18 | from core.meta import Box 19 | from core.meta import MetaKey 20 | from core.meta import _meta 21 | from core.meta import by_type 22 | from core.meta import in_inbox 23 | from utils.migrations import DB 24 | 25 | back = activitypub.MicroblogPubBackend() 26 | ap.use_backend(back) 27 | 28 | MY_PERSON = ap.Person(**ME) 29 | 30 | logger = logging.getLogger(__name__) 31 | 32 | 33 | def threads_of_interest() -> List[str]: 34 | out = set() 35 | 36 | # Fetch all the threads we've participed in 37 | for data in DB.activities.find( 38 | { 39 | "meta.thread_root_parent": {"$exists": True}, 40 | "box": Box.OUTBOX.value, 41 | "type": ap.ActivityType.CREATE.value, 42 | } 43 | ): 44 | out.add(data["meta"]["thread_root_parent"]) 45 | 46 | # Fetch all threads related to bookmarked activities 47 | for data in DB.activities.find({"meta.bookmarked": True}): 48 | # Keep the replies 49 | out.add(data["meta"]["object_id"]) 50 | # And the whole thread if any 51 | if "thread_root_parent" in data["meta"]: 52 | out.add(data["meta"]["thread_root_parent"]) 53 | 54 | return list(out) 55 | 56 | 57 | def _keep(data: Dict[str, Any]) -> None: 58 | DB.activities.update_one({"_id": data["_id"]}, {"$set": {"meta.gc_keep": True}}) 59 | 60 | 61 | def perform() -> None: # noqa: C901 62 | start = perf_counter() 63 | d = (datetime.utcnow() - timedelta(days=DAYS_TO_KEEP)).strftime("%Y-%m-%d") 64 | toi = threads_of_interest() 65 | logger.info(f"thread_of_interest={toi!r}") 66 | 67 | delete_deleted = DB.activities.delete_many( 68 | { 69 | **in_inbox(), 70 | **by_type(ap.ActivityType.DELETE), 71 | _meta(MetaKey.PUBLISHED): {"$lt": d}, 72 | } 73 | ).deleted_count 74 | logger.info(f"{delete_deleted} Delete deleted") 75 | 76 | create_deleted = 0 77 | create_count = 0 78 | # Go over the old 
Create activities 79 | for data in DB.activities.find( 80 | { 81 | "box": Box.INBOX.value, 82 | "type": ap.ActivityType.CREATE.value, 83 | _meta(MetaKey.PUBLISHED): {"$lt": d}, 84 | "meta.gc_keep": {"$exists": False}, 85 | } 86 | ).limit(500): 87 | try: 88 | logger.info(f"data={data!r}") 89 | create_count += 1 90 | remote_id = data["remote_id"] 91 | meta = data["meta"] 92 | 93 | # This activity has been bookmarked, keep it 94 | if meta.get("bookmarked"): 95 | _keep(data) 96 | continue 97 | 98 | obj = None 99 | if not meta.get("deleted"): 100 | try: 101 | activity = ap.parse_activity(data["activity"]) 102 | logger.info(f"activity={activity!r}") 103 | obj = activity.get_object() 104 | except (RemoteServerUnavailableError, ActivityGoneError): 105 | logger.exception( 106 | f"failed to load {remote_id}, this activity will be deleted" 107 | ) 108 | 109 | # This activity mentions the server actor, keep it 110 | if obj and obj.has_mention(ID): 111 | _keep(data) 112 | continue 113 | 114 | # This activity is a direct reply of one the server actor activity, keep it 115 | if obj: 116 | in_reply_to = obj.get_in_reply_to() 117 | if in_reply_to and in_reply_to.startswith(ID): 118 | _keep(data) 119 | continue 120 | 121 | # This activity is part of a thread we want to keep, keep it 122 | if obj and in_reply_to and meta.get("thread_root_parent"): 123 | thread_root_parent = meta["thread_root_parent"] 124 | if thread_root_parent.startswith(ID) or thread_root_parent in toi: 125 | _keep(data) 126 | continue 127 | 128 | # This activity was boosted or liked, keep it 129 | if meta.get("boosted") or meta.get("liked"): 130 | _keep(data) 131 | continue 132 | 133 | # TODO(tsileo): remove after tests 134 | if meta.get("keep"): 135 | logger.warning( 136 | f"{activity!r} would not have been deleted, skipping for now" 137 | ) 138 | _keep(data) 139 | continue 140 | 141 | # Delete the cached attachment 142 | for grid_item in MEDIA_CACHE.fs.find({"remote_id": remote_id}): 143 | 
MEDIA_CACHE.fs.delete(grid_item._id) 144 | 145 | # Delete the activity 146 | DB.activities.delete_one({"_id": data["_id"]}) 147 | create_deleted += 1 148 | except Exception: 149 | logger.exception(f"failed to process {data!r}") 150 | 151 | for data in DB.replies.find( 152 | {_meta(MetaKey.PUBLISHED): {"$lt": d}, "meta.gc_keep": {"$exists": False}} 153 | ).limit(500): 154 | try: 155 | logger.info(f"data={data!r}") 156 | create_count += 1 157 | remote_id = data["remote_id"] 158 | meta = data["meta"] 159 | 160 | # This activity has been bookmarked, keep it 161 | if meta.get("bookmarked"): 162 | _keep(data) 163 | continue 164 | 165 | obj = ap.parse_activity(data["activity"]) 166 | # This activity is a direct reply of one the server actor activity, keep it 167 | in_reply_to = obj.get_in_reply_to() 168 | 169 | # This activity is part of a thread we want to keep, keep it 170 | if in_reply_to and meta.get("thread_root_parent"): 171 | thread_root_parent = meta["thread_root_parent"] 172 | if thread_root_parent.startswith(ID) or thread_root_parent in toi: 173 | _keep(data) 174 | continue 175 | 176 | # This activity was boosted or liked, keep it 177 | if meta.get("boosted") or meta.get("liked"): 178 | _keep(data) 179 | continue 180 | 181 | # Delete the cached attachment 182 | for grid_item in MEDIA_CACHE.fs.find({"remote_id": remote_id}): 183 | MEDIA_CACHE.fs.delete(grid_item._id) 184 | 185 | # Delete the activity 186 | DB.replies.delete_one({"_id": data["_id"]}) 187 | create_deleted += 1 188 | except Exception: 189 | logger.exception(f"failed to process {data!r}") 190 | 191 | after_gc_create = perf_counter() 192 | time_to_gc_create = after_gc_create - start 193 | logger.info( 194 | f"{time_to_gc_create:.2f} seconds to analyze {create_count} Create, {create_deleted} deleted" 195 | ) 196 | 197 | announce_count = 0 198 | announce_deleted = 0 199 | # Go over the old Create activities 200 | for data in DB.activities.find( 201 | { 202 | "box": Box.INBOX.value, 203 | "type": 
ap.ActivityType.ANNOUNCE.value, 204 | _meta(MetaKey.PUBLISHED): {"$lt": d}, 205 | "meta.gc_keep": {"$exists": False}, 206 | } 207 | ).limit(500): 208 | try: 209 | announce_count += 1 210 | remote_id = data["remote_id"] 211 | meta = data["meta"] 212 | activity = ap.parse_activity(data["activity"]) 213 | logger.info(f"activity={activity!r}") 214 | 215 | # This activity has been bookmarked, keep it 216 | if meta.get("bookmarked"): 217 | _keep(data) 218 | continue 219 | 220 | object_id = activity.get_object_id() 221 | 222 | # This announce is for a local activity (i.e. from the outbox), keep it 223 | if object_id.startswith(ID): 224 | _keep(data) 225 | continue 226 | 227 | for grid_item in MEDIA_CACHE.fs.find({"remote_id": remote_id}): 228 | MEDIA_CACHE.fs.delete(grid_item._id) 229 | 230 | # TODO(tsileo): here for legacy reason, this needs to be removed at some point 231 | for grid_item in MEDIA_CACHE.fs.find({"remote_id": object_id}): 232 | MEDIA_CACHE.fs.delete(grid_item._id) 233 | 234 | # Delete the activity 235 | DB.activities.delete_one({"_id": data["_id"]}) 236 | 237 | announce_deleted += 1 238 | except Exception: 239 | logger.exception(f"failed to process {data!r}") 240 | 241 | after_gc_announce = perf_counter() 242 | time_to_gc_announce = after_gc_announce - after_gc_create 243 | logger.info( 244 | f"{time_to_gc_announce:.2f} seconds to analyze {announce_count} Announce, {announce_deleted} deleted" 245 | ) 246 | -------------------------------------------------------------------------------- /core/inbox.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from functools import singledispatch 3 | from typing import Any 4 | from typing import Dict 5 | 6 | from little_boxes import activitypub as ap 7 | from little_boxes.errors import NotAnActivityError 8 | 9 | import config 10 | from core.activitypub import _answer_key 11 | from core.activitypub import accept_follow 12 | from core.activitypub import 
handle_replies 13 | from core.activitypub import update_cached_actor 14 | from core.db import DB 15 | from core.db import update_one_activity 16 | from core.meta import FollowStatus 17 | from core.meta import MetaKey 18 | from core.meta import by_object_id 19 | from core.meta import by_remote_id 20 | from core.meta import by_type 21 | from core.meta import in_inbox 22 | from core.meta import inc 23 | from core.meta import upsert 24 | from core.tasks import Tasks 25 | 26 | _logger = logging.getLogger(__name__) 27 | 28 | _NewMeta = Dict[str, Any] 29 | 30 | 31 | @singledispatch 32 | def process_inbox(activity: ap.BaseActivity, new_meta: _NewMeta) -> None: 33 | _logger.warning(f"skipping {activity!r}") 34 | return None 35 | 36 | 37 | @process_inbox.register 38 | def _delete_process_inbox(delete: ap.Delete, new_meta: _NewMeta) -> None: 39 | _logger.info(f"process_inbox activity={delete!r}") 40 | obj_id = delete.get_object_id() 41 | _logger.debug(f"delete object={obj_id}") 42 | try: 43 | # FIXME(tsileo): call the DB here instead? 
like for the oubox 44 | obj = ap.fetch_remote_activity(obj_id) 45 | _logger.info(f"inbox_delete handle_replies obj={obj!r}") 46 | in_reply_to = obj.get_in_reply_to() if obj.inReplyTo else None 47 | if obj.has_type(ap.CREATE_TYPES): 48 | post_query = {**by_object_id(obj_id), **by_type(ap.ActivityType.CREATE)} 49 | in_reply_to = ap._get_id( 50 | DB.activities.find_one(post_query)["activity"]["object"].get( 51 | "inReplyTo" 52 | ) 53 | ) 54 | if in_reply_to: 55 | DB.activities.update_one( 56 | {**by_object_id(in_reply_to), **by_type(ap.ActivityType.CREATE)}, 57 | inc(MetaKey.COUNT_REPLY, -1), 58 | ) 59 | DB.replies.update_one( 60 | by_remote_id(in_reply_to), inc(MetaKey.COUNT_REPLY, -1) 61 | ) 62 | except Exception: 63 | _logger.exception(f"failed to handle delete replies for {obj_id}") 64 | 65 | update_one_activity( 66 | {**by_object_id(obj_id), **by_type(ap.ActivityType.CREATE)}, 67 | upsert({MetaKey.DELETED: True}), 68 | ) 69 | 70 | # Foce undo other related activities 71 | DB.activities.update(by_object_id(obj_id), upsert({MetaKey.UNDO: True})) 72 | 73 | 74 | @process_inbox.register 75 | def _update_process_inbox(update: ap.Update, new_meta: _NewMeta) -> None: 76 | _logger.info(f"process_inbox activity={update!r}") 77 | obj = update.get_object() 78 | if obj.ACTIVITY_TYPE == ap.ActivityType.NOTE: 79 | update_one_activity( 80 | {"activity.object.id": obj.id}, {"$set": {"activity.object": obj.to_dict()}} 81 | ) 82 | elif obj.has_type(ap.ActivityType.QUESTION): 83 | choices = obj._data.get("oneOf", obj.anyOf) 84 | total_replies = 0 85 | _set = {} 86 | for choice in choices: 87 | answer_key = _answer_key(choice["name"]) 88 | cnt = choice["replies"]["totalItems"] 89 | total_replies += cnt 90 | _set[f"meta.question_answers.{answer_key}"] = cnt 91 | 92 | _set["meta.question_replies"] = total_replies 93 | 94 | update_one_activity({**in_inbox(), **by_object_id(obj.id)}, {"$set": _set}) 95 | # Also update the cached copies of the question (like Announce and Like) 96 | 
DB.activities.update_many( 97 | by_object_id(obj.id), upsert({MetaKey.OBJECT: obj.to_dict()}) 98 | ) 99 | 100 | elif obj.has_type(ap.ACTOR_TYPES): 101 | actor = ap.fetch_remote_activity(obj.id, no_cache=True) 102 | update_cached_actor(actor) 103 | 104 | else: 105 | raise ValueError(f"don't know how to update {obj!r}") 106 | 107 | 108 | @process_inbox.register 109 | def _create_process_inbox(create: ap.Create, new_meta: _NewMeta) -> None: 110 | _logger.info(f"process_inbox activity={create!r}") 111 | # If it's a `Quesiion`, trigger an async task for updating it later (by fetching the remote and updating the 112 | # local copy) 113 | obj = create.get_object() 114 | if obj.has_type(ap.ActivityType.QUESTION): 115 | Tasks.fetch_remote_question(obj) 116 | 117 | Tasks.cache_emojis(obj) 118 | 119 | handle_replies(create) 120 | 121 | 122 | @process_inbox.register 123 | def _announce_process_inbox(announce: ap.Announce, new_meta: _NewMeta) -> None: 124 | _logger.info(f"process_inbox activity={announce!r}") 125 | # TODO(tsileo): actually drop it without storing it and better logging, also move the check somewhere else 126 | # or remove it? 
127 | try: 128 | obj = announce.get_object() 129 | except NotAnActivityError: 130 | _logger.exception( 131 | f'received an Annouce referencing an OStatus notice ({announce._data["object"]}), dropping the message' 132 | ) 133 | return 134 | 135 | if obj.has_type(ap.ActivityType.QUESTION): 136 | Tasks.fetch_remote_question(obj) 137 | 138 | # Cache the announced object 139 | Tasks.cache_object(announce.id) 140 | 141 | # Process the reply of the announced object if any 142 | in_reply_to = obj.get_in_reply_to() 143 | if in_reply_to: 144 | reply = ap.fetch_remote_activity(in_reply_to) 145 | if reply.has_type(ap.ActivityType.CREATE): 146 | reply = reply.get_object() 147 | 148 | in_reply_to_data = {MetaKey.IN_REPLY_TO: in_reply_to} 149 | # Update the activity to save some data about the reply 150 | if reply.get_actor().id == obj.get_actor().id: 151 | in_reply_to_data.update({MetaKey.IN_REPLY_TO_SELF: True}) 152 | else: 153 | in_reply_to_data.update( 154 | {MetaKey.IN_REPLY_TO_ACTOR: reply.get_actor().to_dict(embed=True)} 155 | ) 156 | update_one_activity(by_remote_id(announce.id), upsert(in_reply_to_data)) 157 | # Spawn a task to process it (and determine if it needs to be saved) 158 | Tasks.process_reply(reply.id) 159 | 160 | update_one_activity( 161 | {**by_type(ap.ActivityType.CREATE), **by_object_id(obj.id)}, 162 | inc(MetaKey.COUNT_BOOST, 1), 163 | ) 164 | 165 | 166 | @process_inbox.register 167 | def _like_process_inbox(like: ap.Like, new_meta: _NewMeta) -> None: 168 | _logger.info(f"process_inbox activity={like!r}") 169 | obj = like.get_object() 170 | # Update the meta counter if the object is published by the server 171 | update_one_activity( 172 | {**by_type(ap.ActivityType.CREATE), **by_object_id(obj.id)}, 173 | inc(MetaKey.COUNT_LIKE, 1), 174 | ) 175 | 176 | 177 | @process_inbox.register 178 | def _emoji_reaction_process_inbox( 179 | emoji_reaction: ap.EmojiReaction, new_meta: _NewMeta 180 | ) -> None: 181 | _logger.info(f"process_inbox 
activity={emoji_reaction!r}") 182 | obj = emoji_reaction.get_object() 183 | # Try to update an existing emoji reaction counter entry for the activity emoji 184 | if not update_one_activity( 185 | { 186 | **by_type(ap.ActivityType.CREATE), 187 | **by_object_id(obj.id), 188 | "meta.emoji_reactions.emoji": emoji_reaction.content, 189 | }, 190 | {"$inc": {"meta.emoji_reactions.$.count": 1}}, 191 | ): 192 | # Bootstrap the current emoji counter 193 | update_one_activity( 194 | {**by_type(ap.ActivityType.CREATE), **by_object_id(obj.id)}, 195 | { 196 | "$push": { 197 | "meta.emoji_reactions": { 198 | "emoji": emoji_reaction.content, 199 | "count": 1, 200 | } 201 | } 202 | }, 203 | ) 204 | 205 | 206 | @process_inbox.register 207 | def _follow_process_inbox(activity: ap.Follow, new_meta: _NewMeta) -> None: 208 | _logger.info(f"process_inbox activity={activity!r}") 209 | # Reply to a Follow with an Accept if we're not manully approving them 210 | if not config.MANUALLY_APPROVES_FOLLOWERS: 211 | accept_follow(activity) 212 | else: 213 | update_one_activity( 214 | by_remote_id(activity.id), 215 | upsert({MetaKey.FOLLOW_STATUS: FollowStatus.WAITING.value}), 216 | ) 217 | 218 | 219 | def _update_follow_status(follow_id: str, status: FollowStatus) -> None: 220 | _logger.info(f"{follow_id} is {status}") 221 | update_one_activity( 222 | by_remote_id(follow_id), upsert({MetaKey.FOLLOW_STATUS: status.value}) 223 | ) 224 | 225 | 226 | @process_inbox.register 227 | def _accept_process_inbox(activity: ap.Accept, new_meta: _NewMeta) -> None: 228 | _logger.info(f"process_inbox activity={activity!r}") 229 | # Set a flag on the follow 230 | follow = activity.get_object_id() 231 | _update_follow_status(follow, FollowStatus.ACCEPTED) 232 | 233 | 234 | @process_inbox.register 235 | def _reject_process_inbox(activity: ap.Reject, new_meta: _NewMeta) -> None: 236 | _logger.info(f"process_inbox activity={activity!r}") 237 | follow = activity.get_object_id() 238 | _update_follow_status(follow, 
FollowStatus.REJECTED) 239 | 240 | 241 | @process_inbox.register 242 | def _undo_process_inbox(activity: ap.Undo, new_meta: _NewMeta) -> None: 243 | _logger.info(f"process_inbox activity={activity!r}") 244 | # Fetch the object that's been undo'ed 245 | obj = activity.get_object() 246 | 247 | # Set the undo flag on the mentionned activity 248 | update_one_activity(by_remote_id(obj.id), upsert({MetaKey.UNDO: True})) 249 | 250 | # Handle cached counters 251 | if obj.has_type(ap.ActivityType.LIKE): 252 | # Update the meta counter if the object is published by the server 253 | update_one_activity( 254 | {**by_object_id(obj.get_object_id()), **by_type(ap.ActivityType.CREATE)}, 255 | inc(MetaKey.COUNT_LIKE, -1), 256 | ) 257 | elif obj.has_type(ap.ActivityType.ANNOUNCE): 258 | announced = obj.get_object() 259 | # Update the meta counter if the object is published by the server 260 | update_one_activity( 261 | {**by_type(ap.ActivityType.CREATE), **by_object_id(announced.id)}, 262 | inc(MetaKey.COUNT_BOOST, -1), 263 | ) 264 | -------------------------------------------------------------------------------- /templates/stream.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% import 'utils.html' as utils %} 3 | {% block title %}{% if request.path == url_for('admin.admin_stream') %}Stream{% elif actor_id %}Profile {{ actor.name }}{% elif list_name %}List {{ list_name }}{% else %}Notifications{% endif %} - {{ config.NAME }}{% endblock %} 4 | {% block content %} 5 |
6 |
7 | 8 | {% if list_name %} 9 |

{{ list_name }}

10 | {% endif %} 11 | 12 | {% if request.path == url_for('admin.admin_notifications') and unread_notifications_count %} 13 |
14 |
15 | 16 | 17 | 18 | 19 |
20 |
21 | {% endif %} 22 | 23 | {% if actor %} 24 | {% set actor_redir = request.path + "?actor_id=" + request.args.get('actor_id') %} 25 | 26 |
27 |
28 | {% if follower %}follows you!{% endif %} 29 | 30 | {% if following %} 31 |
32 | 33 | 34 | 35 | 36 |
37 |
38 | 39 | 40 | 41 | 42 |
43 | 44 | 45 | {% if lists %} 46 |
47 | 48 | 49 | 50 | 58 | 59 |
60 | {% endif %} 61 | 62 | {% for l in lists %} 63 | {% if actor.id in l.members %} 64 |
65 | 66 | 67 | 68 | 69 |
70 | 71 | 72 | {% endif %} 73 | {% endfor %} 74 | 75 | 76 | 77 | 78 | {% else %} 79 |
80 | 81 | 82 | 83 | 84 |
85 |
86 | 87 | 88 | 89 | 90 |
91 | 92 | 93 | {% endif %} 94 |
95 | 96 | 97 | 98 | {% if not actor.icon %} 99 | 100 | {% else %} 101 | {% endif %} 102 | 103 |
104 |
{{ (actor.name or actor.preferredUsername) | clean | replace_custom_emojis(actor) | safe }}
105 | @{{ actor.preferredUsername }}@{{ actor | url_or_id | get_url | domain }} 106 | {% if actor.manuallyApprovesFollowers %}

Manually approves followers

{% endif %} 107 |
108 |
109 | 110 | {% if actor.summary %} 111 |
112 | {{ actor.summary | clean | replace_custom_emojis(actor) | safe }} 113 |
114 | {% endif %} 115 | 116 | {% if actor.attachment %} 117 |
    118 | {% for item in actor.attachment %} 119 | {% if item.type == "PropertyValue" %} 120 |
  • {{ item.name }}: {{ item.value | clean | replace_custom_emojis(actor) | safe }}
  • 121 | {% endif %} 122 | {% endfor %} 123 |
124 | {% endif %} 125 |
126 | 127 | {% endif %} 128 | 129 |
130 | {% for item in inbox_data %} 131 | {% if 'actor' in item.meta %} 132 | {% if item | has_type('Create') %} 133 |
134 | {% if request.path.startswith("/admin/notifications") and not item.meta.reply_acked and item.meta.object_visibility | visibility_is_public %} 135 |
136 | 137 | 138 | 139 | 140 |
141 | 142 | {% endif %} 143 | {{ utils.display_in_reply_to(item.meta, item.activity.object) }} 144 |
145 | {{ utils.display_note(item.activity.object, meta=item.meta) }} 146 | {% else %} 147 | {% if item | has_type('Announce') %} 148 | {% set boost_actor = item.meta.actor %} 149 | {% if boost_actor %} 150 |
151 | {{ utils.display_actor_box(boost_actor, after="boosted") }} 152 | {{ utils.display_in_reply_to(item.meta, item.activity.object) }} 153 | 154 | {% if request.path == url_for('admin.admin_notifications') %} 155 | {% if item.meta.notification_unread %}new{% endif %} 156 | {{ (item.activity.published or item.meta.published) | format_timeago }} 157 | {% endif %} 158 | 159 |
160 | {% endif %} 161 | {% if item.meta.object %} 162 | 163 | {{ utils.display_note(item.meta.object, meta=item.meta) }} 164 | {% endif %} 165 | {% endif %} 166 | 167 | {% if item | has_type('Like') %} 168 | {% set boost_actor = item.meta.actor %} 169 |
170 | {{ utils.display_actor_box(boost_actor, after="liked") }} 171 | {{ utils.display_in_reply_to(item.meta, item.activity.object) }} 172 | {% if item.meta.notification_unread %}new{% endif %} 173 | {{ (item.activity.published or item.meta.published) | format_timeago }} 174 |
175 | {% if item.meta.object %} 176 | {{ utils.display_note(item.meta.object, meta=item.meta) }} 177 | {% endif %} 178 | {% endif %} 179 | 180 | {% if item | has_type('Follow') %} 181 |
182 | new follower 183 | {{ item.meta.follow_status }} 184 | {% if config.MANUALLY_APPROVES_FOLLOWERS and item.meta.follow_status != "accepted" %} 185 |
186 | 187 | 188 | 189 | 190 |
191 | {% endif %} 192 | {% if item.meta.notification_unread %}new{% endif %} 193 | {{ (item.activity.published or item.meta.published) | format_timeago }} 194 | profile 195 | {% if item.meta.notification_follows_back %}already following 196 | {% else %} 197 |
198 | 199 | 200 | 201 | 202 |
203 | {% endif %} 204 |
205 |
206 | {{ utils.display_actor_inline(item.meta.actor, size=50) }} 207 |
208 | 209 | {% elif item | has_type('Accept') %} 210 |
211 | you started following 212 | {% if item.meta.notification_unread %}new{% endif %} 213 | {{ (item.activity.published or item.meta.published) | format_timeago }} 214 | profile 215 | {% if item.meta.notification_follows_back %}follows you back{% endif %} 216 |
217 | 218 |
219 | {{ utils.display_actor_inline(item.meta.actor, size=50) }} 220 |
221 | 222 | {% elif item | has_type('Reject') %} 223 |
224 | rejected your follow request 225 | {% if item.meta.notification_unread %}new{% endif %} 226 | {{ (item.activity.published or item.meta.published) | format_timeago }} 227 | profile 228 | {% if item.meta.notification_follows_back %}follows you{% endif %} 229 |
230 | 231 |
232 | {{ utils.display_actor_inline(item.meta.actor, size=50) }} 233 |
234 | 235 | {% elif item | has_type('Undo') %} 236 |
237 | unfollowed you 238 | {% if item.meta.notification_unread %}new{% endif %} 239 | {{ (item.activity.published or item.meta.published) | format_timeago }} 240 |
241 |
242 | {{ utils.display_actor_inline(item.meta.actor, size=50) }} 243 |
244 | 245 | {% else %} 246 | 247 | {% endif %} 248 | 249 | 250 | {% endif %} 251 | {% else %} 252 | 253 | {% if item | has_type('question_ended') %} 254 |

poll ended

255 | {{ utils.display_note(item.activity, meta={"object_visibility": "PUBLIC"}) }} 256 | {% endif %} 257 | 258 | {% endif %} 259 | {% endfor %} 260 | 261 | {{ utils.display_pagination(older_than, newer_than) }} 262 |
263 |
264 | 265 |
266 | {% endblock %} 267 | -------------------------------------------------------------------------------- /utils/template_filters.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import urllib 3 | from datetime import datetime 4 | from datetime import timezone 5 | from functools import lru_cache 6 | from urllib.parse import urlparse 7 | 8 | import bleach 9 | import emoji_unicode 10 | import flask 11 | import html2text 12 | import timeago 13 | from bs4 import BeautifulSoup 14 | from cachetools import LRUCache 15 | from little_boxes import activitypub as ap 16 | from little_boxes.activitypub import _to_list 17 | from little_boxes.errors import ActivityGoneError 18 | from little_boxes.errors import ActivityNotFoundError 19 | 20 | from config import BASE_URL 21 | from config import EMOJI_TPL 22 | from config import ID 23 | from config import MEDIA_CACHE 24 | from core.activitypub import _answer_key 25 | from utils import parse_datetime 26 | from utils.highlight import highlight 27 | from utils.media import Kind 28 | from utils.media import _is_img 29 | 30 | _logger = logging.getLogger(__name__) 31 | 32 | H2T = html2text.HTML2Text() 33 | H2T.ignore_links = True 34 | H2T.ignore_images = True 35 | 36 | 37 | filters = flask.Blueprint("filters", __name__) 38 | 39 | 40 | @filters.app_template_filter() 41 | def get_visibility(meta): 42 | if "object_visibility" in meta and meta["object_visibility"]: 43 | return meta["object_visibility"] 44 | return meta.get("visibility") 45 | 46 | 47 | @filters.app_template_filter() 48 | def visibility(v: str) -> str: 49 | try: 50 | return ap.Visibility[v].value.lower() 51 | except Exception: 52 | return v 53 | 54 | 55 | @filters.app_template_filter() 56 | def visibility_is_public(v: str) -> bool: 57 | return v in [ap.Visibility.PUBLIC.name, ap.Visibility.UNLISTED.name] 58 | 59 | 60 | @filters.app_template_filter() 61 | def code_highlight(content): 62 | return highlight(content) 63 | 
64 | 65 | @filters.app_template_filter() 66 | def emojify(text): 67 | return emoji_unicode.replace( 68 | text, lambda e: EMOJI_TPL.format(filename=e.code_points, raw=e.unicode) 69 | ) 70 | 71 | 72 | # HTML/templates helper 73 | ALLOWED_TAGS = [ 74 | "a", 75 | "abbr", 76 | "acronym", 77 | "b", 78 | "br", 79 | "blockquote", 80 | "code", 81 | "pre", 82 | "em", 83 | "i", 84 | "li", 85 | "ol", 86 | "strong", 87 | "sup", 88 | "sub", 89 | "del", 90 | "ul", 91 | "span", 92 | "div", 93 | "p", 94 | "h1", 95 | "h2", 96 | "h3", 97 | "h4", 98 | "h5", 99 | "h6", 100 | "table", 101 | "th", 102 | "tr", 103 | "td", 104 | "thead", 105 | "tbody", 106 | "tfoot", 107 | "colgroup", 108 | "caption", 109 | "img", 110 | ] 111 | 112 | ALLOWED_ATTRIBUTES = { 113 | "a": ["href", "title"], 114 | "abbr": ["title"], 115 | "acronym": ["title"], 116 | "img": ["src", "alt", "title"], 117 | } 118 | 119 | 120 | @filters.app_template_filter() 121 | def replace_custom_emojis(content, note): 122 | idx = {} 123 | for tag in note.get("tag", []): 124 | if tag.get("type") == "Emoji": 125 | # try: 126 | idx[tag["name"]] = _get_file_url(tag["icon"]["url"], 25, Kind.EMOJI) 127 | 128 | for emoji_name, emoji_url in idx.items(): 129 | content = content.replace( 130 | emoji_name, 131 | f'{emoji_name}', 132 | ) 133 | 134 | return content 135 | 136 | 137 | def clean_html(html): 138 | try: 139 | return bleach.clean( 140 | html, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES, strip=True 141 | ) 142 | except Exception: 143 | return "failed to clean HTML" 144 | 145 | 146 | @filters.app_template_filter() 147 | def gtone(n): 148 | return n > 1 149 | 150 | 151 | @filters.app_template_filter() 152 | def gtnow(dtstr): 153 | return ap.format_datetime(datetime.now(timezone.utc)) > dtstr 154 | 155 | 156 | @filters.app_template_filter() 157 | def clean(html): 158 | out = clean_html(html) 159 | return emoji_unicode.replace( 160 | out, lambda e: EMOJI_TPL.format(filename=e.code_points, raw=e.unicode) 161 | ) 162 | 163 | 164 | 
@filters.app_template_filter() 165 | def permalink_id(val): 166 | return str(hash(val)) 167 | 168 | 169 | @filters.app_template_filter() 170 | def quote_plus(t): 171 | return urllib.parse.quote_plus(t) 172 | 173 | 174 | @filters.app_template_filter() 175 | def is_from_outbox(t): 176 | return t.startswith(ID) 177 | 178 | 179 | @filters.app_template_filter() 180 | def html2plaintext(body): 181 | return H2T.handle(body) 182 | 183 | 184 | @filters.app_template_filter() 185 | def domain(url): 186 | return urlparse(url).netloc 187 | 188 | 189 | @filters.app_template_filter() 190 | def format_time(val): 191 | if val: 192 | dt = parse_datetime(val) 193 | return datetime.strftime(dt, "%B %d, %Y, %H:%M %p") 194 | return val 195 | 196 | 197 | @filters.app_template_filter() 198 | def format_ts(val): 199 | return datetime.fromtimestamp(val).strftime("%B %d, %Y, %H:%M %p") 200 | 201 | 202 | @filters.app_template_filter() 203 | def gt_ts(val): 204 | return datetime.now() > datetime.fromtimestamp(val) 205 | 206 | 207 | @filters.app_template_filter() 208 | def format_timeago(val): 209 | if val: 210 | dt = parse_datetime(val) 211 | return timeago.format(dt.astimezone(timezone.utc), datetime.now(timezone.utc)) 212 | return val 213 | 214 | 215 | @filters.app_template_filter() 216 | def url_or_id(d): 217 | if isinstance(d, dict): 218 | if "url" in d and isinstance(d["url"], str): 219 | return d["url"] 220 | else: 221 | return d["id"] 222 | return "" 223 | 224 | 225 | @filters.app_template_filter() 226 | def get_url(u): 227 | if isinstance(u, list): 228 | for l in u: 229 | if l.get("mimeType") == "text/html": 230 | u = l 231 | if isinstance(u, dict): 232 | return u["href"] 233 | elif isinstance(u, str): 234 | return u 235 | else: 236 | return u 237 | 238 | 239 | @filters.app_template_filter() 240 | def get_actor(url): 241 | if not url: 242 | return None 243 | if isinstance(url, list): 244 | url = url[0] 245 | if isinstance(url, dict): 246 | url = url.get("id") 247 | try: 248 | return 
ap.get_backend().fetch_iri(url) 249 | except (ActivityNotFoundError, ActivityGoneError): 250 | return f"Deleted<{url}>" 251 | except Exception as exc: 252 | return f"Error<{url}/{exc!r}>" 253 | 254 | 255 | @filters.app_template_filter() 256 | def has_place(note): 257 | if note.get("location") and note["location"].get("type") == "Place": 258 | return True 259 | return False 260 | 261 | 262 | @filters.app_template_filter() 263 | def get_place(note): 264 | if note.get("location") and note["location"].get("type") == "Place": 265 | tag = note["location"] 266 | if tag.get("latitude") and tag.get("longitude"): 267 | lat = tag["latitude"] 268 | lng = tag["longitude"] 269 | out = "" 270 | if tag.get("name"): 271 | out += f"{tag['name']} " 272 | 273 | out += ( 274 | '' 275 | f'' 276 | f'' 277 | f'{lat},{lng}' 278 | "" 279 | ) 280 | 281 | return out 282 | 283 | return "" 284 | 285 | return "" 286 | 287 | 288 | @filters.app_template_filter() 289 | def poll_answer_key(choice: str) -> str: 290 | return _answer_key(choice) 291 | 292 | 293 | @filters.app_template_filter() 294 | def get_answer_count(choice, obj, meta): 295 | count_from_meta = meta.get("question_answers", {}).get(_answer_key(choice), 0) 296 | if count_from_meta: 297 | return count_from_meta 298 | for option in obj.get("oneOf", obj.get("anyOf", [])): 299 | if option.get("name") == choice: 300 | return option.get("replies", {}).get("totalItems", 0) 301 | 302 | _logger.warning(f"invalid poll data {choice} {obj} {meta}") 303 | return 0 304 | 305 | 306 | @filters.app_template_filter() 307 | def get_total_answers_count(obj, meta): 308 | cached = meta.get("question_replies", 0) 309 | if cached: 310 | return cached 311 | cnt = 0 312 | for choice in obj.get("anyOf", obj.get("oneOf", [])): 313 | cnt += choice.get("replies", {}).get("totalItems", 0) 314 | return cnt 315 | 316 | 317 | _FILE_URL_CACHE = LRUCache(4096) 318 | 319 | 320 | def _get_file_url(url, size, kind) -> str: 321 | k = (url, size, kind) 322 | cached = 
_FILE_URL_CACHE.get(k) 323 | if cached: 324 | return cached 325 | 326 | doc = MEDIA_CACHE.get_file(*k) 327 | if doc: 328 | out = f"/media/{str(doc._id)}" 329 | _FILE_URL_CACHE[k] = out 330 | return out 331 | 332 | _logger.error(f"cache not available for {url}/{size}/{kind}") 333 | if url.startswith(BASE_URL): 334 | return url 335 | 336 | p = urlparse(url) 337 | return f"/p/{p.scheme}" + p._replace(scheme="").geturl()[1:] 338 | 339 | 340 | @filters.app_template_filter() 341 | def get_actor_icon_url(url, size): 342 | return _get_file_url(url, size, Kind.ACTOR_ICON) 343 | 344 | 345 | @filters.app_template_filter() 346 | def get_attachment_url(url, size): 347 | return _get_file_url(url, size, Kind.ATTACHMENT) 348 | 349 | 350 | @filters.app_template_filter() 351 | @lru_cache(maxsize=256) 352 | def update_inline_imgs(content): 353 | soup = BeautifulSoup(content, "html5lib") 354 | imgs = soup.find_all("img") 355 | if not imgs: 356 | return content 357 | for img in imgs: 358 | if not img.attrs.get("src"): 359 | continue 360 | 361 | img.attrs["src"] = _get_file_url(img.attrs["src"], 720, Kind.ATTACHMENT) 362 | 363 | return soup.find("body").decode_contents() 364 | 365 | 366 | @filters.app_template_filter() 367 | def get_video_url(url): 368 | if isinstance(url, list): 369 | for link in url: 370 | if link.get("mediaType", "").startswith("video/"): 371 | return _get_file_url(link.get("href"), None, Kind.ATTACHMENT) 372 | else: 373 | return _get_file_url(url, None, Kind.ATTACHMENT) 374 | 375 | 376 | @filters.app_template_filter() 377 | def get_og_image_url(url, size=100): 378 | try: 379 | return _get_file_url(url, size, Kind.OG_IMAGE) 380 | except Exception: 381 | return "" 382 | 383 | 384 | @filters.app_template_filter() 385 | def remove_mongo_id(dat): 386 | if isinstance(dat, list): 387 | return [remove_mongo_id(item) for item in dat] 388 | if "_id" in dat: 389 | dat["_id"] = str(dat["_id"]) 390 | for k, v in dat.items(): 391 | if isinstance(v, dict): 392 | dat[k] = 
remove_mongo_id(dat[k]) 393 | return dat 394 | 395 | 396 | @filters.app_template_filter() 397 | def get_video_link(data): 398 | if isinstance(data, list): 399 | for link in data: 400 | if link.get("mimeType", "").startswith("video/"): 401 | return link.get("href") 402 | elif isinstance(data, str): 403 | return data 404 | return None 405 | 406 | 407 | @filters.app_template_filter() 408 | def get_text(data): 409 | """return first in 'content', 'name' or ''""" 410 | for _t in ("content", "name"): 411 | if _t in data: 412 | return data[_t] 413 | return "" 414 | 415 | 416 | @filters.app_template_filter() 417 | def has_type(doc, _types): 418 | for _type in _to_list(_types): 419 | if _type in _to_list(doc["type"]): 420 | return True 421 | return False 422 | 423 | 424 | @filters.app_template_filter() 425 | def has_actor_type(doc): 426 | # FIXME(tsileo): skipping the last one "Question", cause Mastodon sends question restuls as an update coming from 427 | # the question... Does Pleroma do that too? 
428 | for t in ap.ACTOR_TYPES[:-1]: 429 | if has_type(doc, t.value): 430 | return True 431 | return False 432 | 433 | 434 | @lru_cache(maxsize=256) 435 | def _get_inlined_imgs(content): 436 | imgs = [] 437 | if not content: 438 | return imgs 439 | 440 | soup = BeautifulSoup(content, "html5lib") 441 | for img in soup.find_all("img"): 442 | src = img.attrs.get("src") 443 | if src: 444 | imgs.append(src) 445 | 446 | return imgs 447 | 448 | 449 | @filters.app_template_filter() 450 | def iter_note_attachments(note): 451 | attachments = note.get("attachment", []) 452 | imgs = _get_inlined_imgs(note.get("content")) 453 | return [a for a in attachments if a.get("url") not in imgs] 454 | 455 | 456 | @filters.app_template_filter() 457 | def not_only_imgs(attachment): 458 | for a in attachment: 459 | if isinstance(a, dict) and not _is_img(a["url"]): 460 | return True 461 | if isinstance(a, str) and not _is_img(a): 462 | return True 463 | return False 464 | 465 | 466 | @filters.app_template_filter() 467 | def is_img(filename): 468 | return _is_img(filename) 469 | --------------------------------------------------------------------------------