├── .github
└── workflows
│ └── codeql-analysis.yml
├── .gitignore
├── Dockerfile
├── LICENSE
├── README.md
├── __init__.py
├── actions
├── change_to_json.py
├── channels.py
├── following.py
├── preview.py
├── react.py
├── search.py
├── timeline.py
└── user_ops.py
├── authentication
├── auth.py
└── check_token.py
├── client
└── client_views.py
├── config_example.py
├── dates.py
├── feeds
├── clean.py
├── hfeed.py
├── json_feed.py
├── read_later.py
└── xml_feed.py
├── main.py
├── poll_feeds.py
├── requirements.txt
├── requirements_dev.txt
├── screenshots
├── desktop.png
├── feed.png
├── mobile.png
└── screenshot.png
├── seed.py
├── server
├── main.py
├── server_views.py
└── websub.py
├── static
├── .DS_Store
├── css
│ └── styles.css
├── favicon.ico
├── icons
│ ├── .DS_Store
│ ├── 100.png
│ ├── 1024.png
│ ├── 114.png
│ ├── 120.png
│ ├── 128.png
│ ├── 144.png
│ ├── 152.png
│ ├── 16.png
│ ├── 167.png
│ ├── 172.png
│ ├── 180.png
│ ├── 196.png
│ ├── 20.png
│ ├── 216.png
│ ├── 256.png
│ ├── 29.png
│ ├── 32.png
│ ├── 40.png
│ ├── 48.png
│ ├── 50.png
│ ├── 512.png
│ ├── 55.png
│ ├── 57.png
│ ├── 58.png
│ ├── 60.png
│ ├── 64.png
│ ├── 72.png
│ ├── 76.png
│ ├── 80.png
│ ├── 87.png
│ ├── 88.png
│ ├── bell.svg
│ ├── create.svg
│ ├── follow.svg
│ ├── home.svg
│ ├── search.svg
│ └── settings.svg
├── images
│ ├── feed.png
│ ├── gradient.png
│ └── wood.avif
├── js
│ ├── editor.js
│ └── reader.js
├── robots.txt
└── styles.css
├── templates
├── 404.html
├── auth.html
├── base.html
├── client
│ ├── create.html
│ ├── discover.html
│ ├── feed_item.html
│ ├── preview.html
│ ├── read_article.html
│ ├── reader.html
│ ├── search.html
│ └── settings.html
├── index.html
├── server
│ ├── dashboard.html
│ └── following.html
├── setup.html
└── show_error.html
├── tox.ini
└── wsgi.py
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | # For most projects, this workflow file will not need changing; you simply need
2 | # to commit it to your repository.
3 | #
4 | # You may wish to alter this file to override the set of languages analyzed,
5 | # or to provide custom queries or build logic.
6 | #
7 | # ******** NOTE ********
8 | # We have attempted to detect the languages in your repository. Please check
9 | # the `language` matrix defined below to confirm you have the correct set of
10 | # supported CodeQL languages.
11 | #
12 | name: "CodeQL"
13 |
14 | on:
15 | push:
16 | branches: [ main ]
17 | pull_request:
18 | # The branches below must be a subset of the branches above
19 | branches: [ main ]
20 | schedule:
21 | - cron: '41 10 * * 1'
22 |
23 | jobs:
24 | analyze:
25 | name: Analyze
26 | runs-on: ubuntu-latest
27 | permissions:
28 | actions: read
29 | contents: read
30 | security-events: write
31 |
32 | strategy:
33 | fail-fast: false
34 | matrix:
35 | language: [ 'python' ]
36 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
37 | # Learn more:
38 | # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
39 |
40 | steps:
41 | - name: Checkout repository
42 | uses: actions/checkout@v2
43 |
44 | # Initializes the CodeQL tools for scanning.
45 | - name: Initialize CodeQL
46 | uses: github/codeql-action/init@v1
47 | with:
48 | languages: ${{ matrix.language }}
49 | # If you wish to specify custom queries, you can do so here or in a config file.
50 | # By default, queries listed here will override any specified in a config file.
51 | # Prefix the list here with "+" to use these queries and those in the config file.
52 | # queries: ./path/to/local/query, your-org/your-repo/queries@main
53 |
54 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
55 | # If this step fails, then you should remove it and run the build manually (see below)
56 | - name: Autobuild
57 | uses: github/codeql-action/autobuild@v1
58 |
59 | # ℹ️ Command-line programs to run using the OS shell.
60 | # 📚 https://git.io/JvXDl
61 |
62 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
63 | # and modify them (or add more) to build your code if your project
64 | # uses a compiled language
65 |
66 | #- run: |
67 | # make bootstrap
68 | # make release
69 |
70 | - name: Perform CodeQL Analysis
71 | uses: github/codeql-action/analyze@v1
72 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 | *.db
6 | # C extensions
7 | *.so
8 | config.py
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # Rope project settings
118 | .ropeproject
119 |
120 | # mkdocs documentation
121 | /site
122 |
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 |
128 | # Pyre type checker
129 | .pyre/
130 | *.json
131 | import.py
132 | logs
133 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
# syntax=docker/dockerfile:1

FROM python:3.9.0

WORKDIR /app

# Copy the dependency list first so the pip install layer is cached
# unless requirements.txt changes.
COPY requirements.txt requirements.txt

RUN pip3 install -r requirements.txt
COPY . .

# Flask's CLI auto-discovers wsgi.py in the working directory.
# NOTE(review): config.py must exist (cp config_example.py config.py)
# before building, since `COPY . .` bakes it into the image.
CMD ["python3", "-m", "flask", "run", "--host=0.0.0.0"]
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT No Attribution
2 |
3 | Copyright 2022 capjamesg
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this
6 | software and associated documentation files (the "Software"), to deal in the Software
7 | without restriction, including without limitation the rights to use, copy, modify,
8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
9 | permit persons to whom the Software is furnished to do so.
10 |
11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
12 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
13 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
14 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
16 | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
17 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Cinnamon Social Reader
2 |
3 | This repository contains the code that powers my personal Microsub social reader called Cinnamon.
4 |
5 | [Microsub](https://indieweb.org/Microsub) is an IndieWeb specification currently in development that separates the tasks of processing feeds and presenting feeds so that servers and feed readers can be developed independently but work together.
6 |
7 | The Microsub server currently supports:
8 |
9 | - Creating channels
10 | - Updating channel names
11 | - Changing the order of channels in a list
12 | - Deleting channels
13 | - Subscribing to a feed
14 | - Unsubscribing from a feed
15 | - Marking entries in a feed view as read
16 | - Creating a post via Micropub
17 | - Reacting to posts via Micropub
18 | - Among other features
19 |
20 | This project is in active development. The entire Microsub specification is not yet incorporated into this project.
21 |
22 | ## Screenshots
23 |
24 | ### Desktop
25 |
26 | 
27 |
28 | ### Mobile
29 |
30 | 
31 |
32 | ## Getting Started
33 |
34 | You can install and configure this Microsub server using Docker or manually.
35 |
36 | ### Docker Setup
37 |
38 | To set up this project with Docker, first install Docker on your local machine.
39 |
40 | Next, run the following command:
41 |
42 | docker build -t microsub .
43 |
44 | This will build the microsub image using the Dockerfile in the root directory of this project.
45 |
46 | Next, copy the config_example.py file into the config.py file and change the values to match your server:
47 |
48 | cp config_example.py config.py
49 |
50 | The Dockerfile automates the project setup process.
51 |
52 | Next, run:
53 |
54 |     docker run -p 5000:5000 microsub
55 |
56 | This will run the microsub server on port 5000.
57 |
58 | ### Manual Setup
59 |
60 | To use this Microsub server for yourself, please run the following command:
61 |
62 | pip3 install -r requirements.txt
63 |
64 | This command will install the dependencies you need to run the Microsub server.
65 |
66 | Next, copy the config_example.py file into the config.py file and change the values to match your server:
67 |
68 | cp config_example.py config.py
69 |
70 | Next, you need to set up the database for the server. You can do this using the following command:
71 |
72 | python3 seed.py
73 |
74 | Now that you have set up the database, you are ready to run the Microsub server.
75 |
76 | Execute this command to run the server:
77 |
78 | python3 wsgi.py
79 |
80 | ## File Definitions
81 |
82 | Here is the structure of this project:
83 |
84 | ├── Dockerfile
85 | ├── LICENSE
86 | ├── README.md
87 | ├── actions # implementations of the actions defined in the Microsub specification
88 | │ ├── change_to_json.py
89 | │ ├── channels.py
90 | │ ├── following.py
91 | │ ├── preview.py
92 | │ ├── react.py
93 | │ ├── search.py
94 | │ ├── timeline.py
95 | │ └── user_ops.py
96 | ├── authentication # functions to handle authentication and authorization
97 | │ ├── auth.py
98 | │ └── check_token.py
99 | ├── client # views used to read and manage feeds
100 | │ └── client_views.py
101 | ├── config.py # configuration file required for the project to run
102 | ├── feeds # code to transform three different types of feed into a jf2 object, consumed by the server
103 | │ ├── hfeed.py
104 | │ ├── json_feed.py
105 | │ ├── read_later.py
106 | │ └── xml_feed.py
107 | ├── legacy # old code not currently in use
108 | │ └── dates.py
109 | ├── logs
110 | ├── main.py # the main microsub server that responds to queries at /microsub
111 | ├── poll_feeds.py
112 | ├── requirements.txt
113 | ├── requirements_dev.txt
114 | ├── seed.py
115 | ├── server # code that powers feed management and the back-end server
116 | │ ├── server_views.py
117 | │ └── websub.py
118 | ├── static # all static files used in the project
119 | │ ├── css
120 | │ │ └── styles.css
121 | │ ├── emojis.json
122 | │ ├── favicon.ico
123 | │ ├── icons
124 | │ ├── images
125 | │ │ └── wood.avif
126 | │ ├── js
127 | │ │ ├── editor.js # js to load the post editor form
128 | │ │ └── reader.js # js to enhance reading capabilities, including reactions
129 | │ ├── manifest.json
130 | │ └── robots.txt
131 | ├── templates # all the HTML templates for the project
132 | │ ├── 404.html
133 | │ ├── auth.html
134 | │ ├── base.html
135 | │ ├── client # HTML used by the client
136 | │ │ ├── discover.html
137 | │ │ ├── feed_item.html
138 | │ │ ├── preview.html
139 | │ │ ├── read_article.html
140 | │ │ ├── reader.html
141 | │ │ ├── search.html
142 | │ │ └── settings.html
143 | │ ├── index.html
144 | │ ├── server # HTML used by the server management client
145 | │ │ ├── dashboard.html
146 | │ │ └── following.html
147 | │ ├── setup.html
148 | │ └── show_error.html
149 | ├── tox.ini
150 | └── wsgi.py
151 |
152 | This tree was generated using the following command:
153 |
154 | tree -I '*.pyc|*.png|*.svg|*.log|__*'
155 |
156 | ## License
157 |
158 | This project is licensed under an [MIT 0 license](LICENSE).
159 |
160 | ## Acknowledgements
161 |
162 | The author of this project would like to thank the [Feather](https://github.com/feathericons/feather) open source icon set for creating an open repository of icons. This project uses a few icons from Feather in the mobile view.
163 |
164 | This project uses the "[Complete list of github markdown emoji markup](https://gist.github.com/rxaviers/7360908)" Gist for its emoji autocomplete dictionary.
165 |
166 | ## Maintainers
167 |
168 | - [capjamesg](https://github.com/capjamesg)
--------------------------------------------------------------------------------
/__init__.py:
--------------------------------------------------------------------------------
1 | import os
2 | from datetime import timedelta
3 |
4 | import requests
5 | from dateutil import parser
6 | from flask import Flask, render_template, request, send_from_directory, session
7 |
8 | from authentication.check_token import verify
9 | from config import SENTRY_DSN, SENTRY_SERVER_NAME
10 |
# set up sentry for error handling
# Only initialised when a DSN is configured in config.py, so local
# development without Sentry needs no extra setup.
if SENTRY_DSN != "":
    import sentry_sdk
    from sentry_sdk.integrations.flask import FlaskIntegration

    sentry_sdk.init(
        dsn=SENTRY_DSN,
        integrations=[FlaskIntegration()],
        # NOTE(review): 1.0 samples every transaction for tracing —
        # confirm this is intended for production traffic volumes
        traces_sample_rate=1.0,
        server_name=SENTRY_SERVER_NAME,
    )
22 |
23 |
def handle_error(request, session, error_code):
    """Render the shared error page for a failed request.

    Args:
        request: The active Flask request (used for its auth headers).
        session: The Flask session (holds ``access_token`` and ``server_url``).
        error_code: The HTTP status code to report to the client.

    Returns:
        A ``(rendered template, status code)`` tuple. When the user is
        authenticated, the sidebar channel list is fetched from the
        Microsub server; otherwise it is left empty.
    """
    auth_result = verify(request.headers, session)

    if auth_result:
        headers = {"Authorization": session["access_token"]}

        channel_req = requests.get(
            session.get("server_url") + "?action=channels", headers=headers
        )

        all_channels = channel_req.json()["channels"]
    else:
        all_channels = []

    template = "404.html"

    return (
        render_template(
            template, title="Error", error=error_code, channels=all_channels
        ),
        # Previously hard-coded to 500, which made 404/405 pages report
        # the wrong HTTP status; respond with the code the caller gave.
        error_code,
    )
46 |
47 |
def create_app():
    """Application factory: build and configure the Flask app.

    Registers all blueprints, the date-formatting template filter,
    error handlers and static helper routes, then returns the
    configured app instance.
    """
    app = Flask(__name__)

    app.config["SECRET_KEY"] = os.urandom(32)
    app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///microsub.db"
    app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False

    # read config.py file
    app.config.from_pyfile(os.path.join(".", "config.py"), silent=False)

    # set maximum lifetime for session
    app.permanent_session_lifetime = timedelta(days=120)

    # blueprint for non-auth parts of app
    from server.main import main as main_blueprint

    app.register_blueprint(main_blueprint)

    from client.client_views import client as client_blueprint

    app.register_blueprint(client_blueprint)

    from authentication.auth import auth as auth_blueprint

    app.register_blueprint(auth_blueprint)

    from server.websub import websub as websub_blueprint

    app.register_blueprint(websub_blueprint)

    from server.server_views import server_views as server_views_blueprint

    app.register_blueprint(server_views_blueprint)

    # filter used to parse dates
    # source: https://stackoverflow.com/questions/4830535/how-do-i-format-a-date-in-jinja2
    @app.template_filter("strftime")
    def _jinja2_filter_datetime(date, fmt=None):
        date = parser.parse(date)
        native = date.replace(tzinfo=None)
        # honour a caller-supplied format; previously `fmt` was ignored
        # (and the local shadowed the `format` builtin)
        date_format = fmt or "%b %d, %Y"
        return native.strftime(date_format)

    @app.errorhandler(404)
    def page_not_found(e):
        # report 404 (was 400) for missing pages
        return handle_error(request, session, 404)

    @app.errorhandler(405)
    def method_not_allowed(e):
        return handle_error(request, session, 405)

    @app.errorhandler(500)
    def server_error(e):
        # Flask passes the error to the handler and expects a response:
        # previously this took no argument (TypeError at dispatch time)
        # and dropped the return value.
        return handle_error(request, session, 500)

    @app.route("/robots.txt")
    def robots():
        return send_from_directory(app.static_folder, "robots.txt")

    @app.route("/favicon.ico")
    def favicon():
        return send_from_directory(app.static_folder, "favicon.ico")

    @app.route("/emojis.json")
    def emojis():
        return send_from_directory("static", "emojis.json")

    @app.route("/manifest.json")
    def web_app_manifest():
        return send_from_directory("static", "manifest.json")

    @app.route("/assets/<path:path>")
    def assets(path):
        # the rule previously had no <path> converter, so Flask could
        # never supply the `path` argument
        return send_from_directory("assets", path)

    # from werkzeug.middleware.profiler import ProfilerMiddleware
    # app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[5], profile_dir='./profile')

    return app


create_app()
130 |
--------------------------------------------------------------------------------
/actions/change_to_json.py:
--------------------------------------------------------------------------------
def change_to_json(database_result):
    """Convert a DB-API cursor's rows into a list of dicts.

    Keys are taken from the cursor's ``description`` (column names);
    one dict is produced per fetched row.
    """
    column_names = [description[0] for description in database_result.description]

    return [dict(zip(column_names, record)) for record in database_result]
7 |
--------------------------------------------------------------------------------
/actions/channels.py:
--------------------------------------------------------------------------------
1 | import random
2 | import sqlite3
3 | import string
4 |
5 | from flask import jsonify, request
6 |
7 | from .change_to_json import change_to_json
8 |
9 |
def get_channels() -> dict:
    """Return every channel, plus a synthetic "all" channel, as JSON.

    Each channel entry carries its uid, display name and unread count;
    the "all" entry aggregates the unread totals across channels.
    """
    connection = sqlite3.connect("microsub.db")

    with connection:
        cursor = connection.cursor()

        cursor.execute("SELECT uid, channel FROM channels ORDER BY position ASC;")

        channels = change_to_json(cursor)

        final_result = []
        total_unread = 0

        for channel in channels:
            unread_row = cursor.execute(
                "SELECT COUNT(*) FROM timeline WHERE channel = ? AND read_status = 'unread';",
                (channel["uid"],),
            ).fetchone()

            channel["unread"] = unread_row[0]
            channel["name"] = channel["channel"]
            total_unread += channel["unread"]
            final_result.append(channel)
            del channel["channel"]

        # "all" is a special channel that shows every post on the server
        final_result.insert(0, {"uid": "all", "name": "All", "unread": total_unread})

        return jsonify({"channels": final_result}), 200
40 |
41 |
def create_channel(request: request) -> dict:
    """Create a new channel named after the ``name`` form field.

    The channel uid is the lowercased name plus three random letters,
    and the channel is appended after the current last position.

    Returns:
        A JSON list of all channels on success, or a 400 error when
        the name is already taken.
    """
    connection = sqlite3.connect("microsub.db")

    with connection:
        cursor = connection.cursor()
        three_random_letters = "".join(
            random.choice(string.ascii_lowercase) for _ in range(3)
        )

        # check if name taken
        # NOTE: this previously read request.args while the INSERT below
        # reads request.form; both must use the form value, otherwise the
        # uniqueness check never sees the submitted name
        cursor.execute(
            "SELECT * FROM channels WHERE channel = ?", (request.form.get("name"),)
        )

        if cursor.fetchone():
            return jsonify({"error": "This channel name has been taken."}), 400

        existing_channels = cursor.execute(
            "SELECT position FROM channels ORDER BY position DESC LIMIT 1"
        ).fetchone()

        if existing_channels and len(existing_channels) > 0:
            last_position = int(existing_channels[0])
        else:
            last_position = 0

        cursor.execute(
            "INSERT INTO channels VALUES(?, ?, ?)",
            (
                request.form.get("name"),
                request.form.get("name").lower() + three_random_letters,
                last_position + 1,
            ),
        )

        all_channels = cursor.execute(
            "SELECT * FROM channels ORDER BY position ASC"
        ).fetchall()

        return jsonify(all_channels), 200
81 |
82 |
def update_channel(request: request) -> dict:
    """Rename the channel whose uid is given in the ``channel`` form field.

    Returns the updated channel row from the database.
    """
    connection = sqlite3.connect("microsub.db")

    with connection:
        cursor = connection.cursor()

        channel_uid = request.form.get("channel")

        cursor.execute(
            "UPDATE channels SET channel = ? WHERE uid = ?",
            (request.form.get("name"), channel_uid),
        )

        updated_channel = cursor.execute(
            "SELECT * FROM channels WHERE uid = ?", (channel_uid,)
        ).fetchone()

        return updated_channel
98 |
99 |
def delete_channel(request: request) -> dict:
    """Delete the channel whose uid is given in the ``channel`` form field.

    Returns the deleted channel's name, or a 400 error when no such
    channel exists.
    """
    connection = sqlite3.connect("microsub.db")

    with connection:
        cursor = connection.cursor()

        channel_uid = request.form.get("channel")

        matching_channel = cursor.execute(
            "SELECT * FROM channels WHERE uid = ?", (channel_uid,)
        ).fetchone()

        if not matching_channel:
            return jsonify({"error": "channel not found"}), 400

        cursor.execute("DELETE FROM channels WHERE uid = ?", (channel_uid,))

        # matching_channel[0] is the deleted channel's name
        return jsonify({"channel": matching_channel[0]}), 200
119 |
120 |
def reorder_channels(request: request) -> dict:
    """Reorder channels based on the ``channels`` form list.

    When exactly two channel uids are submitted, their positions are
    swapped. Otherwise the submitted list is treated as the complete
    ordering of channel uids and each channel's position is set to its
    index in the list.
    """
    connection = sqlite3.connect("microsub.db")

    channel_uids = request.form.getlist("channels")

    if len(channel_uids) == 2:
        with connection:
            cursor = connection.cursor()
            position_for_first = cursor.execute(
                "SELECT position FROM channels WHERE uid = ?",
                (channel_uids[0],),
            ).fetchone()
            position_for_second = cursor.execute(
                "SELECT position FROM channels WHERE uid = ?",
                (channel_uids[1],),
            ).fetchone()
            cursor.execute(
                "UPDATE channels SET position = ? WHERE uid = ?",
                (position_for_second[0], channel_uids[0]),
            )
            cursor.execute(
                "UPDATE channels SET position = ? WHERE uid = ?",
                (position_for_first[0], channel_uids[1]),
            )

        return {"type": "reorder"}

    with connection:
        cursor = connection.cursor()

        # Previously this branch deleted every channel row and re-inserted
        # them while indexing each submitted value as a dict
        # (channel["name"]) even though getlist() returns strings — it
        # crashed on any full reorder and discarded the channel uids.
        # Updating positions in place preserves the existing rows.
        for position, channel_uid in enumerate(channel_uids, start=1):
            cursor.execute(
                "UPDATE channels SET position = ? WHERE uid = ?",
                (position, channel_uid),
            )

    return {"type": "reorder_channels"}
165 |
--------------------------------------------------------------------------------
/actions/following.py:
--------------------------------------------------------------------------------
1 | import random
2 | import sqlite3
3 | import string
4 |
5 | import indieweb_utils
6 | import requests
7 | from bs4 import BeautifulSoup
8 | from flask import jsonify, request
9 | from urllib.parse import urlparse as parse_url
10 |
11 | from config import CLIENT_ID
12 |
13 |
def get_follow(channel: str) -> dict:
    """List the feeds followed in *channel* ("all" means every channel).

    Returns a JSON object with an ``items`` list of Microsub feed
    descriptors; an empty object when no channel is given.
    """
    connection = sqlite3.connect("microsub.db")

    if not channel:
        return jsonify({}), 200

    with connection:
        cursor = connection.cursor()

        if channel == "all":
            rows = cursor.execute(
                "SELECT * FROM following ORDER BY id DESC;"
            ).fetchall()
        else:
            rows = cursor.execute(
                "SELECT * FROM following WHERE channel = ? ORDER by id DESC;",
                (channel,),
            ).fetchall()

    feeds = [
        {"type": "feed", "url": row[1], "photo": row[3], "name": row[4]}
        for row in rows
    ]

    return jsonify({"items": feeds}), 200
39 |
40 |
def create_follow(request: request) -> dict:
    """Follow a new feed in the channel given in the form data.

    Fetches the feed's home page to discover a title, an icon and a
    WebSub hub, then records the subscription in the ``following``
    table.

    Returns:
        A Microsub "feed" object, or a 400 error when the feed is
        already followed in the given channel.
    """
    connection = sqlite3.connect("microsub.db")

    with connection:
        cursor = connection.cursor()

        url = request.form.get("url").strip()

        # check if already following this feed in the channel
        cursor.execute(
            "SELECT * FROM following WHERE channel = ? AND url = ?",
            (request.form.get("channel"), url),
        )

        if cursor.fetchone():
            return (
                jsonify(
                    {
                        "error": f"You are already following this feed in the {request.form.get('channel')} channel."
                    }
                ),
                400,
            )
        title = url
        favicon = ""

        # a timeout keeps a slow feed host from hanging the whole request
        home_page_request = requests.get(
            indieweb_utils.canonicalize_url(url, url), timeout=10
        ).text

        home_page = BeautifulSoup(home_page_request, "lxml")

        if home_page.find("title"):
            title = home_page.find("title").text

        # "" empty string is etag which will be populated in poll_feeds.py if available
        last_id = cursor.execute("SELECT MAX(id) FROM following").fetchone()

        if last_id and last_id[0] is not None:
            last_id = int(last_id[0]) + 1
        else:
            last_id = 1

        favicon = get_feed_icon(home_page, url)

        # set cadence to hourly by default
        cursor.execute(
            "INSERT INTO following VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?)",
            (
                request.form.get("channel"),
                url,
                "",
                favicon,
                title,
                last_id,
                0,
                0,
                "hourly",
            ),
        )

        subscribe_to_websub_hub(request, home_page, url, cursor)

    return {"type": "feed", "url": url}
104 |
105 |
def get_feed_icon(home_page: BeautifulSoup, url: str) -> str:
    """Find a usable favicon URL for a feed's home page.

    Checks ``<link rel="shortcut icon">`` first, then
    ``<link rel="icon">``, verifies that the icon URL actually resolves
    with a 200, and falls back to the bundled gradient image when
    nothing usable is found.
    """
    favicon = home_page.find("link", rel="shortcut icon")

    url_domain = parse_url(url).netloc

    if favicon:
        favicon = indieweb_utils.canonicalize_url(favicon.get("href"), url_domain, url)
    else:
        favicon = ""

    if favicon == "":
        favicon = home_page.find("link", rel="icon")

        if favicon:
            favicon = indieweb_utils.canonicalize_url(
                favicon.get("href"), url_domain, url
            )

    if favicon:
        try:
            # timeout so an unresponsive icon host cannot stall the follow
            r = requests.get(favicon, timeout=10)

            if r.status_code != 200:
                favicon = ""
        except requests.exceptions.RequestException:
            favicon = ""

    if not favicon or favicon == "":
        # the bundled fallback lives in static/images/ — the path
        # previously said "image", a directory that does not exist
        favicon = "/static/images/gradient.png"

    return favicon
137 |
138 |
def subscribe_to_websub_hub(
    request: request, soup: BeautifulSoup, url: str, cursor: sqlite3.Cursor
) -> dict:
    """Discover a WebSub hub for *url* and request a subscription.

    A hub is looked for first in the incoming request's HTTP ``Link``
    header, then in ``<link rel="hub">`` tags on the fetched page. If
    one is found, a subscribe request is POSTed to it and the
    subscription is recorded in the ``websub_subscriptions`` table.
    Returns None when no hub can be discovered.
    """
    # discover websub_hub

    # check link headers for websub hub

    link_header = request.headers.get("link")

    hub = None

    if link_header:
        # parse link header; normalise separators so parse_header_links
        # can split multiple links
        parsed_links = requests.utils.parse_header_links(
            link_header.rstrip(">").replace(">,<", ",<")
        )

        for link in parsed_links:
            if "rel" in link and "hub" in link["rel"]:
                hub = link["url"]
                break

    # fall back to <link rel="hub"> tags in the page markup
    if hub is None:
        hub_link_tags = soup.find_all("link", rel="hub")

        if len(hub_link_tags) > 0:
            hub = hub_link_tags[0].get("href")

    if hub is None:
        return

    random_string = "".join(random.choice(string.ascii_lowercase) for _ in range(10))

    # NOTE(review): random_string is stored with the subscription below
    # but is not part of hub.callback — presumably the callback URL was
    # meant to be unique per subscription; verify against the callback
    # handling in server/websub.py.
    requests.post(
        hub,
        data={
            "hub.mode": "subscribe",
            "hub.topic": url,
            "hub.callback": CLIENT_ID.strip("/") + "/websub_callback",
        },
    )

    cursor.execute(
        "INSERT INTO websub_subscriptions VALUES (?, ?, ?, ?);",
        (url, random_string, request.form.get("channel"), 1),
    )
185 |
186 |
def unfollow(request: request) -> dict:
    """Stop following a feed URL within the given channel."""
    connection = sqlite3.connect("microsub.db")

    feed_url = request.form.get("url")
    channel_uid = request.form.get("channel")

    with connection:
        cursor = connection.cursor()
        cursor.execute(
            "DELETE FROM following WHERE url = ? AND channel = ?",
            (feed_url, channel_uid),
        )

    return {"type": "unfollow"}
201 |
--------------------------------------------------------------------------------
/actions/preview.py:
--------------------------------------------------------------------------------
1 | import feedparser
2 | import indieweb_utils
3 | import mf2py
4 | import requests
5 | from bs4 import BeautifulSoup
6 | from flask import jsonify, request
7 |
8 | from feeds import hfeed, json_feed, xml_feed
9 | from urllib.parse import urlparse as parse_url
10 |
11 |
def process_h_feed_preview(
    r: requests.Request, items_to_return: list, url: str
) -> list:
    """Parse microformats from *r*'s body and collect h-entry previews.

    Finds the page's h-card (if any) for author context, then converts
    every h-entry — whether nested inside an h-feed or at the top
    level — into a preview item.

    Returns:
        The accumulated items and the detected content type ("h-feed").
    """
    parsed = mf2py.parse(r.text)

    h_card = None

    for item in parsed["items"]:
        # mf2py "type" values are lists (e.g. ["h-card"]); comparing the
        # whole list to the string "h-card" previously never matched, so
        # the h-card was always None
        if "type" in item and item["type"][0] == "h-card":
            h_card = item

    for item in parsed["items"]:
        if "type" in item and item["type"][0] == "h-feed":
            # an h-feed without children yields no entries
            for entry in item.get("children", []):
                if entry["type"][0] == "h-entry":
                    result = hfeed.process_hfeed(entry, h_card, "", url, "")

                    items_to_return.append(result)
        elif "type" in item and item["type"][0] == "h-entry":
            result = hfeed.process_hfeed(item, h_card, "", url, "")

            items_to_return.append(result)

    content_type = "h-feed"

    return items_to_return, content_type
39 |
40 |
def get_preview_items(content_type: str, url: str, r: requests.Request) -> list:
    """Build preview items for a feed URL based on its content type.

    Dispatches to the XML, JSON or h-feed processor and returns the
    collected items together with the (possibly updated) content type.
    """
    items_to_return = []

    looks_like_xml = "xml" in content_type or ".xml" in url
    looks_like_json = "json" in content_type or url.endswith(".json")

    if looks_like_xml:
        feed = feedparser.parse(url)

        print(url)

        for entry in feed.entries:
            item, _ = xml_feed.process_xml_feed(entry, feed, url)

            items_to_return.append(item)
    elif looks_like_json:
        try:
            feed = requests.get(url, timeout=5).json()
        except requests.exceptions.RequestException:
            return jsonify({"error": "invalid url"}), 400

        for entry in feed.get("items", []):
            item, _ = json_feed.process_json_feed(entry, feed)

            items_to_return.append(item)
    else:
        items_to_return, content_type = process_h_feed_preview(r, items_to_return, url)

    return items_to_return, content_type
67 |
68 |
def preview(request: request) -> dict:
    """Generate a feed preview (feed metadata plus items) for a URL.

    Fetches the submitted URL, builds preview items for its content
    type, then fetches the site's home page for a title and favicon.
    Returns a JSON document, or a 400 error for an unreachable URL.
    """
    url = request.form.get("url")

    # A GET is required because the response body is parsed below for
    # h-feed previews — the previous requests.head() returned no body,
    # so those previews were always empty.
    try:
        r = requests.get(url, timeout=10)
    except requests.exceptions.RequestException:
        return jsonify({"error": "invalid url"}), 400

    content_type = r.headers.get("content-type", "")

    items_to_return, content_type = get_preview_items(content_type, url, r)

    feed = {"url": url, "feed_type": content_type}

    # get homepage favicon
    parsed_url = parse_url(url)
    url_protocol = parsed_url.scheme
    url_domain = parsed_url.netloc

    url_to_check = url_protocol + "://" + url_domain

    soup = BeautifulSoup(requests.get(url_to_check, timeout=10).text, "lxml")

    favicon = soup.find("link", rel="shortcut icon")

    if favicon:
        feed["icon"] = indieweb_utils.canonicalize_url(
            favicon.get("href"), url_domain, favicon.get("href")
        )

    if soup.find("title"):
        feed["title"] = soup.find("title").text

    result = {"feed": feed, "items": items_to_return}

    return jsonify(result), 200
111 |
--------------------------------------------------------------------------------
/actions/react.py:
--------------------------------------------------------------------------------
1 | import json
2 | import sqlite3
3 |
4 | from flask import request
5 |
6 |
def react(request: "request") -> dict:
    """Record a reaction (reply, like, etc.) against a timeline item.

    Form fields:
        uid: the timeline item to react to.
        reaction: the reaction type (used when no content is given).
        url: the URL of the reaction post.
        content: reply text; when present the reaction is stored as a reply.

    :return: a ({"type": "success"}, 200) tuple
    """
    connection = sqlite3.connect("microsub.db")

    uid = request.form.get("uid")
    reaction_type = request.form.get("reaction")
    reaction_url = request.form.get("url")

    with connection:
        cursor = connection.cursor()

        # NOTE: raises if uid does not exist (fetchone() returns None)
        timeline_item = cursor.execute(
            "SELECT * FROM timeline WHERE uid = ?", (uid,)
        ).fetchone()

        # column 1 holds the serialized jf2 document for the item
        jf2 = json.loads(timeline_item[1])

        jf2.setdefault("reactions", {})
        jf2["reactions"].setdefault("replies", [])

        if request.form.get("content"):
            # append so that earlier replies are preserved (the previous
            # code replaced the whole list with the newest reply)
            jf2["reactions"]["replies"].append(
                {"content": request.form.get("content"), "url": reaction_url}
            )
        else:
            jf2["reactions"][reaction_type] = ""

        cursor.execute(
            "UPDATE timeline SET jf2 = ? WHERE uid = ?", (json.dumps(jf2), uid)
        )

    return {"type": "success"}, 200
41 |
42 |
def mark_as_read(request: "request") -> dict:
    """Update the read status of timeline entries.

    Form fields:
        method: "mark_read" marks entries read; anything else marks unread.
        channel: "all" updates every channel except notifications; otherwise
            used together with "entry" to update a whole channel.
        entry[] / entry: specific entry uids, or a single channel-wide flag.
        last_read_entry: uid of the newest read entry; everything at or
            before its date in the same channel is updated too.

    :return: {"type": "mark_as_read"}
    """
    connection = sqlite3.connect("microsub.db")

    read_status = request.form.get("method")

    # normalise the requested method to the stored status value
    if read_status == "mark_read":
        read = "read"
    else:
        read = "unread"

    with connection:
        cursor = connection.cursor()

        if request.form.get("channel") == "all":
            # set all items in the timeline to read other than notifications
            # (the notifications channel is the one at position 1)
            notification_channel = cursor.execute(
                "SELECT uid FROM channels WHERE position = 1;"
            ).fetchone()[0]

            cursor.execute(
                "UPDATE timeline SET read_status = ? WHERE channel != ?",
                (
                    read,
                    notification_channel,
                ),
            )

        if request.form.getlist("entry[]"):
            for entry in request.form.getlist("entry[]"):
                cursor.execute(
                    "UPDATE timeline SET read_status = ? WHERE uid = ?",
                    (
                        read,
                        entry,
                    ),
                )

        elif request.form.get("entry"):
            cursor.execute(
                "UPDATE timeline SET read_status = ? WHERE channel = ?",
                (
                    read,
                    request.form.get("channel"),
                ),
            )

        # only run the "everything before the last read entry" sweep when
        # the caller actually supplied last_read_entry; the previous code
        # ran it unconditionally and crashed with a TypeError (indexing a
        # None fetchone() result) when the field was absent or unknown
        if request.form.get("last_read_entry"):
            get_item = cursor.execute(
                "SELECT date, channel FROM timeline WHERE uid = ?;",
                (request.form.get("last_read_entry"),),
            ).fetchone()

            if get_item:
                cursor.execute(
                    "UPDATE timeline SET read_status = ? WHERE date <= ? AND channel = ?",
                    (read, get_item[0], get_item[1]),
                )

    return {"type": "mark_as_read"}
100 |
--------------------------------------------------------------------------------
/actions/search.py:
--------------------------------------------------------------------------------
1 | import json
2 | import sqlite3
3 |
4 | import requests
5 | from flask import jsonify, request
6 |
7 |
def search_for_content(request: request) -> dict:
    """Search timeline items for a query string.

    Form fields:
        channel: channel uid to search, or "all" for every channel.
        query: substring matched (LIKE) against the stored jf2 documents.

    :return: a JSON response with an "items" list of
        [jf2, read_status, uid] triples
    """
    channel = request.form.get("channel")
    query = request.form.get("query")

    connection = sqlite3.connect("microsub.db")

    with connection:
        cursor = connection.cursor()

        # select full rows (not just jf2): the item indices below mirror
        # get_timeline, which reads columns 1, 3 and 5 of the timeline
        # row — the old single-column SELECT made those indexes crash
        if channel == "all":
            result = cursor.execute(
                "SELECT * FROM timeline WHERE jf2 LIKE ? ORDER BY date DESC;",
                (f"%{query}%",),
            ).fetchall()
        else:
            result = cursor.execute(
                "SELECT * FROM timeline WHERE jf2 LIKE ? AND channel = ? ORDER BY date DESC;",
                (f"%{query}%", channel),
            ).fetchall()

    items = [[json.loads(item[1]), item[3], item[5]] for item in result]

    return jsonify({"items": items})
31 |
32 |
def search_for_feeds(request: request) -> dict:
    """Query the IndieWeb search service for feeds matching a query.

    :return: a JSON response with an "items" list; empty when the
        service is unreachable or answers with a non-200 status
    """
    query = request.form.get("query").strip()

    try:
        # let requests build the query string so the user's query is
        # properly URL-encoded, and bound the wait with a timeout
        r = requests.get(
            "https://indieweb-search.jamesg.blog/results",
            params={"query": f"discover {query}", "format": "jf2"},
            timeout=10,
        )
    except requests.exceptions.RequestException:
        return jsonify({"items": []})

    if r.status_code == 200:
        return jsonify({"items": r.json()})

    return jsonify({"items": []})
46 |
--------------------------------------------------------------------------------
/actions/timeline.py:
--------------------------------------------------------------------------------
1 | import json
2 | import sqlite3
3 |
4 | from flask import jsonify, request
5 |
6 | from .change_to_json import change_to_json
7 |
8 |
def get_timeline(request: request) -> dict:
    """Return one page (up to 21 rows) of timeline items for a channel.

    Query parameters:
        channel: the channel uid, or "all" for every channel.
        after / before: paging cursors (timeline row ids) — "before"
            pages backwards, "after" pages forwards.

    :return: a (JSON response, 200) tuple with "items" and a "paging"
        dict carrying the next "before"/"after" cursors
    """
    channel = request.args.get("channel")
    after = request.args.get("after")
    before = request.args.get("before")

    connection = sqlite3.connect("microsub.db")

    with connection:
        cursor = connection.cursor()

        # NOTE(review): the channel filter is interpolated twice below,
        # producing "channel = ? AND channel = ? AND ..." — redundant
        # but harmless; confirm whether one filter was meant to differ.
        channel_arg = "channel = ? AND"
        second_channel_arg = "channel = ? AND"
        channel_tuple = (
            channel,
            channel,
        )

        # "all" drops the channel filter entirely
        if channel == "all":
            channel_arg = ""
            second_channel_arg = ""
            channel_tuple = ()

        # LIMIT 21 fetches one row beyond the 20-item page so the paging
        # logic below can tell whether another page exists
        if not after and not before:
            item_list = cursor.execute(
                f"""SELECT * FROM timeline WHERE {channel_arg} {second_channel_arg} hidden = 0 AND
                feed_id IN (SELECT id FROM following WHERE muted = 0 AND blocked = 0)
                ORDER BY date DESC, id DESC LIMIT 21;""",
                channel_tuple,
            ).fetchall()
        elif before and not after:
            item_list = cursor.execute(
                f"""SELECT * FROM timeline WHERE {channel_arg} {second_channel_arg} hidden = 0 AND
                id < ? AND feed_id IN (SELECT id FROM following WHERE muted = 0 AND blocked = 0)
                ORDER BY date DESC, id DESC LIMIT 21;""",
                channel_tuple + (int(before),),
            ).fetchall()
        else:
            item_list = cursor.execute(
                f"""SELECT * FROM timeline WHERE {channel_arg} {second_channel_arg} hidden = 0 AND
                id > ? AND feed_id IN (SELECT id FROM following WHERE muted = 0 AND blocked = 0)
                ORDER BY date DESC, id DESC LIMIT 21;""",
                channel_tuple + (int(after),),
            ).fetchall()

        # row layout: 1 = serialized jf2, 3 = read status (compared with
        # "unread" below), 5 = the item's uid — TODO confirm against schema
        items = [[json.loads(item[1]), item[3], item[5]] for item in item_list]

        for i in items:
            if i[1] == "unread":
                i[0]["_is_read"] = False
            else:
                i[0]["_is_read"] = True

            i[0]["_id"] = i[2]

        # strip the helper columns, keeping only the jf2 dicts
        items = [i[0] for i in items]

        if (
            len(item_list) > 20
            and not request.args.get("after")
            and not request.args.get("before")
        ):
            # 8 = id
            before = item_list[-1][8]
            after = ""
        elif len(item_list) <= 21 and len(item_list) != 0:
            before = item_list[0][8]
            after = item_list[-1][8]
        else:
            before = ""
            after = ""

    return jsonify({"items": items, "paging": {"before": before, "after": after}}), 200
81 |
82 |
def get_post(request: request) -> dict:
    """Look up a single timeline entry by its uid and return it as JSON."""
    post_id = request.args.get("id")

    db = sqlite3.connect("microsub.db")

    with db:
        cur = db.cursor()
        cur.execute("SELECT * FROM timeline WHERE uid = ?", (post_id,))

    # change_to_json consumes the rows still pending on the cursor
    return jsonify({"post": change_to_json(cur)}), 200
94 |
95 |
def remove_entry(request: request) -> dict:
    """Soft-delete timeline entries by setting their hidden flag to 1."""
    connection = sqlite3.connect("microsub.db")

    # a batch arrives as "entry[]"; a single deletion as "entry"
    targets = request.form.getlist("entry[]") or [request.form.get("entry")]

    for uid in targets:
        # each update commits when the context manager exits
        with connection:
            connection.execute(
                "UPDATE timeline SET hidden = 1 WHERE uid = ?", (uid,)
            )

    return {"type": "remove_entry"}
117 |
--------------------------------------------------------------------------------
/actions/user_ops.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 |
3 | from flask import jsonify, request
4 |
5 |
def get_muted(request: "request") -> list:
    """Return all muted feeds in a channel.

    :param request: the incoming flask request; the channel uid is read
        from the "channel" query parameter
    :return: the matching rows from the ``following`` table (the old
        ``-> dict`` annotation was wrong — fetchall() returns a list)
    """
    connection = sqlite3.connect("microsub.db")

    with connection:
        cursor = connection.cursor()
        cursor.execute(
            "SELECT * FROM following WHERE muted = 1 AND channel = ?",
            (request.args.get("channel"),),
        )

    return cursor.fetchall()
17 |
18 |
def _set_following_flag(request, column: str, value: int, action: str) -> dict:
    """Set a boolean column on a followed feed and report the outcome.

    Shared implementation for mute/block/unblock/unmute, which were four
    copies of the same code differing only in column, value, and label.

    :param request: the incoming flask request; the feed URL is read
        from the "url" form field
    :param column: the column to update ("muted" or "blocked"); only
        ever called with these fixed literals, so the f-string below
        cannot inject user input
    :param value: 1 to set the flag, 0 to clear it
    :param action: action name echoed back in the JSON response
    :return: a (JSON response, status code) tuple
    """
    connection = sqlite3.connect("microsub.db")
    url = request.form.get("url")

    with connection:
        cursor = connection.cursor()

        # the UPDATE is a no-op when the feed is not followed; the
        # SELECT below determines which case applies
        cursor.execute(
            f"UPDATE following SET {column} = ? WHERE url = ?", (value, url)
        )

        get_url = cursor.execute(
            "SELECT url FROM following WHERE url = ?", (url,)
        ).fetchone()

    if get_url:
        return jsonify({"url": get_url[0], "type": action}), 200

    return jsonify({"error": "You are not following this feed."}), 400


def mute(request: request) -> dict:
    """Mute a followed feed (hide its posts without unfollowing)."""
    return _set_following_flag(request, "muted", 1, "mute")


def block(request: request) -> dict:
    """Block a followed feed."""
    return _set_following_flag(request, "blocked", 1, "block")


def unblock(request: request) -> dict:
    """Unblock a previously blocked feed."""
    return _set_following_flag(request, "blocked", 0, "unblock")


def unmute(request: request) -> dict:
    """Unmute a previously muted feed."""
    return _set_following_flag(request, "muted", 0, "unmute")
97 |
--------------------------------------------------------------------------------
/authentication/auth.py:
--------------------------------------------------------------------------------
1 | import base64
2 | import hashlib
3 | import random
4 | import string
5 | import requests
6 |
7 | import indieweb_utils
8 | from flask import Blueprint, flash, redirect, render_template, request, session
9 |
10 | from config import CALLBACK_URL, CLIENT_ID, ME
11 |
12 | auth = Blueprint("auth", __name__)
13 |
14 |
@auth.route("/callback")
def indieauth_callback_handler_view():
    """Handle the IndieAuth authorization-code callback.

    Validates the code/state with the token endpoint (via
    indieweb_utils), stores the signed-in user's identity and access
    token in the session, and looks up the user's media endpoint.
    """
    code = request.args.get("code")
    state = request.args.get("state")

    # these are the scopes necessary for the application to run
    required_scopes = ["read", "channels"]

    message, response = indieweb_utils.indieauth_callback_handler(
        code,
        state,
        session.get("token_endpoint"),
        session["code_verifier"],
        session.get("state"),
        ME,
        CALLBACK_URL,
        CLIENT_ID,
        required_scopes,
    )

    # a non-None message means the token exchange failed
    if message is not None:
        flash(message)
        return redirect("/login")

    # the verifier is single-use; drop it once the exchange succeeds
    session.pop("code_verifier")

    session["me"] = response.get("me")
    session["access_token"] = response.get("access_token")
    session["scopes"] = response.get("scope", "")

    session.permanent = True

    # get media endpoint url
    try:
        req = requests.get(
            session.get("token_endpoint"),
            headers={"Authorization": "Bearer " + session.get("access_token")},
            timeout=10,
        )
        session["media_endpoint"] = req.json().get("media_endpoint")
    except (requests.exceptions.RequestException, ValueError):
        # ValueError: the endpoint answered with a non-JSON body
        session["media_endpoint"] = None

    return redirect("/")
58 |
59 |
@auth.route("/logout")
def logout():
    """Clear the signed-in user's session and send them to the login page."""
    # pop with a default so hitting /logout twice (or with a stale
    # session missing either key) does not raise a KeyError
    session.pop("me", None)
    session.pop("access_token", None)

    return redirect("/login")
66 |
67 |
@auth.route("/login", methods=["GET"])
def login():
    """Render the IndieAuth sign-in page."""
    page_title = "Cinnamon Login"
    return render_template("auth.html", title=page_title)
71 |
72 |
@auth.route("/discover", methods=["POST"])
def discover_auth_endpoint():
    """Discover a user's IndieAuth endpoints and begin the auth flow.

    Reads the "me" domain from the submitted form, discovers the
    authorization/token/micropub/microsub endpoints, stores them in the
    session, then redirects to the authorization endpoint with a PKCE
    (S256) challenge and a CSRF state value.
    """
    domain = request.form.get("me")

    headers_to_find = [
        "authorization_endpoint",
        "token_endpoint",
        "micropub",
        "microsub",
    ]

    headers = indieweb_utils.discover_endpoints(domain, headers_to_find)

    if not headers.get("authorization_endpoint"):
        flash(
            "A valid IndieAuth authorization endpoint could not be found on your website."
        )
        return redirect("/login")

    if not headers.get("token_endpoint"):
        flash("A valid IndieAuth token endpoint could not be found on your website.")
        return redirect("/login")

    authorization_endpoint = headers.get("authorization_endpoint")
    token_endpoint = headers.get("token_endpoint")

    session["micropub_url"] = headers.get("micropub")
    session["server_url"] = headers.get("microsub")

    # use a cryptographically secure RNG for the verifier and state
    rng = random.SystemRandom()

    # RFC 7636 requires a 43-128 character code verifier; the previous
    # 30-character value was below the minimum
    random_code = "".join(
        rng.choice(string.ascii_uppercase + string.digits) for _ in range(64)
    )

    session["code_verifier"] = random_code
    session["authorization_endpoint"] = authorization_endpoint
    session["token_endpoint"] = token_endpoint

    # S256 challenge = BASE64URL(SHA256(verifier)) without "=" padding
    # (RFC 7636 section 4.2); the previous code base64-encoded the hex
    # digest, which spec-compliant servers reject at token exchange
    sha256_digest = hashlib.sha256(random_code.encode("utf-8")).digest()
    code_challenge = (
        base64.urlsafe_b64encode(sha256_digest).decode("utf-8").rstrip("=")
    )

    state = "".join(
        rng.choice(string.ascii_uppercase + string.digits) for _ in range(10)
    )

    session["state"] = state

    return redirect(
        authorization_endpoint
        + "?client_id="
        + CLIENT_ID
        + "&redirect_uri="
        + CALLBACK_URL
        + "&scope=read follow mute block channels create&response_type=code&code_challenge="
        + code_challenge
        + "&code_challenge_method=S256&state="
        + state
    )
131 |
--------------------------------------------------------------------------------
/authentication/check_token.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from flask import session
3 |
4 | from config import ME
5 |
6 |
def verify(headers, session) -> bool:
    """Check whether the request carries an access token.

    Looks for a token in the Authorization header first, then falls
    back to the session.

    NOTE(review): actual validation of the token against the token
    endpoint is currently disabled (the remote check that used to live
    here was commented out), so ANY non-empty token is accepted.
    Re-enable remote verification before exposing this service publicly.

    :param headers: request headers (mapping)
    :param session: the flask session (mapping)
    :return: True when a token is present, False otherwise
    """
    if headers.get("Authorization") is not None:
        return True

    if session.get("access_token"):
        return True

    return False
25 |
--------------------------------------------------------------------------------
/config_example.py:
--------------------------------------------------------------------------------
# Example configuration: copy this file to config.py and fill in your values.

CLIENT_ID = "https://example.com"  # url at which you will host your server
CALLBACK_URL = CLIENT_ID + "/callback"  # IndieAuth redirect URI, derived from CLIENT_ID
ME = "https://example.com"  # your domain name

SECRET_KEY = ""  # set this to a long, random string (used to sign flask sessions)

PROJECT_DIRECTORY = "/home/username/"  # the root directory of the project

SERVER_API_WEBHOOK = False  # whether or not to use the server API webhook
WEBHOOK_CHANNEL = (
    "channel_name"  # the channel to which new posts should be sent via a webhook
)
WEBHOOK_TOKEN = "auth_token"  # the auth token to be sent in an Authorization header with the webhook

SENTRY_DSN = "sentry_url"  # your sentry logging URL (if you want to log with Sentry)
SENTRY_SERVER_NAME = (
    "Microsub Client and Server"  # the name of your server for use in Sentry
)

TWITTER_BEARER_TOKEN = ""  # used to generate reply contexts in the post editor
21 |
--------------------------------------------------------------------------------
/dates.py:
--------------------------------------------------------------------------------
1 | import datetime
2 |
3 | # get average of dates
4 | # not currently in use but may be used later
5 |
6 |
def find_poll_cadence(dates):
    """Classify a feed's posting cadence as "hourly" or "daily".

    Computes the gap in hours between each consecutive pair of YYYYMMDD
    dates (the first entry is compared with itself, contributing a zero
    gap) and averages the first five gaps; an average under 24 hours is
    "hourly", anything else is "daily".
    """
    gaps_in_hours = []

    for index, raw_date in enumerate(dates):
        previous_raw = dates[index - 1] if index > 0 else raw_date

        current = datetime.datetime.strptime(raw_date, "%Y%m%d")
        previous = datetime.datetime.strptime(previous_raw, "%Y%m%d")

        delta = current - previous
        gaps_in_hours.append(delta.days * 24 + delta.seconds // 3600)

    window = gaps_in_hours[:5]

    # default to a daily-ish 24-hour average when there is no usable data
    if window and sum(window) > 0:
        average_gap = sum(window) / len(window)
    else:
        average_gap = 24

    return "hourly" if average_gap < 24 else "daily"
37 |
--------------------------------------------------------------------------------
/feeds/clean.py:
--------------------------------------------------------------------------------
1 | from bs4 import BeautifulSoup
2 |
def clean_html_from_entry(text):
    """Reduce an HTML fragment to newline-separated plain text.

    NOTE(review): the second pass below is intended (per the inline
    comment) to keep only p/a/div/section/hr tags, but
    ``find_all(reject=[...])`` filters on a literal ``reject``
    *attribute*, which no tag has — so it matches nothing and removes
    nothing. By that point ``get_text`` has already stripped all markup
    anyway, so the function effectively returns the plain text of
    ``text``. Confirm the intended behavior before "fixing" the filter,
    since a real tag filter would also drop the text inside removed tags.
    """
    content = BeautifulSoup(text, "lxml").get_text(
        separator="\n"
    )

    # only allow p tags, a tags, divs, sections, and hrs
    soup = BeautifulSoup(content, "lxml")

    for tag in soup.find_all(reject=["p", "a", "div", "section", "hr"]):
        tag.extract()

    return soup.get_text(separator="\n")
--------------------------------------------------------------------------------
/feeds/hfeed.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import json
3 | import random
4 | import string
5 |
6 | import indieweb_utils
7 | from .clean import clean_html_from_entry
8 | from dateutil.parser import parse
9 | from urllib.parse import urlparse as parse_url
10 |
11 |
def process_hfeed_author(
    jf2: dict, url: str, child: dict, hcard: dict, feed_title: str, feed_icon: str
) -> dict:
    """Attach an author card (and photo) to a jf2 item from an h-feed.

    Tries, in order: the feed-level h-card, the entry's own author
    property (resolved via authorship discovery), and finally the feed
    title/icon as a fallback author.

    :param jf2: the jf2 item being built (mutated and returned)
    :param url: the feed URL
    :param child: the parsed mf2 entry
    :param hcard: the feed-level h-card items (list, possibly empty)
    :param feed_title: feed title used for the fallback author card
    :param feed_icon: feed icon used for the fallback author photo
    :return: jf2 with "author" (and possibly "photo") filled in
    """
    domain_name = parse_url(url).netloc

    if hcard:
        jf2["author"] = {
            "type": "card",
            "name": hcard[0]["properties"]["name"][0],
            "url": indieweb_utils.canonicalize_url(
                hcard[0]["properties"]["url"][0],
                domain_name,
                child["properties"]["url"][0],
            ),
        }

        if hcard[0]["properties"].get("photo"):
            jf2["photo"] = indieweb_utils.canonicalize_url(
                hcard[0]["properties"]["photo"][0],
                domain_name,
                child["properties"]["url"][0],
            )

    # NOTE(review): mf2 "author" values are normally *lists*, yet this
    # branch requires a dict and then indexes [0] — confirm the shape
    # this is actually called with.
    elif child["properties"].get("author") is not None and isinstance(
        child["properties"].get("author"), dict
    ):
        if type(child["properties"].get("author")[0]["properties"]) == str:
            h_card = [{"properties": {"name": child["properties"].get("author")[0]}}]
        elif child["properties"].get("author")[0]["properties"].get("url"):
            h_card = indieweb_utils.discover_author(
                child["properties"].get("author")[0]["properties"].get("url")[0]
            )
        else:
            h_card = []

        if h_card and len(h_card) > 0:
            # NOTE(review): the first branch above builds h_card as a
            # list of cards, but the lookups below treat it as a single
            # dict — one of the two shapes will raise here; verify which
            # shape discover_author returns.
            jf2["author"] = {
                "type": "card",
                "name": h_card["properties"]["name"][0],
                "url": indieweb_utils.canonicalize_url(
                    h_card["properties"]["url"][0],
                    domain_name,
                    child["properties"]["url"][0],
                ),
            }

            if h_card["properties"].get("photo"):
                jf2["photo"] = indieweb_utils.canonicalize_url(
                    h_card["properties"]["photo"][0],
                    domain_name,
                    child["properties"]["url"][0],
                )
    elif feed_title is not None:
        # no usable author information: fall back to the feed itself
        jf2["author"] = {
            "type": "card",
            "name": feed_title,
            "url": indieweb_utils.canonicalize_url(
                url, domain_name, child["properties"]["url"][0]
            ),
        }

        if feed_icon is not None:
            jf2["author"]["photo"] = feed_icon

    return jf2
77 |
78 |
def get_name_and_content(child: dict, jf2: dict, url: str) -> dict:
    """Derive a title and content for a jf2 item from an mf2 h-entry.

    :param child: the parsed mf2 entry
    :param jf2: the jf2 item being built (mutated and returned)
    :param url: the feed URL, used for the last-resort title
    :return: jf2 with "title" and (when available) "content" set
    """
    if child["properties"].get("name"):
        jf2["title"] = child["properties"].get("name")[0]
    elif jf2.get("author") and jf2["author"]["name"]:
        jf2["title"] = f"Post by {jf2['author']['name']}"
    else:
        # fall back to the feed's domain name
        jf2["title"] = f"Post by {url.split('/')[2]}"

    if child["properties"].get("content"):
        # keep the raw markup under "html" and the plain value under
        # "text" — previously the "html" key held tag-stripped text,
        # the opposite of the summary branch below
        jf2["content"] = {
            "html": child["properties"].get("content")[0]["html"],
            "text": child["properties"].get("content")[0]["value"],
        }
    elif child["properties"].get("summary"):
        jf2["content"] = {
            "text": clean_html_from_entry(child["properties"].get("summary")[0]),
            "html": child["properties"].get("summary")[0],
        }

    return jf2
99 |
100 |
def process_hfeed(
    child, hcard, channel_uid, url, feed_id, feed_title=None, feed_icon=None
):
    """Convert one mf2 h-entry into a jf2 item and queue it for storage.

    Builds the jf2 item (url, post type, author, media, content, and a
    YYYYMMDD published date), then appends a full record for it to
    feed_items.json, one JSON object per line.

    :param child: the parsed mf2 entry
    :param hcard: the feed-level h-card items (passed through to author lookup)
    :param channel_uid: uid of the channel this feed belongs to
    :param url: the feed URL
    :param feed_id: database id of the followed feed
    :param feed_title: optional feed title (fallback author name)
    :param feed_icon: optional feed icon (fallback author photo)
    :return: the jf2 item, or {} when the entry has no URL property
    """
    parsed_url = parse_url(url)
    domain_name = parsed_url.netloc

    # entries without a URL cannot be linked or canonicalized; skip them
    if not child.get("properties") or not child["properties"].get("url"):
        return {}

    jf2 = {
        "url": indieweb_utils.canonicalize_url(
            child["properties"]["url"][0],
            domain_name,
            child["properties"]["url"][0],
        ),
    }

    # only derive a specific post type when there is content to inspect
    if child["properties"].get("content"):
        jf2["type"] = indieweb_utils.get_post_type(child)
    else:
        jf2["type"] = "article"

    jf2 = process_hfeed_author(jf2, url, child, hcard, feed_title, feed_icon)

    if child["properties"].get("photo"):
        jf2["photo"] = indieweb_utils.canonicalize_url(
            child["properties"].get("photo")[0],
            domain_name,
            child["properties"]["url"][0],
        )

    if child["properties"].get("video"):
        video_url = indieweb_utils.canonicalize_url(
            child["properties"].get("video")[0],
            domain_name,
            child["properties"]["url"][0],
        )
        jf2["video"] = [{"content_type": "", "url": video_url}]

    if child["properties"].get("category"):
        jf2["category"] = child["properties"].get("category")[0]

    jf2 = get_name_and_content(child, jf2, url)

    # copy webmention-style reply/reaction targets straight through
    wm_properties = ["in-reply-to", "like-of", "bookmark-of", "repost-of"]

    for w in wm_properties:
        if child["properties"].get(w):
            jf2[w] = child["properties"].get(w)[0]

    # NOTE(review): every other property is read from child["properties"],
    # but "published" is read from the top level of child — confirm this
    # is intentional and not a missed ["properties"] lookup.
    if child.get("published"):
        parse_date = parse(child["published"][0])

        if parse_date:
            month_with_padded_zero = str(parse_date.month).zfill(2)
            day_with_padded_zero = str(parse_date.day).zfill(2)
            date = f"{parse_date.year}{month_with_padded_zero}{day_with_padded_zero}"
        else:
            # unparseable date: stamp the item with today instead
            month_with_padded_zero = str(datetime.datetime.now().month).zfill(2)
            day_with_padded_zero = str(datetime.datetime.now().day).zfill(2)
            date = f"{datetime.datetime.now().year}{month_with_padded_zero}{day_with_padded_zero}"
    else:
        date = datetime.datetime.now().strftime("%Y%m%d")

    # random 10-letter uid for the stored timeline record
    ten_random_letters = "".join(
        random.choice(string.ascii_lowercase) for _ in range(10)
    )

    jf2["published"] = date

    record = {
        "channel_uid": channel_uid,
        "result": json.dumps(jf2),
        "published": date,
        "unread": "unread",
        "url": jf2["url"],
        "uid": ten_random_letters,
        "hidden": 0,
        "feed_id": feed_id,
        "etag": "",
        "feed_url": url,
    }

    # append one JSON record per line for later bulk insertion
    with open("feed_items.json", "a+") as file:
        file.write(json.dumps(record) + "\n")

    return jf2
188 |
--------------------------------------------------------------------------------
/feeds/json_feed.py:
--------------------------------------------------------------------------------
1 | import datetime
2 |
3 | import indieweb_utils
4 | from bs4 import BeautifulSoup
5 | from dateutil.parser import parse
6 | from urllib.parse import urlparse as parse_url
7 | from .clean import clean_html_from_entry
8 |
9 |
def process_json_feed_author(item: dict, feed: dict, result: dict) -> dict:
    """Attach an author card to a jf2 item built from a JSON Feed entry.

    Prefers the feed-level author (when the item has none), then the
    item's own author, and finally falls back to the feed title.

    :param item: the JSON Feed item
    :param feed: the parsed JSON Feed document
    :param result: the jf2 item being built (mutated and returned)
    :return: result with an "author" card added
    """
    domain_name = parse_url(item.get("url")).netloc

    if feed.get("author") and not item.get("author"):
        result["author"] = {"type": "card", "name": feed.get("author").get("name")}
        if feed.get("home_page_url"):
            result["author"]["url"] = indieweb_utils.canonicalize_url(
                feed.get("home_page_url"),
                domain_name,
                feed.get("home_page_url"),
            )
        else:
            result["author"]["url"] = indieweb_utils.canonicalize_url(
                feed.get("feed_url"),
                domain_name,
                feed.get("feed_url"),
            )
    elif item.get("author") is not None and item["author"].get("url"):
        author_url_domain = parse_url(item["author"].get("url")).netloc

        result["author"] = {
            "type": "card",
            "name": item.get("author").get("name"),
            "url": indieweb_utils.canonicalize_url(
                item["author"].get("url"),
                author_url_domain,
                item["author"].get("url"),
            ),
        }

        if item["author"].get("avatar"):
            result["author"]["photo"] = item["author"].get("avatar")
    else:
        # NOTE(review): this branch is reached exactly when the item has
        # no author (or its author has no url), yet it still reads
        # item["author"]["url"] — it will raise for author-less items.
        # Confirm upstream guarantees an author object here.
        author_url_domain = parse_url(item["author"].get("url")).netloc

        result["author"] = {
            "type": "card",
            "name": feed.get("title"),
            "url": indieweb_utils.canonicalize_url(
                item["author"].get("url"),
                author_url_domain,
                item["author"].get("url"),
            ),
        }

    return result
56 |
57 |
def process_attachments(item: dict, result: dict) -> dict:
    """Copy the first audio or video attachment of a JSON Feed item.

    Only one attachment is collected because clients are only expected
    to render one.

    :param item: the JSON Feed item (must have an "attachments" list)
    :param result: the jf2 item being built (mutated and returned)
    :return: result, with "audio" or "video" set when a match is found
    """
    for attachment in item.get("attachments"):
        # mime_type is optional in JSON Feed; treat a missing value as
        # "no match" instead of crashing on `"audio" in None`
        mime_type = attachment.get("mime_type") or ""

        if "audio" in mime_type:
            result["audio"] = [
                {"content_type": attachment.get("mime_type"), "url": attachment.get("url")}
            ]
            break

        if "video" in mime_type:
            result["video"] = [
                {"content_type": attachment.get("mime_type"), "url": attachment.get("url")}
            ]
            break

    return result
72 |
73 |
def process_json_feed(item: dict, feed: dict) -> tuple:
    """Convert one JSON Feed item into a jf2 item.

    :param item: the JSON Feed item
    :param feed: the parsed JSON Feed document (used for author fallback)
    :return: a (jf2 item, YYYYMMDD published date) tuple
    """
    parsed_url = parse_url(item.get("url"))
    result = {
        "type": "entry",
        "url": indieweb_utils.canonicalize_url(
            item.get("url"), parsed_url.netloc, item.get("url")
        ),
    }

    if item.get("image"):
        result["photo"] = item.get("image")

    result = process_json_feed_author(item, feed, result)

    # get audio or video attachment
    # only collect one because clients will only be expected to render one attachment
    if item.get("attachments"):
        result = process_attachments(item, result)

    if item.get("published"):
        parse_date = parse(item["published"])

        if parse_date:
            month_with_padded_zero = str(parse_date.month).zfill(2)
            day_with_padded_zero = str(parse_date.day).zfill(2)
            date = f"{parse_date.year}{month_with_padded_zero}{day_with_padded_zero}"
        else:
            # unparseable date: stamp the item with today instead
            month_with_padded_zero = str(datetime.datetime.now().month).zfill(2)
            day_with_padded_zero = str(datetime.datetime.now().day).zfill(2)
            date = f"{datetime.datetime.now().year}{month_with_padded_zero}{day_with_padded_zero}"
    else:
        date = datetime.datetime.now().strftime("%Y%m%d")

    result["published"] = date

    if item.get("content_html"):
        result["content"] = {}
        result["content"]["text"] = clean_html_from_entry(item.get("content_html"))
        result["content"]["html"] = item.get("content_html")

    if item.get("title"):
        result["title"] = item.get("title")
    else:
        # NOTE(review): assumes process_json_feed_author set result["author"];
        # a KeyError is possible if no author branch matched — verify.
        result[
            "title"
        ] = f"Post by {result['author'].get('name', item.get('url').split('/')[2])}"

    if item.get("url"):
        result["url"] = item.get("url")

    if item.get("post_type"):
        result["post-type"] = indieweb_utils.get_post_type(item)

    return result, date
128 |
--------------------------------------------------------------------------------
/feeds/read_later.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import json
3 | import sqlite3
4 |
5 | import indieweb_utils
6 | import requests
7 | from bs4 import BeautifulSoup
8 | from urllib.parse import urlparse as parse_url
9 | from .clean import clean_html_from_entry
10 |
11 |
def save_read_later_to_database(record: dict) -> None:
    """Insert a processed read-later record into the timeline table.

    :param record: dict with a "result" jf2 dict containing at least
        "url" and "published"
    :return: None
    """
    database = sqlite3.connect("microsub.db")

    with database:
        cursor = database.cursor()

        # allocate the next timeline id: MAX(id) + 1, or 1 for an empty
        # table (the previous code incremented twice and skipped ids)
        last_id = cursor.execute("SELECT MAX(id) FROM timeline;").fetchone()

        if last_id[0] is not None:
            next_id = last_id[0] + 1
        else:
            next_id = 1

        feed_id = cursor.execute(
            "SELECT id FROM following WHERE channel = 'read-later';"
        ).fetchone()[0]

        cursor.execute(
            """INSERT INTO timeline VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?);""",
            (
                "read-later",
                json.dumps(record["result"]),
                record["result"]["published"],
                0,
                record["result"]["url"],
                record["result"]["url"],
                0,
                feed_id,
                next_id,
            ),
        )
45 |
46 |
def get_read_later_photo(record: dict, soup: BeautifulSoup, url: str) -> dict:
    """Find a likely "featured image" for a page and store it on record.

    Strips <header> and <nav> elements first so navigation logos are not
    mistaken for content images.

    :param record: the read-later record (mutated in place)
    :param soup: the parsed page (mutated: header/nav are decomposed)
    :param url: the page URL, used to canonicalize the image src
    :return: the record dict (the previous version returned None, which
        clobbered the caller's record when it rebound the result)
    """
    parsed_url = parse_url(url)

    # remove header tags
    for header in soup.find_all("header"):
        header.decompose()

    # remove nav tags
    for nav in soup.find_all("nav"):
        nav.decompose()

    # get all images
    all_images = soup.find_all("img")

    if all_images and len(all_images) > 0 and all_images[0].get("src"):
        # u-photo images are part of the h-entry itself, not a banner
        all_images = [i for i in all_images if "u-photo" not in i.get("class", [])]

        if len(all_images) > 0:
            record["photo"] = indieweb_utils.canonicalize_url(
                all_images[0]["src"], parsed_url.netloc, all_images[0]["src"]
            )

    return record
69 |
70 |
def read_later(url: str) -> None:
    """
    Processes a URL and saves it to the Microsub timeline.

    :param url: The URL to process.
    :type url: str
    :return: None
    :rtype: None
    """
    parsed_url = parse_url(url)

    try:
        r = requests.get(url, timeout=5, allow_redirects=True)
    except requests.exceptions.RequestException:
        return None

    if r.status_code != 200:
        return None

    soup = BeautifulSoup(r.text, "lxml")

    # prefer the marked-up h-entry, then an <article>, then the whole
    # page. find() takes a tag name, so the h-entry microformat class
    # must be matched with class_= — the old find(".h-entry") CSS
    # selector string never matched anything.
    h_entry = soup.find(class_="h-entry")
    article = soup.find("article")

    if h_entry:
        content = h_entry.get_text(separator="\n")
    elif article:
        content = article.get_text(separator="\n")
    else:
        # clean_html_from_entry expects markup text, not a soup object
        content = clean_html_from_entry(str(soup))

    date = datetime.datetime.now().strftime("%Y%m%d")

    # soup.title can be None on title-less pages; fall back to the URL
    title = soup.title.text if soup.title else url

    record = {
        "result": {
            "url": url,
            "type": "summary",
            "content": {"text": content, "html": content},
            "title": title,
            "published": date,
        }
    }

    # get og_image tag
    og_image = soup.find("meta", property="og:image")

    if og_image:
        record["photo"] = indieweb_utils.canonicalize_url(
            og_image["content"], parsed_url.netloc, og_image["content"]
        )

    if not record.get("photo"):
        # the helper mutates record in place, so do not rebind the
        # result (the old code overwrote record with the helper's None
        # return value and then crashed when saving it).
        # NOTE(review): "photo" lives on the outer record, but
        # save_read_later_to_database only persists record["result"] —
        # confirm whether the photo should live inside "result".
        get_read_later_photo(record, soup, url)

    save_read_later_to_database(record)
125 |
--------------------------------------------------------------------------------
/feeds/xml_feed.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | from unicodedata import name
3 |
4 | import indieweb_utils
5 | import requests
6 | from bs4 import BeautifulSoup
7 | from urllib.parse import urlparse as parse_url
8 | from .clean import clean_html_from_entry
9 |
10 |
def get_published_date(entry: dict) -> str:
    """
    Build a compact published timestamp ("YYYYMMDDTHH:MM:SS") for a feed entry.

    Prefers the entry's `published_parsed` date, then `updated_parsed`, and
    falls back to the current time when the entry carries no date at all.

    :param entry: A feedparser entry.
    :return: The timestamp string.
    """
    parsed = entry.get("published_parsed") or entry.get("updated_parsed")

    if parsed:
        month_with_padded_zero = str(parsed.tm_mon).zfill(2)
        day_with_padded_zero = str(parsed.tm_mday).zfill(2)
        published = str(parsed.tm_year)

        # bug fix: the time component previously always came from the local
        # clock, which mis-ordered same-day posts; use the entry's own
        # time-of-day when the entry provides one
        hour_minute_second = (
            str(parsed.tm_hour).zfill(2)
            + ":"
            + str(parsed.tm_min).zfill(2)
            + ":"
            + str(parsed.tm_sec).zfill(2)
        )
    else:
        now = datetime.datetime.now()

        month_with_padded_zero = str(now.month).zfill(2)
        day_with_padded_zero = str(now.day).zfill(2)
        published = str(now.year)

        hour_minute_second = (
            str(now.hour).zfill(2)
            + ":"
            + str(now.minute).zfill(2)
            + ":"
            + str(now.second).zfill(2)
        )

    published += month_with_padded_zero
    published += day_with_padded_zero
    published += "T" + hour_minute_second

    return published
39 |
40 |
def get_content(entry: dict) -> dict:
    """
    Extract a feed entry's content as a {"text": ..., "html": ...} dict.

    Falls back from full content -> summary -> description -> title, and
    returns an empty dict when the entry has none of these.

    :param entry: A feedparser entry.
    :return: The content dictionary (possibly empty).
    """
    if entry.get("content"):
        content = {
            "text": clean_html_from_entry(entry.content[0].value),
            "html": entry.content[0].value,
        }
    elif entry.get("summary"):
        content = {"text": clean_html_from_entry(entry.summary), "html": entry.summary}
    elif entry.get("description"):
        content = {"text": clean_html_from_entry(entry.description), "html": entry.description}
    elif entry.get("title") and entry.get("link"):
        # get feed author
        content = {
            "text": entry.get("title"),
            "html": "" + entry.get("title") + " ",
        }
    elif entry.get("title"):
        # get feed author
        content = {
            "text": entry.get("title"),
            "html": entry.get("title"),
        }
    else:
        # bug fix: the previous fallback referenced an undefined `soup`
        # variable and raised a NameError whenever an entry had no content;
        # no parsed page exists in this module, so return an empty dict
        content = {}

    return content
78 |
79 |
def process_media_content(entry: dict, result: dict, link: str) -> dict:
    """
    Attach video/audio attachments from a feed entry to a jf2 result.

    Checks the entry's link elements first, then its media:content items
    (rewriting YouTube /v/ URLs to embeddable /embed/ URLs).

    :param entry: The feedparser entry.
    :param result: The jf2 object under construction.
    :param link: Unused; kept for caller compatibility.
    :return: result, with "video"/"audio" added when an attachment is found.
    """
    for entry_link in entry.get("links") or []:
        link_type = entry_link.get("type")
        href = entry_link.get("href")

        if link_type and href and "video" in link_type:
            result["video"] = [{"content_type": link_type, "url": href}]
            break
        elif link_type and href and "audio" in link_type:
            result["audio"] = [{"content_type": link_type, "url": href}]
            break

    for media in entry.get("media_content") or []:
        if media.get("url") is None:
            continue

        parsed_url = parse_url(media.get("url"))

        # get domain name
        domain = parsed_url.netloc

        if domain == "youtube.com":
            media["url"] = media["url"].replace("/v/", "/embed/")

        media_type = media.get("type")

        if media_type and ("video" in media_type or "x-shockwave-flash" in media_type):
            result["video"] = [
                {"content_type": media_type, "url": media.get("url")}
            ]
            break
        # bug fix: this previously read `link.get("type")` — either the loop
        # variable left over from the links loop or the str parameter — so
        # audio media was matched against the wrong element or crashed
        elif media_type and "audio" in media_type and media.get("url"):
            result["audio"] = [
                {"content_type": media_type, "url": media.get("url")}
            ]
            break

    return result
125 |
126 |
def get_featured_photo(result: dict, url: str, parse_post: BeautifulSoup) -> dict:
    """
    Pick a candidate "featured image" for a post and store it on the result.

    Header and nav elements are stripped first so that site chrome is less
    likely to be chosen as the featured photo.

    :param result: The jf2 object under construction.
    :param url: The post URL (used to resolve relative image srcs).
    :param parse_post: The parsed post HTML.
    :return: result, with "photo" set when a suitable image is found.
    """
    parsed_url = parse_url(url)

    # drop headers so a site logo is not mistaken for a featured image
    for header_tag in parse_post.find_all("header"):
        header_tag.decompose()

    # drop navigation for the same reason
    for nav_tag in parse_post.find_all("nav"):
        nav_tag.decompose()

    all_imgs = parse_post.find_all("img")

    # only proceed when the very first image carries a src attribute
    if all_imgs and all_imgs[0].get("src"):
        # skip explicit u-photo images
        candidates = [img for img in all_imgs if "u-photo" not in img.get("class", [])]

        if candidates and candidates[0].get("src"):
            result["photo"] = indieweb_utils.canonicalize_url(
                candidates[0]["src"], parsed_url.netloc, candidates[0]["src"]
            )

    return result
151 |
152 |
def process_xml_feed(entry: dict, feed: str, url: str) -> tuple:
    """
    Processes an entry from an XML feed and turns it into a jf2 object.

    :param entry: The entry to process.
    :type entry: dict
    :param feed: The feed the entry came from.
        NOTE(review): despite the str annotation this is used as a
        feedparser result (feed.feed, feed.get) — confirm the annotation.
    :param url: The URL of the feed.
    :type url: str
    :return: A (jf2 entry, published date) tuple, or (None, None) when the
        entry is missing or has no link.
    :rtype: tuple
    """
    # NOTE(review): parsed_url is never used below — candidate for removal
    parsed_url = parse_url(url)

    if not entry or not entry.get("link"):
        return None, None

    # prefer the entry author, then the feed author, then the feed itself
    if entry.get("author"):
        # NOTE(review): feedparser's author_detail is a dict; storing it
        # under "url" looks suspect — confirm whether .get("href") was meant
        author = {"type": "card", "name": entry.author, "url": entry.author_detail}
    elif feed.get("author"):
        author = {
            "type": "card",
            "name": feed.feed.author,
            "url": feed.feed.author_detail,
        }
    else:
        author = {
            "type": "card",
            "name": feed.feed.get("title"),
            "url": feed.feed.get("link"),
        }

    content = get_content(entry)

    published = get_published_date(entry)

    result = {
        "type": "entry",
        "author": author,
        "published": published,
        "content": content,
        "post-type": "entry",
        "title": "",
        "url": entry.link,
    }

    if entry.get("title"):
        result["title"] = entry.title
    else:
        # fall back to a generated title from the author name (or domain)
        result["title"] = f"Post by {author.get('name', url.split('/')[2])}"

    # only the date portion is returned to the caller
    published = published.split("T")[0]

    if not entry.get("media_content"):
        return result, published

    # NOTE(review): the third parameter of process_media_content is named
    # `link` but receives the published date here — confirm intent
    result = process_media_content(entry, result, published)

    return result, published
213 |
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from flask import (
3 | Blueprint,
4 | abort,
5 | flash,
6 | jsonify,
7 | redirect,
8 | render_template,
9 | request,
10 | session,
11 | )
12 |
13 | from actions.channels import (
14 | create_channel,
15 | delete_channel,
16 | get_channels,
17 | reorder_channels,
18 | update_channel,
19 | )
20 | from actions.following import create_follow, get_follow, unfollow
21 | from actions.preview import preview
22 | from actions.react import mark_as_read, react
23 | from actions.search import search_for_content, search_for_feeds
24 | from actions.timeline import get_post, get_timeline, remove_entry
25 | from actions.user_ops import block, get_muted, mute, unblock, unmute
26 | from authentication.check_token import verify as check_token
27 |
# blueprint carrying the client-facing Microsub endpoint routes
main = Blueprint("main", __name__, template_folder="templates")
29 |
30 |
def process_get_request(request: request, action: str, identifier: str, channel: str):
    """Dispatch a Microsub GET query to the matching action handler.

    Returns the handler's response, or None when no action matches.
    """
    if action == "timeline":
        return get_post(request) if identifier else get_timeline(request)

    if action == "follow":
        return get_follow(channel, request)

    if action == "mute":
        return get_muted(request)

    if action == "channels":
        return get_channels(request)

    if action == "search":
        return search_for_content(request) if channel else search_for_feeds(request)
46 |
47 |
def process_channels(request: request, method: str):
    """Route a channel-management POST to update, reorder, delete or create."""
    form = request.form

    # a name + channel pair means an existing channel is being renamed
    if form.get("name") and form.get("channel"):
        return update_channel(request)

    if method == "order" and form.get("channels"):
        return reorder_channels(request)

    if method == "delete":
        return delete_channel()

    return create_channel()
59 |
60 |
def process_post_user_actions(request: request, action: str):
    """Dispatch follow/unfollow/block/unblock/mute/unmute actions.

    Returns the handler's response, or None for an unknown action.
    """
    handlers = {
        "follow": create_follow,
        "unfollow": unfollow,
        "block": block,
        "unblock": unblock,
        "mute": mute,
        "unmute": unmute,
    }

    handler = handlers.get(action)

    if handler:
        return handler(request)
74 |
75 |
def process_post_request(request: request, action: str, method: str):
    """
    Dispatch a Microsub POST request to the matching action handler.

    :param request: The incoming Flask request.
    :param action: The Microsub action name.
    :param method: The action sub-method (e.g. "remove", "order", "delete").
    :return: The handler's response.
    """
    if action == "timeline" and method == "remove":
        return remove_entry(request)
    elif action == "timeline":
        return mark_as_read(request)
    elif action == "preview":
        return preview(request)
    elif action == "react":
        return react(request)
    elif action == "channels":
        # bug fix: the handler's response was previously discarded, so the
        # endpoint fell through to an error even on success (the server
        # copy of this module, server/main.py, returns it)
        return process_channels(request, method)

    # bug fix: likewise return the user-action response instead of dropping it
    return process_post_user_actions(request, action)
89 |
90 |
def microsub_api_request(post_data, success_message):
    """POST to the configured Microsub server and flash the outcome.

    Flashes success_message on HTTP 200, otherwise the server's error text.
    """
    # renamed the local from `request` to avoid shadowing flask.request
    response = requests.post(
        session.get("server_url"),
        data=post_data,
        headers={"Authorization": "Bearer " + session["access_token"]},
    )

    if response.status_code == 200:
        flash(success_message)
    else:
        flash(response.json()["error"])
102 |
103 |
104 | @main.route("/")
105 | def index():
106 | is_authenticated = check_token(request.headers, session)
107 |
108 | if is_authenticated:
109 | return redirect("/reader/all")
110 |
111 | return render_template("index.html", title="Home", channels=[])
112 |
113 |
114 | @main.route("/setup")
115 | def setup():
116 | return render_template("setup.html", title="Setup", channels=[])
117 |
118 |
119 | @main.route("/endpoint", methods=["GET", "POST"])
120 | def home():
121 | if request.form:
122 | action = request.form.get("action")
123 | method = request.form.get("method")
124 | channel = request.form.get("channel")
125 | identifier = request.form.get("id")
126 | else:
127 | action = request.args.get("action")
128 | method = request.args.get("method")
129 | channel = request.args.get("channel")
130 | identifier = request.args.get("id")
131 |
132 | is_authenticated = check_token(request.headers, session)
133 |
134 | if not is_authenticated:
135 | return abort(403)
136 |
137 | if not action:
138 | return jsonify({"error": "No action specified."}), 400
139 |
140 | if request.method == "GET":
141 | process_get_request(request, action, identifier, channel)
142 | elif request.method == "POST":
143 | process_post_request(request, action, method)
144 |
145 | return (
146 | jsonify(
147 | {
148 | "error": "invalid_request",
149 | "error_description": "The action and method provided are not valid.",
150 | }
151 | ),
152 | 400,
153 | )
154 |
155 |
if __name__ == "__main__":
    # NOTE(review): `main` is a flask Blueprint, which has no run() method —
    # executing this file directly would raise AttributeError; confirm
    # whether a Flask app should be created and run here instead.
    main.run()
158 |
--------------------------------------------------------------------------------
/poll_feeds.py:
--------------------------------------------------------------------------------
1 | import concurrent.futures
2 | import datetime
3 | import json
4 | import logging
5 | import os
6 | import random
7 | import sqlite3
8 | import string
9 |
10 | import feedparser
11 | import mf2py
12 | import requests
13 | from dates import find_poll_cadence
14 |
15 | from config import PROJECT_DIRECTORY, WEBHOOK_CHANNEL, WEBHOOK_TOKEN, WEBHOOK_URL
16 | from feeds import hfeed, json_feed, xml_feed
17 |
# log to a per-run file named after the current timestamp
# NOTE(review): basicConfig is called without level=, so only WARNING and
# above are recorded; it also assumes a logs/ directory already exists
logging.basicConfig(
    filename=f"logs/{datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}.log",
    datefmt="%Y-%m-%d %H:%M:%S",
)

print(
    f"Printing logs to logs/{datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}.log"
)

# (poll_cadence, feed_url) tuples collected while polling; written back to
# the following table by add_feed_items_to_database()
poll_cadences = []

# delete feed_items.json file so old records are not added to db again
if os.path.isfile(PROJECT_DIRECTORY.rstrip("/") + "/feed_items.json"):
    os.remove(PROJECT_DIRECTORY.rstrip("/") + "/feed_items.json")
32 |
33 |
def handle_xml_feed(channel_uid: str, url: str, feed_id: str, etag: str) -> None:
    """
    Poll an RSS/Atom feed and append its newest entries to feed_items.json.

    :param channel_uid: uid of the channel the feed belongs to
    :param url: the feed URL
    :param feed_id: database id of the feed in the following table
    :param etag: last known etag for the feed (stored with each record)
    """
    try:
        feed = requests.get(url, timeout=20)
    except requests.exceptions.RequestException:
        return

    feed = feedparser.parse(feed.text)

    print("entries found: " + str(len(feed.entries)))

    validate_entry_count(feed.entries, url, feed_id)

    dates = []

    # only the ten most recent entries are processed per poll
    for entry in feed.entries[:10]:
        result, published = xml_feed.process_xml_feed(entry, feed, url)

        # idiom fix: compare to None with `is`, not `==`
        if result is None:
            continue

        ten_random_letters = "".join(
            random.choice(string.ascii_lowercase) for _ in range(10)
        )

        record = {
            "channel_uid": channel_uid,
            "result": json.dumps(result),
            "published": published,
            "unread": "unread",
            "url": result["url"],
            "uid": ten_random_letters,
            "hidden": 0,
            "feed_id": feed_id,
            "etag": etag,
            "feed_url": url,
        }

        dates.append(published)

        with open(PROJECT_DIRECTORY + "/feed_items.json", "a+") as file:
            file.write(json.dumps(record) + "\n")

    # estimate how often this feed publishes and remember it for the db
    poll_cadence = find_poll_cadence(dates)

    poll_cadences.append((poll_cadence, url))
79 |
80 |
def handle_json_feed(
    channel_uid: str, url: str, feed_id: str, etag: str, s: list
) -> None:
    """
    Poll a JSON Feed and append its items to feed_items.json.

    :param channel_uid: uid of the channel the feed belongs to
    :param url: the feed URL
    :param feed_id: database id of the feed in the following table
    :param etag: last known etag (overwritten below with the response etag)
    :param s: the subscription row (url, channel, etag, id, poll_cadence)
    """
    try:
        feed = requests.get(url, timeout=20)
    except requests.exceptions.RequestException:
        return

    if feed.status_code != 200:
        return

    # get etag header
    etag = feed.headers.get("etag", "")

    # s[2] is the etag stored on the previous poll; an identical etag means
    # the feed body has not changed
    if etag != "" and etag == s[2]:
        print(f"{url} has not changed since last poll, skipping")
        return

    feed = feed.json()

    dates = []

    print("entries found: " + str(len(feed.get("items", []))))

    validate_entry_count(feed.get("items", []), url, feed_id)

    for entry in feed.get("items", []):
        result, published = json_feed.process_json_feed(entry, feed)

        if result is None:
            continue

        # random uid used as the timeline entry identifier
        ten_random_letters = "".join(
            random.choice(string.ascii_lowercase) for _ in range(10)
        )

        record = {
            "channel_uid": channel_uid,
            "result": json.dumps(result),
            "published": published,
            "unread": "unread",
            "url": result["url"],
            "uid": ten_random_letters,
            "hidden": 0,
            "feed_id": feed_id,
            "etag": etag,
            "feed_url": url,
        }

        with open(PROJECT_DIRECTORY + "/feed_items.json", "a+") as file:
            file.write(json.dumps(record) + "\n")

        dates.append(published)

    # estimate how often this feed publishes and remember it for the db
    poll_cadence = find_poll_cadence(dates)

    poll_cadences.append((poll_cadence, url))
138 |
139 |
def handle_hfeed(channel_uid, url, feed_id, etag):
    """
    Poll a microformats page (h-feed) and append its entries to
    feed_items.json.

    :param channel_uid: uid of the channel the feed belongs to
    :param url: the page URL
    :param feed_id: database id of the feed in the following table
    :param etag: last known etag (stored with each record)
    :return: None (also returned early when the page is unreachable or empty)
    """
    session = requests.Session()
    session.max_redirects = 2

    accept_headers = {
        "Accept": "text/html",
    }

    try:
        r = session.get(url, allow_redirects=True, timeout=10, headers=accept_headers)
    except requests.exceptions.RequestException:
        return None

    mf2_raw = mf2py.parse(r.text)

    hcard = [item for item in mf2_raw["items"] if item["type"][0] == "h-card"]

    h_feed = [
        item
        for item in mf2_raw["items"]
        if item["type"] and item["type"][0] == "h-feed"
    ]

    feed_title = None
    feed_icon = None

    dates = []

    if len(h_feed) > 0 and h_feed[0].get("children"):
        feed = h_feed[0]["children"]
        feed_title = h_feed[0]["properties"].get("name")

        if feed_title:
            feed_title = feed_title[0]

        feed_icon = h_feed[0]["properties"].get("icon")

        if feed_icon:
            feed_icon = feed_icon[0]

        feed = [item for item in feed if item["type"] == ["h-entry"]]
    else:
        # get all non h-card items
        # this will let the program parse non h-entry feeds such as h-event feeds
        # bug fix: this previously compared the string item["type"][0] with
        # the list ["h-card"], which is always unequal, so h-cards were
        # never actually filtered out
        feed = [
            item
            for item in mf2_raw["items"]
            if item["type"] and item["type"][0] != "h-card"
        ]

    if len(feed) == 0:
        return None

    print("entries found: " + str(len(feed)))

    validate_entry_count(feed, url, feed_id)

    # only the first ten entries are processed per poll
    for child in feed[:10]:
        result = hfeed.process_hfeed(
            child, hcard, channel_uid, url, feed_id, feed_title
        )

        if result == {}:
            continue

        ten_random_letters = "".join(
            random.choice(string.ascii_lowercase) for _ in range(10)
        )

        record = {
            "channel_uid": channel_uid,
            "result": json.dumps(result),
            "published": result["published"],
            "unread": "unread",
            "url": result["url"],
            "uid": ten_random_letters,
            "hidden": 0,
            "feed_id": feed_id,
            "etag": etag,
            "feed_url": url,
        }

        with open(PROJECT_DIRECTORY + "/feed_items.json", "a+") as file:
            file.write(json.dumps(record) + "\n")

        dates.append(result["published"])

    # estimate how often this feed publishes and remember it for the db
    poll_cadence = find_poll_cadence(dates)

    poll_cadences.append((poll_cadence, url))
229 |
230 |
def validate_entry_count(entries, feed_url, feed_id):
    """
    Write a warning record to the "notifications" channel when a feed
    returns suspiciously few entries.

    :param entries: the entries parsed from the feed
    :param feed_url: the feed's URL
    :param feed_id: database id of the feed in the following table
    """
    length = len(entries)

    # NOTE(review): the threshold is "fewer than 3" but the message below
    # says the feed "does not have any posts" — confirm which is intended
    if length < 3:
        published = datetime.datetime.now().strftime("%Y%m%d")

        message = f"""{feed_url} feed does not have any posts.
        Please check that the feed URL is working correctly."""

        # jf2 notification entry shown in the reader's notifications channel
        jf2 = {
            "type": "entry",
            "content": {
                "text": message,
                "html": message,
            },
            "published": published,
            "title": f"{feed_url} feed does not have any posts",
            "url": "https://webmention.jamesg.blog",
            "wm-property": "article",
        }

        ten_random_letters = "".join(
            random.choice(string.ascii_lowercase) for _ in range(10)
        )

        record = {
            "channel_uid": "notifications",
            "result": json.dumps(jf2),
            "published": published,
            "unread": "unread",
            "url": jf2["url"],
            "uid": ten_random_letters,
            "hidden": 0,
            "feed_id": feed_id,
            "etag": "",
            "feed_url": feed_url,
        }

        with open(PROJECT_DIRECTORY + "/feed_items.json", "a+") as file:
            file.write(json.dumps(record) + "\n")
271 |
272 |
def extract_feed_items(s, url, channel_uid, feed_id):
    """
    Check whether a subscription has new content and, if so, hand it to the
    right feed handler (XML, JSON Feed, or h-feed) based on content type.

    :param s: the subscription row (url, channel, etag, id, poll_cadence)
    :param url: the feed URL
    :param channel_uid: uid of the channel the feed belongs to
    :param feed_id: database id of the feed in the following table
    :return: (Last-Modified header value, feed_id) on success, or
        [None, None] when the feed was unreachable or skipped
    """
    session = requests.Session()
    session.max_redirects = 2

    # conditional request: s[2] is the stored etag, s[4] the stored
    # poll_cadence value (used here as an If-Modified-Since stamp)
    headers = {
        "If-None-Match": s[2],
        "If-Modified-Since": s[4],
    }

    try:
        r = session.head(url, allow_redirects=True, timeout=10, headers=headers)
    except requests.exceptions.RequestException:
        return [None, None]

    if r.status_code == 304:
        # nothing has changed, escape
        return [None, None]

    # get content type of url
    if r.headers.get("content-type"):
        content_type = r.headers["content-type"]
    else:
        content_type = ""

    # # get etag of url
    if r.headers.get("etag"):
        etag = r.headers["etag"]
    else:
        etag = ""

    # same etag as the previous poll means the feed body is unchanged
    if etag != "" and etag == s[2]:
        print(f"{url} has not changed since last poll, skipping")
        return [None, None]

    # get last modified date of url
    if r.headers.get("last-modified"):
        last_modified = r.headers["last-modified"]
    else:
        last_modified = ""

    # NOTE(review): this skips any feed whose page was last modified more
    # than 12 hours ago — i.e. old pages are never re-polled; confirm intent
    if last_modified != "" and datetime.datetime.strptime(
        last_modified, "%a, %d %b %Y %H:%M:%S %Z"
    ) < datetime.datetime.now() - datetime.timedelta(hours=12):
        print(f"{url} has not been modified in the last 12 hours, skipping")
        return [None, None]

    # dispatch on the advertised content type
    if "xml" in content_type or content_type == "binary/octet-stream":
        handle_xml_feed(channel_uid, url, feed_id, etag)
    elif "application/json" in content_type:
        handle_json_feed(channel_uid, url, feed_id, etag, s)
    else:
        handle_hfeed(channel_uid, url, feed_id, etag)

    return r.headers.get("Last-Modified", ""), feed_id
327 |
328 |
def poll_feeds():
    """
    Poll every non-blocked subscription concurrently and write each feed's
    refreshed poll cadence back to the following table.
    """
    connection = sqlite3.connect(PROJECT_DIRECTORY.rstrip("/") + "/microsub.db")

    with connection:
        cursor = connection.cursor()

        # don't poll feeds that have been blocked
        # see https://indieweb.org/Microsub-spec#Blocking

        subscriptions = cursor.execute(
            "SELECT url, channel, etag, id, poll_cadence FROM following WHERE blocked = 0"
        ).fetchall()

        with concurrent.futures.ThreadPoolExecutor(max_workers=15) as executor:
            channel_uids = []
            tasks = []

            for s in subscriptions:
                if s[0] is None:
                    continue

                url = s[0]
                feed_id = s[3]

                # get channel uid
                # NOTE(review): if this lookup raises, channel_uid keeps the
                # previous iteration's value (or is unbound on the first
                # iteration) when passed to extract_feed_items — confirm
                # whether the commented-out `continue` should be restored
                try:
                    channel_uid = cursor.execute(
                        "SELECT uid FROM channels WHERE uid = ?;", (s[1],)
                    ).fetchone()
                    if channel_uid:
                        channel_uids.append(channel_uid[0])
                except Exception as e:
                    print(e)
                    print("channel uid not found")
                    # continue

                tasks.append(
                    executor.submit(extract_feed_items, s, url, channel_uid, feed_id)
                )

            for task in concurrent.futures.as_completed(tasks):
                try:
                    modified_since, feed_item = task.result()

                    # store the Last-Modified value as the feed's new cadence
                    if modified_since is not None:
                        cursor.execute(
                            "UPDATE following SET poll_cadence = ? WHERE id = ?;",
                            (modified_since, feed_item),
                        )
                except Exception as e:
                    print(e)
                    continue

    print("polled all subscriptions")
383 |
384 |
def write_record_to_database(line, cursor, last_id):
    """
    Insert one serialized feed item (a JSON line from feed_items.json) into
    the timeline table, skipping URLs that are already present.

    :param line: one JSON-encoded record from feed_items.json
    :param cursor: an open sqlite3 cursor
    :param last_id: the id to assign to the new timeline row
    """
    record = json.loads(line)

    print("Adding: " + record["url"])

    # check if url in db
    in_db = cursor.execute(
        "SELECT * FROM timeline WHERE url = ?", (record["url"],)
    ).fetchall()

    if len(in_db) > 0:
        return

    # idiom fix: isinstance() instead of comparing type() to list
    if isinstance(record["channel_uid"], list):
        record["channel_uid"] = record["channel_uid"][0]

    if record["channel_uid"] == WEBHOOK_CHANNEL and WEBHOOK_TOKEN != "":
        record_jf2 = json.loads(record["result"])
        # send notification to calibot that a new post has been found
        data = {
            "message": "{} ({}) has been published in the {} channel.".format(
                record_jf2["title"],
                record_jf2["url"],
                record["channel_uid"],
            )
        }

        headers = {"Authorization": "Bearer " + WEBHOOK_TOKEN}

        try:
            requests.post(WEBHOOK_URL, data=data, headers=headers)
        except requests.exceptions.RequestException:
            print("error sending webhook")

    cursor.execute(
        """INSERT INTO timeline VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?);""",
        (
            record["channel_uid"],
            record["result"],
            record["published"],
            record["unread"],
            record["uid"] if False else record["url"],
            record["uid"],
            record["hidden"],
            record["feed_id"],
            last_id,
        ),
    ) if False else cursor.execute(
        """INSERT INTO timeline VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?);""",
        (
            record["channel_uid"],
            record["result"],
            record["published"],
            record["unread"],
            record["url"],
            record["uid"],
            record["hidden"],
            record["feed_id"],
            last_id,
        ),
    )

    # note: the caller advances last_id between records; the old in-function
    # `last_id += 1` was a dead store on a local and has been removed

    # update following to add new etag so we can track modifications to a feed
    cursor.execute(
        "UPDATE following SET etag = ? WHERE url = ?;",
        (record["etag"], record["feed_url"]),
    )
441 |
442 |
def add_feed_items_to_database():
    """
    Read feed_items.json (written by the poll handlers) and insert each
    record into the timeline table, then persist the new poll cadences.
    """
    print("adding feed items to database")

    with open(PROJECT_DIRECTORY.rstrip("/") + "/feed_items.json", "r") as f:
        connection = sqlite3.connect(PROJECT_DIRECTORY.rstrip("/") + "/microsub.db")

        with connection:
            cursor = connection.cursor()

            # persist the poll cadences computed while polling
            for p in poll_cadences:
                cursor.execute(
                    "UPDATE following SET poll_cadence = ? WHERE url = ?;", (p[0], p[1])
                )

            # continue numbering from the highest existing timeline id
            last_id = cursor.execute("SELECT MAX(id) FROM timeline;").fetchone()

            if last_id[0] is not None:
                last_id = last_id[0] + 1
            else:
                last_id = 0

            # one JSON record per line; ids advance even for skipped lines
            for line in f:
                write_record_to_database(line, cursor, last_id)
                last_id += 1
467 |
468 |
if __name__ == "__main__":
    poll_feeds()
    add_feed_items_to_database()

    # remove feed items file after all items have been added to the database
    # (guarded, matching the module-level check at startup, so a poll run
    # that produced no items does not crash here)
    if os.path.isfile(PROJECT_DIRECTORY.rstrip("/") + "/feed_items.json"):
        os.remove(PROJECT_DIRECTORY.rstrip("/") + "/feed_items.json")
475 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | beautifulsoup4==4.10.0
2 | blinker==1.4
3 | bs4==0.0.1
4 | certifi==2024.7.4
5 | charset-normalizer==2.0.10
6 | click==8.0.3
7 | feedparser==6.0.8
8 | Flask==2.3.2
9 | gunicorn==20.1.0
10 | html5lib==1.1
11 | idna==3.7
12 | indieweb-utils==0.1.2
13 | itsdangerous==2.0.1
14 | Jinja2==3.1.4
15 | MarkupSafe==2.0.1
16 | mf2py==1.1.2
17 | python-dateutil==2.8.2
18 | pytz==2021.3
19 | requests==2.32.0
20 | sentry-sdk==1.14.0
21 | sgmllib3k==1.0.0
22 | six==1.16.0
23 | soupsieve==2.3.1
24 | urllib3==1.26.19
25 | webencodings==0.5.1
26 | Werkzeug==3.0.3
27 |
--------------------------------------------------------------------------------
/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | -r requirements.txt
2 |
3 | tox==3.24.5
4 | black==24.3.0
5 | isort==5.10.1
6 | pytest==6.2.5
7 | pytest-pythonpath==0.7.3
8 | mypy==0.931
9 | types-dataclasses==0.6.4
10 | types-requests==2.27.3
11 | typing-extensions==4.0.1
12 | flake8==4.0.1
13 | responses==0.17.0
--------------------------------------------------------------------------------
/screenshots/desktop.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/screenshots/desktop.png
--------------------------------------------------------------------------------
/screenshots/feed.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/screenshots/feed.png
--------------------------------------------------------------------------------
/screenshots/mobile.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/screenshots/mobile.png
--------------------------------------------------------------------------------
/screenshots/screenshot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/screenshots/screenshot.png
--------------------------------------------------------------------------------
/seed.py:
--------------------------------------------------------------------------------
import sqlite3

# Seed the Microsub database with the tables the server and the feed poller
# expect. Safe to re-run: every statement uses IF NOT EXISTS.
connection = sqlite3.connect("microsub.db")

with connection:
    cursor = connection.cursor()

    # schema fix: poll_cadence belongs on the subscription — poll_feeds.py
    # runs "SELECT url, channel, etag, id, poll_cadence FROM following" and
    # "UPDATE following SET poll_cadence = ...", which failed against the
    # old schema (the column was declared on timeline instead)
    cursor.execute(
        """CREATE TABLE IF NOT EXISTS following(
            channel text,
            url text,
            etag text,
            photo text,
            name text,
            id integer primary key autoincrement,
            muted integer,
            blocked integer,
            poll_cadence text
        )
        """
    )

    cursor.execute(
        """CREATE TABLE IF NOT EXISTS channels(
            channel text,
            uid text,
            position text
        )
        """
    )

    # nine columns, matching the nine-value INSERT in poll_feeds.py
    cursor.execute(
        """CREATE TABLE IF NOT EXISTS timeline(
            channel text,
            jf2 text,
            date integer,
            read_status text,
            url text,
            uid text,
            hidden integer,
            feed_id integer,
            id integer primary key not null
        )
        """
    )

    cursor.execute(
        """CREATE TABLE IF NOT EXISTS websub_subscriptions(
            url text,
            uid text,
            channel text,
            approved integer
        )
        """
    )

    print("microsub.db has been seeded.")
    print("You are now ready to run the Microsub server.")
59 |
--------------------------------------------------------------------------------
/server/main.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from flask import (
3 | Blueprint,
4 | abort,
5 | flash,
6 | jsonify,
7 | redirect,
8 | render_template,
9 | request,
10 | session,
11 | )
12 |
13 | from actions.channels import (
14 | create_channel,
15 | delete_channel,
16 | get_channels,
17 | reorder_channels,
18 | update_channel,
19 | )
20 | from actions.following import create_follow, get_follow, unfollow
21 | from actions.preview import preview
22 | from actions.react import mark_as_read, react
23 | from actions.search import search_for_content, search_for_feeds
24 | from actions.timeline import get_post, get_timeline, remove_entry
25 | from actions.user_ops import block, get_muted, mute, unblock, unmute
26 | from authentication.check_token import verify as check_token
27 |
# blueprint carrying the server-side Microsub endpoint routes
main = Blueprint("main", __name__, template_folder="templates")
29 |
30 |
def process_get_request(request: request, action: str, identifier: str, channel: str):
    """Dispatch a Microsub GET query to the matching action handler.

    Returns the handler's response, or None when no action matches.
    """
    if action == "timeline":
        return get_post(request) if identifier else get_timeline(request)

    if action == "follow":
        return get_follow(channel)

    if action == "mute":
        return get_muted(request)

    if action == "channels":
        return get_channels()

    if action == "search":
        return search_for_content(request) if channel else search_for_feeds(request)
46 |
47 |
def process_channels(request: request, method: str):
    """Route a channel-management POST to update, reorder, delete or create."""
    form = request.form

    # a name + channel pair means an existing channel is being renamed
    if form.get("name") and form.get("channel"):
        return update_channel(request)

    if method == "order" and form.get("channels"):
        return reorder_channels(request)

    if method == "delete":
        return delete_channel()

    return create_channel(request)
59 |
60 |
def process_post_user_actions(request: request, action: str):
    """Dispatch follow/unfollow/block/unblock/mute/unmute actions.

    Returns the handler's response, or None for an unknown action.
    """
    handlers = {
        "follow": create_follow,
        "unfollow": unfollow,
        "block": block,
        "unblock": unblock,
        "mute": mute,
        "unmute": unmute,
    }

    handler = handlers.get(action)

    if handler:
        return handler(request)
74 |
75 |
def process_post_request(request: request, action: str, method: str):
    """Dispatch a Microsub POST request to the matching action handler."""
    if action == "timeline":
        if method == "remove":
            return remove_entry(request)
        return mark_as_read(request)

    if action == "preview":
        return preview(request)

    if action == "react":
        return react(request)

    if action == "channels":
        return process_channels(request, method)

    # follow/unfollow/block/unblock/mute/unmute
    return process_post_user_actions(request, action)
89 |
90 |
def microsub_api_request(post_data, success_message):
    """POST to the configured Microsub server and flash the outcome.

    Flashes success_message on HTTP 200, otherwise the server's error text.
    """
    # renamed the local from `request` to avoid shadowing flask.request
    response = requests.post(
        session.get("server_url"),
        data=post_data,
        headers={"Authorization": "Bearer " + session["access_token"]},
    )

    if response.status_code == 200:
        flash(success_message)
    else:
        flash(response.json()["error"])
102 |
103 |
104 | @main.route("/")
105 | def index():
106 | is_authenticated = check_token(request.headers, session)
107 |
108 | if is_authenticated:
109 | return redirect("/reader/all")
110 |
111 | return render_template("index.html", title="Home", channels=[])
112 |
113 |
114 | @main.route("/setup")
115 | def setup():
116 | return render_template("setup.html", title="Setup", channels=[])
117 |
118 |
@main.route("/endpoint", methods=["GET", "POST"])
def home():
    """Main Microsub endpoint: dispatch GET/POST actions to their processors."""
    # Posted form data takes precedence over query-string parameters.
    source = request.form if request.form else request.args

    action = source.get("action")
    method = source.get("method")
    channel = source.get("channel")
    identifier = source.get("id")

    if not check_token(request.headers, session):
        return abort(403)

    if not action:
        return jsonify({"error": "No action specified."}), 400

    if request.method == "GET":
        return process_get_request(request, action, identifier, channel)

    if request.method == "POST":
        return process_post_request(request, action, method)

    return (
        jsonify(
            {
                "error": "invalid_request",
                "error_description": "The action and method provided are not valid.",
            }
        ),
        400,
    )
154 |
155 |
if __name__ == "__main__":
    # Run the Flask development server when this module is executed directly.
    main.run()
158 |
--------------------------------------------------------------------------------
/server/server_views.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 |
3 | import requests
4 | from flask import Blueprint, jsonify, redirect, render_template, request, session
5 |
6 | from authentication.check_token import verify as check_token
7 | from .main import microsub_api_request
8 |
9 | server_views = Blueprint("server_views", __name__, template_folder="templates")
10 |
11 |
@server_views.route("/lists")
def dashboard():
    """Show the user's channels (lists) together with locally stored feed ordering."""
    if not check_token(request.headers, session):
        return redirect("/login")

    channel_req = requests.get(
        session.get("server_url") + "?action=channels",
        headers={"Authorization": session["access_token"]},
    )

    all_channels = channel_req.json()["channels"]

    connection = sqlite3.connect("microsub.db")

    with connection:
        feeds = connection.cursor().execute(
            "SELECT * FROM channels ORDER by position ASC;"
        ).fetchall()

    return render_template(
        "server/dashboard.html", title="Your Lists", channels=all_channels, feeds=feeds
    )
39 |
40 |
@server_views.route("/reorder", methods=["POST"])
def reorder_channels_view():
    """Persist a new channel ordering submitted from the lists page."""
    if not check_token(request.headers, session):
        return redirect("/login")

    # Only call the Microsub API when at least one channel was submitted;
    # either way the user lands back on the lists page.
    if request.form.get("channel"):
        microsub_api_request(
            {
                "action": "channels",
                "method": "order",
                "channels": request.form.getlist("channel"),
            },
            "Your channels have been reordered.",
        )

    return redirect("/lists")
60 |
61 |
@server_views.route("/create-channel", methods=["POST"])
def create_channel_view():
    """Create a new Microsub channel from the submitted "name" field."""
    if not check_token(request.headers, session):
        return redirect("/login")

    name = request.form.get("name")

    if name:
        microsub_api_request(
            {"action": "channels", "name": name},
            f"You have created a new channel called {name}.",
        )

    return redirect("/lists")
77 |
78 |
@server_views.route("/delete-channel", methods=["POST"])
def delete_channel_view():
    """Delete the channel identified by the submitted "channel" field."""
    if not check_token(request.headers, session):
        return redirect("/login")

    channel = request.form.get("channel")

    if channel:
        microsub_api_request(
            {"action": "channels", "channel": channel, "method": "delete"},
            "The specified channel has been deleted.",
        )

    return redirect("/lists")
98 |
99 |
@server_views.route("/unfollow", methods=["POST"])
def unfollow_view():
    """Unfollow a feed URL within a channel.

    Requires both "channel" and "url" form fields; if either is missing the
    request is a no-op and the user is sent back to the following page.
    """
    if not check_token(request.headers, session):
        return redirect("/login")

    if request.form.get("channel") and request.form.get("url"):
        req = {
            "action": "unfollow",
            "channel": request.form.get("channel"),
            "url": request.form.get("url"),
        }

        microsub_api_request(req, "Your unfollow was successful.")

    # Always redirect. Previously a missing field fell off the end of the
    # function and returned None, which Flask turns into a 500 error.
    return redirect("/following")
117 |
118 |
@server_views.route("/following/search", methods=["POST"])
def search_for_feed():
    """Search followed feeds by name and return the matches as JSON."""
    if not check_token(request.headers, session):
        return redirect("/login")

    # Default to "" so a missing parameter matches every feed instead of
    # searching for the literal string "None" (f"%{None}%").
    # NOTE(review): this is a POST route but the query is read from the
    # query string (request.args) — confirm that is what callers send.
    query = request.args.get("query", "")

    connection = sqlite3.connect("microsub.db")
    connection.row_factory = sqlite3.Row

    with connection:
        cursor = connection.cursor()

        feeds = cursor.execute(
            "SELECT * FROM following WHERE name LIKE ? ORDER BY id DESC",
            (f"%{query}%",),
        ).fetchall()

    unpacked = [{k: item[k] for k in item.keys()} for item in feeds]

    return jsonify({"items": unpacked}), 200
142 |
143 |
@server_views.route("/following", methods=["GET", "POST"])
def get_all_feeds():
    """List followed feeds (optionally filtered by channel); POST follows a new URL.

    GET renders the "People You Follow" page. POST expects a "url" form
    field, follows it in the "all" channel, and redirects to the reader.
    """
    if not check_token(request.headers, session):
        return redirect("/login")

    if request.method == "POST":
        req = {
            "action": "follow",
            "channel": "all",
            "url": request.form.get("url"),
        }

        # The previous flash message claimed a channel was "renamed", which
        # did not match the follow action performed here.
        microsub_api_request(
            req, f"You are now following {request.form.get('url')}."
        )

        return redirect("/reader/all")

    channel = request.args.get("channel")

    connection = sqlite3.connect("microsub.db")
    connection.row_factory = sqlite3.Row

    # Join each followed feed to its channel record. The previous query mixed
    # an implicit cross join of `channels` with a second INNER JOIN of the
    # same table, and placed WHERE after GROUP BY — invalid SQLite syntax
    # that raised an OperationalError on the filtered branch.
    base_query = """
        SELECT f.channel, f.url, f.etag, f.photo, f.name, f.id, f.muted,
               f.blocked, c.channel AS channel_name
        FROM following AS f
        INNER JOIN channels AS c ON c.uid = f.channel
    """

    with connection:
        cursor = connection.cursor()

        if channel:
            feeds = cursor.execute(
                base_query + " WHERE f.channel = ? ORDER BY f.id DESC;",
                (channel,),
            ).fetchall()
        else:
            feeds = cursor.execute(base_query + " ORDER BY f.id DESC;").fetchall()

    # source: https://nickgeorge.net/programming/python-sqlite3-extract-to-dictionary/#writing_a_function
    unpacked = [{k: item[k] for k in item.keys()} for item in feeds]

    headers = {"Authorization": session["access_token"]}

    channel_req = requests.get(
        session.get("server_url") + "?action=channels", headers=headers
    )

    return render_template(
        "server/following.html",
        title="People You Follow",
        feeds=unpacked,
        count=len(unpacked),
        channels=channel_req.json()["channels"],
    )
213 |
--------------------------------------------------------------------------------
/server/websub.py:
--------------------------------------------------------------------------------
1 | import json
2 | import sqlite3
3 |
4 | import requests
5 | from flask import Blueprint, jsonify, redirect, request, session
6 |
7 | from authentication.check_token import verify as check_token
8 | from actions.preview import get_preview_items
9 |
10 | websub = Blueprint("websub", __name__, template_folder="templates")
11 |
12 |
@websub.route("/websub/<uid>", methods=["POST"])
def save_new_post_from_websub(uid):
    """Ingest posts pushed by a WebSub hub for an approved subscription.

    Args:
        uid: Identifier of the websub subscription being notified. The route
            must declare ``<uid>``; the previous rule ("/websub/") passed no
            argument and raised a TypeError on every delivery.
    """
    connection = sqlite3.connect("microsub.db")

    with connection:
        cursor = connection.cursor()

        # Only accept deliveries for subscriptions we have verified.
        subscription = cursor.execute(
            "SELECT url, channel FROM websub_subscriptions WHERE uid = ? AND approved = 1",
            (uid,),
        ).fetchone()

        if not subscription:
            return jsonify({"error": "Subscription does not exist."}), 400

        url = subscription[0]
        channel = subscription[1]

        # NOTE(review): assumes a matching `following` row always exists for
        # an approved subscription; fetchone()[0] would raise otherwise.
        feed_id = cursor.execute(
            "SELECT id FROM following WHERE url = ?", (url,)
        ).fetchone()[0]

        # Re-fetch the feed itself; hubs only signal that content changed.
        try:
            r = requests.get(url, timeout=5, allow_redirects=True)
        except requests.exceptions.RequestException:
            return jsonify({"error": "invalid url"}), 400

        if r.headers.get("content-type"):
            content_type = r.headers["content-type"]
        else:
            content_type = ""

        items_to_return, content_type = get_preview_items(content_type, url, r)

        # Timeline ids are assigned manually: continue from the current max.
        last_id = cursor.execute("SELECT MAX(id) FROM timeline;").fetchone()

        if last_id[0] is not None:
            last_id = last_id[0] + 1
        else:
            last_id = 0

        for record in items_to_return:
            # Ensure the key exists (None when the item has no publish date).
            record["published"] = record.get("published")

            cursor.execute(
                """INSERT INTO timeline VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?);""",
                (
                    channel,
                    json.dumps(record),
                    record["published"],
                    0,
                    record["url"],
                    record["url"],
                    0,
                    feed_id,
                    last_id,
                ),
            )

            last_id += 1

    return jsonify({"success": "Entry added to feed."}), 200
77 |
78 |
@websub.route("/websub_callback")
def verify_websub_subscription():
    """Answer a WebSub hub's verification request by echoing hub.challenge."""
    # Consistent with every other call site: verify takes (headers, session).
    # The previous call passed only the access token, which does not match
    # the signature used elsewhere in this codebase.
    # NOTE(review): a WebSub hub calling this endpoint will not carry the
    # user's session — confirm whether this callback should require auth.
    auth_result = check_token(request.headers, session)

    if not auth_result:
        return redirect("/login")

    if not request.args.get("hub.mode"):
        return jsonify({"error": "hub.mode not found"}), 400

    if not request.args.get("hub.topic"):
        return jsonify({"error": "No topic provided."}), 400

    if request.args.get("hub.challenge"):
        connection = sqlite3.connect("microsub.db")

        with connection:
            cursor = connection.cursor()

            # The challenge must match the random string we generated when
            # the subscription was requested.
            check_subscription = cursor.execute(
                "SELECT * FROM websub_subscriptions WHERE url = ? AND random_string = ?",
                (
                    request.args.get("hub.topic"),
                    request.args.get("hub.challenge"),
                ),
            ).fetchone()

            if not check_subscription:
                return jsonify({"error": "Subscription does not exist."}), 400

            cursor.execute(
                "UPDATE websub_subscriptions SET approved = ? WHERE url = ?",
                (
                    1,
                    request.args.get("hub.topic"),
                ),
            )

        # Per the WebSub spec, echo the challenge back to confirm intent.
        return request.args.get("hub.challenge"), 200

    return jsonify({"error": "No challenge found."}), 400
119 |
--------------------------------------------------------------------------------
/static/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/.DS_Store
--------------------------------------------------------------------------------
/static/css/styles.css:
--------------------------------------------------------------------------------
1 | html, body, header, figure {
2 | margin: 0;
3 | overflow-x: hidden;
4 | }
5 | * {
6 | max-width: 100%;
7 | }
8 | a {
9 | text-decoration: none;
10 | }
11 | html {
12 | background: url("/static/images/wood.avif");
13 | background-attachment: fixed;
14 | background-size: cover;
15 | font-family: "Helvetica Neue", Helvetica, sans-serif;
16 | }
17 | #main {
18 | max-width: 50em;
19 | margin: auto;
20 | margin-top: 10px;
21 | margin-bottom: 75px;
22 | }
23 | p, code, li {
24 | font-size: 1.0625rem;
25 | }
26 | #results {
27 | text-align: left !important;
28 | }
29 | .search_box {
30 | text-align: center;
31 | min-height: 15em;
32 | }
33 | select {
34 | font-size: 1.0625rem;
35 | background: white;
36 | padding: 10px;
37 | border-radius: 5px;
38 | margin-bottom: 15px;
39 | }
40 | .content_box {
41 | margin-bottom: 15px !important;
42 | }
43 | .notification, .green_notification {
44 | border: 3px solid royalblue;
45 | border-radius: 10px;
46 | background: white;
47 | padding: 20px;
48 | margin-bottom: 15px;
49 | }
50 | .notification {
51 | border: 1px solid royalblue;
52 | }
53 | .green_notification {
54 | border: 1px solid lightgreen;
55 | margin-bottom: 15px;
56 | }
57 | nav, .button_list {
58 | display: flex;
59 | justify-content: space-between;
60 | align-items: center;
61 | }
62 | .button_list {
63 | margin: 10px 0;
64 | }
65 | button, .button_link {
66 | border-radius: 10px;
67 | padding: 10px !important;
68 | display: inline-block;
69 | margin-bottom: 10px !important;
70 | color: white;
71 | border: none;
72 | font-size: 16px;
73 | text-align: center;
74 | background-color: royalblue;
75 | }
76 | button a:hover, .button_link:hover {
77 | color: white !important;
78 | }
79 | .button_link {
80 | margin: 0;
81 | padding: 0;
82 | width: 100%;
83 | box-sizing: border-box;
84 | }
85 | .reacted {
86 | border: 2px solid lightgreen !important;
87 | }
88 | nav, footer ul {
89 | display: flex;
90 | background: white;
91 | padding-left: 25px;
92 | padding-right: 25px;
93 | }
94 | .subscribe_button {
95 | border: 2px solid lightgrey;
96 | border-radius: 10px;
97 | padding: 5px;
98 | }
99 | .subscribe_button a:focus {
100 | text-decoration: none;
101 | border: none;
102 | }
103 | nav ul li {
104 | display: inline-block;
105 | padding-left: 10px;
106 | }
107 | footer ul {
108 | padding: 15px;
109 | flex-direction: row;
110 | flex-wrap: wrap;
111 | list-style-type: none;
112 | }
113 | ul {
114 | list-style-type: none;
115 | padding-left: 0;
116 | margin-bottom: 0;
117 | }
118 | footer li {
119 | flex: auto;
120 | text-align: center;
121 | margin: 0;
122 | padding: 5px;
123 | }
124 | h1 {
125 | font-size: 2rem;
126 | }
127 | img {
128 | border-radius: 10px;
129 | display: block;
130 | }
131 | footer {
132 | text-align: center;
133 | margin-top: 5px;
134 | }
135 | .flex_left_home {
136 | width: 30%;
137 | word-wrap: break-word;
138 | }
139 | .flex_right_home {
140 | flex: 1 50%;
141 | }
.notification_bar {
    position: fixed;
    top: 0;
    text-align: center;
    background-color: green;
    color: white;
    width: 100%;
    box-shadow: 2px 2px lightgrey;
    /* font-weight takes a unitless number; "600px" was invalid and ignored */
    font-weight: 600;
}
152 | img {
153 | max-width: 100%;
154 | }
155 | input {
156 | min-width: 50%;
157 | text-align: center;
158 | }
159 | h2, h1, h3 {
160 | color: royalblue;
161 | }
162 | .author img {
163 | float: left;
164 | margin-right: 10px;
165 | }
166 | details {
167 | background-color: white;
168 | border-radius: 5px;
169 | padding: 10px;
170 | display: inline;
171 | border: 3px dashed orange;
172 | }
173 | textarea, #content {
174 | width: 100%;
175 | }
176 | input[type="submit"] {
177 | text-align: center;
178 | }
179 | summary {
180 | padding-top: 5px;
181 | }
182 | details ul {
183 | padding-top: 10px;
184 | }
185 | details > p {
186 | margin-bottom: 0px;
187 | }
188 | a {
189 | color: #006BD6;
190 | border-bottom: 2px dotted #006BD6;
191 | }
192 | .bottom-navigation {
193 | display: block;
194 | width: auto;
195 | overflow: hidden;
196 | padding-top: 20px;
197 | padding-bottom: 20px;
198 | }
199 | .prev {
200 | display: block;
201 | width: 50%;
202 | }
203 | .prev, .prev a {
204 | float: left
205 | }
206 | .next {
207 | float: right;
208 | width: 50%;
209 | text-align: right;
210 | }
211 | article header, article section {
212 | list-style-position: inside;
213 | }
214 | .main_image {
215 | width: 50%;
216 | }
217 | .indieweb_footer {
218 | padding-bottom: 1em;
219 | }
220 | .indieweb_footer img {
221 | width: auto;
222 | border-radius: 0px;
223 | }
224 | p, li, h1, h2, h3 {
225 | line-height: 1.5em;
226 | }
227 | a:focus {
228 | background-color: #006BD6;
229 | color: white;
230 | outline: lightblue 2px;
231 | border-bottom: 2px dotted black;
232 | }
233 |
234 | input[type="text"]:focus, input[type="url"]:focus, input[type="number"]:focus, textarea:focus, #content:focus, input[type="text"]:focus, input[type="username"]:focus, input[type="password"]:focus {
235 | outline: 3px solid royalblue;
236 | }
237 |
238 | button:focus, input[type="submit"]:focus, .button_link:focus {
239 | background-color: yellow;
240 | color: black !important;
241 | outline: 3px royalblue solid;
242 | border-bottom: none;
243 | }
244 | button:focus > a {
245 | color: black !important;
246 | }
247 | .error {
248 | font-weight: 600;
249 | }
250 |
251 | .button_list {
252 | border-top: 2px solid lightgray;
253 | }
254 |
255 | .button_list form {
256 | float: right;
257 | padding-left: 10px;
258 | }
259 | .notice {
260 | padding: 15px;
261 | margin-left: 5px;
262 | border-radius: 10px;
263 | border: 1px solid lightgrey;
264 | text-align: left;
265 | background-color: white;
266 | }
267 |
268 | a:hover {
269 | color: #0000CD;
270 | }
271 |
272 | input[type="text"], select, input[type="username"], input[type="number"], input[type="password"], input[type="url"] {
273 | padding: 10px;
274 | border-radius: 10px;
275 | margin-bottom: 10px;
276 | display: inline-block;
277 | text-align: left;
278 | min-width: 50%;
279 | width: 100%;
280 | box-sizing: border-box;
281 | font-size: 1.0625rem;
282 | }
283 | input[type="submit"] {
284 | padding: 10px;
285 | border: none;
286 | background-color: royalblue;
287 | color: white;
288 | border-radius: 10px;
289 | min-width: 0 !important;
290 | appearance: none;
291 | font-size: 1.1875rem;
292 | }
293 | /* skip link hiding code from https://www.a11ymatters.com/pattern/skip-link/ */
294 | .accessibility_label_top {
295 | position: fixed;
296 | top: -200px;
297 | }
298 | .accessibility_label_top:focus {
299 | top: 0;
300 | left: 50%;
301 | padding: 10px;
302 | }
303 | .accessibility_label {
304 | position: fixed;
305 | bottom: -200px;
306 | }
307 | .accessibility_label:focus {
308 | bottom: 0 !important;
309 | left: 50%;
310 | padding: 10px;
311 | z-index: 1;
312 | }
313 |
314 | .following_item form {
315 | display: inline;
316 | }
317 |
318 | #settings {
319 | display: none;
320 | }
321 |
322 | .notification_item {
323 | background: lightblue;
324 | color: black;
325 | position: relative;
326 | top: -10px;
327 | padding: 10px;
328 | border-radius: 25%;
329 | display: inline-block;
330 | }
331 |
332 | @media print {
333 | html {
334 | background-color: white;
335 | }
336 | body {
337 | width: 100%;
338 | margin-left: 10px;
339 | margin-right: 10px;
340 | }
341 | nav, footer, .accessibility_label, .print_message, .announcement {
342 | display: none;
343 | }
344 | h1, h2, h3 {
345 | color: black !important;
346 | }
347 | a:after {
348 | content: "(" attr(href) ")";
349 | display: inline-block;
350 | width: auto;
351 | text-decoration: none;
352 | margin-left: 0.3em;
353 | font-style: italic;
354 | }
355 | }
356 | select {
357 | width: 100%;
358 | }
359 | #mobile_navigation, .mobile_close, .mobile {
360 | display: none;
361 | }
362 | .post_container_footer {
363 | display: flex;
364 | }
365 | @media only screen and (max-width: 1000px) {
366 | article img, .blogList img {
367 | width: 100% !important;
368 | }
369 | .post_container_footer {
370 | display: block;
371 | text-align: center;
372 | }
373 | .post_container_footer .flex_right_home {
374 | flex: none;
375 | }
376 | .post_container_footer button {
377 | margin-top: 10px;
378 | }
379 | .post_container_footer .reaction, .post_container_footer .reaction-no-link {
380 | margin-bottom: 0;
381 | }
382 | .flex_left_home {
383 | flex: none;
384 | max-width: 100%;
385 | }
386 | aside, #settings {
387 | margin-bottom: 0;
388 | z-index: 1;
389 | position: fixed;
390 | left: 0;
391 | top: 0;
392 | width: 100%;
393 | height: 100%;
394 | overflow: auto;
395 | background-color: white;
396 | box-sizing: border-box;
397 | display: none;
398 | text-align: center;
399 | }
400 | #settings a {
401 | width: 100%;
402 | display: block;
403 | border: 1px solid lightgrey;
404 | padding: 30px;
405 | box-sizing: border-box;
406 | }
.inner_content_box {
    /* stray double semicolon removed */
    width: 100% !important;
}
410 | .button_list form {
411 | float: right;
412 | }
413 | .prev, .next {
414 | width: 100% !important;
415 | background-color: white;
416 | border-radius: 10px;
417 | padding: 10px;
418 | margin-bottom: 10px;
419 | border: 1px solid lightgrey;
420 | box-sizing: border-box;
421 | text-align: center;
422 | float: none !important;
423 | }
424 | .prev a {
425 | float: none !important;
426 | }
427 | .reaction_set {
428 | text-align: center;
429 | }
430 | button, input, select, textarea, #content {
431 | width: 100% !important;
432 | box-sizing: border-box;
433 | }
434 | button, input[type="submit"] {
435 | text-align: center !important;
436 | }
437 | .main_menu_mobile_message {
438 | display: block !important;
439 | }
440 | aside, .search_bar, #settings {
441 | width: 100% !important;
442 | margin-right: 0 !important;
443 | }
444 | nav {
445 | background-color: white;
446 | padding: 0;
447 | border-bottom: 1px solid lightgray;
448 | padding-left: 20px;
449 | padding-right: 20px;
450 | }
451 | #desktop_navigation {
452 | display: none;
453 | }
454 | #mobile_navigation, .mobile {
455 | display: block;
456 | }
457 | .modal_content {
458 | width: 100% !important;
459 | max-width: 100% !important;
460 | }
461 | .mobile_close {
462 | display: block
463 | }
464 | }
465 | .close {
466 | cursor: pointer;
467 | padding: 10px;
468 | }
469 | .main_menu_mobile_message {
470 | display: none;
471 | }
472 | h1 {
473 | display: inline-block;
474 | margin-bottom: 5px;
475 | }
476 | input {
477 | padding: 10px;
478 | border-radius: 10px;
479 | margin-bottom: 10px;
480 | display: inline-block;
481 | text-align: left;
482 | }
483 | input[type="submit"] {
484 | padding: 10px;
485 | border: none;
486 | background-color: royalblue;
487 | color: white;
488 | border-radius: 10px;
489 | min-width: 0 !important;
490 | appearance: none;
491 | margin-top: 10px;
492 | }
493 | pre {
494 | background-color: white;
495 | border: 2px solid lightgrey;
496 | border-radius: 10px;
497 | padding: 10px;
498 | }
499 | .tab {
500 | display: inline-block;
501 | }
502 | .tab li {
503 | padding-right: 10px;
504 | float: left;
505 | list-style-type: none;
506 | }
507 | ol {
508 | padding: 0;
509 | }
510 | .active_tab {
511 | font-weight: 600;
512 | }
513 | .reaction, .reaction-no-link {
514 | padding: 10px;
515 | margin-left: 5px;
516 | margin-bottom: 5px;
517 | display: inline-block;
518 | border-radius: 10px;
519 | border: 1px solid lightgrey;
520 | background-color: white;
521 | }
522 | .notification {
523 | padding: 15px;
524 | margin-left: 5px;
525 | border-radius: 10px;
526 | border: 1px solid lightgreen;
527 | text-align: center;
528 | background-color: white;
529 | }
530 | .context_box {
531 | text-align: left;
532 | border: 1px solid lightgrey;
533 | padding: 10px;
534 | }
535 | .context_box img {
536 | float: left;
537 | margin-right: 10px;
538 | }
539 | #reply_to, .post_form {
540 | width: 100%;
541 | box-sizing: border-box;
542 | }
543 | .rating_form, .post_form {
544 | text-align: left;
545 | }
546 | .rating_form {
547 | display: none;
548 | }
549 | .rsvp_form {
550 | display: none;
551 | }
552 | .rating_form input, .rsvp_form input {
553 | width: 100%;
554 | box-sizing: border-box;
555 | }
556 | [contenteditable=true]:empty:not(:focus):before{
557 | content: attr(placeholder);
558 | color: grey;
559 | }
560 |
561 | [placeholder]:empty:focus::before {
562 | content: "";
563 | }
564 | #in_editor_tooltip {
565 | background-color: white !important;
566 | }
567 | .data_message_scroll {
568 | max-height: 200px;
569 | overflow: hidden;
570 | overflow-y: scroll;
571 | z-index: 1;
572 | position: fixed;
573 | border: 2px solid lightgrey;
574 | border-radius: 10px;
575 | }
576 | #data_message li {
577 | border: none;
578 | box-shadow: none;
579 | border-bottom: 1px solid lightgrey;
580 | margin-bottom: 0;
581 | }
582 | textarea, #content {
583 | padding: 10px;
584 | border-radius: 10px;
585 | border: 2px solid lightgrey;
586 | margin-bottom: 10px;
587 | display: inline-block;
588 | text-align: left;
589 | min-width: 50%;
590 | min-height: 200px;
591 | font-size: 1.0625rem;
592 | box-sizing: border-box;
593 | }
594 | aside, .feed li, #settings, .bottom-navigation, .content_box, .main_content_box {
595 | padding: 10px;
596 | background-color: white;
597 | box-shadow: 2px 2px lightblue;
598 | border: 1px solid lightgrey;
599 | margin-bottom: 20px;
600 | box-sizing: border-box;
601 | max-width: 100%;
602 | display: block;
603 | }
604 | .inner_content_box {
605 | margin: auto;
606 | width: 50em;
607 | }
608 | .success {
609 | border: 1px solid lightgreen;
610 | background: white;
611 | text-align: center;
612 | padding: 10px;
613 | }
614 | #settings ul li {
615 | padding-bottom: 30px;
616 | }
617 | aside button {
618 | width: 100%;
619 | }
620 | ul {
621 | margin-top: 0;
622 | }
623 | aside li {
624 | margin-bottom: 10px;
625 | }
626 | .reaction a {
627 | border-bottom: none;
628 | }
629 | .reaction a:focus {
630 | border-bottom: none !important;
631 | }
632 | footer img {
633 | display: inline-block;
634 | }
635 | footer a {
636 | border-bottom: none;
637 | }
638 | footer a:focus {
639 | background: initial;
640 | border-bottom: 2px solid darkblue;
641 | }
642 | .sticky_footer {
643 | position: fixed;
644 | left: 0;
645 | bottom: 0;
646 | width: 100%;
647 | background-color: white;
648 | border-top: 2px solid lightgrey;
649 | text-align: center;
650 | padding-bottom: 10px;
651 | }
652 | .search_list ul {
653 | margin: 0;
654 | padding: 0 !important;
655 | }
656 | .search_list ul {
657 | border: none;
658 | box-shadow: none;
659 | }
660 | .search_list li {
661 | float: left;
662 | margin: 10px;
663 | border: 2px solid lightgrey;
664 | padding-top: 10px;
665 | padding-bottom: 10px;
666 | }
667 | code {
668 | word-wrap: break-word;
669 | }
670 | video {
671 | width: 100%;
672 | }
673 | /* Code adapted from https://www.w3schools.com/howto/howto_css_modals.asp */
674 | .modal {
675 | z-index: 1;
676 | position: fixed;
677 | left: 0;
678 | top: 0;
679 | width: 100%;
680 | height: 100%;
681 | overflow: auto;
682 | background-color: rgba(0,0,0,0.4);
683 | box-sizing: border-box;
684 | }
685 | .modal_content {
686 | background-color: #fefefe;
687 | margin: 15% auto;
688 | padding: 20px;
689 | border: 1px solid #888;
690 | width: 35em;
691 | border-radius: 10px;
692 | box-sizing: border-box;
693 | }
694 | .close {
695 | color: #aaa;
696 | float: right;
697 | font-size: 28px;
698 | font-weight: bold;
699 | }
700 |
701 | .close:hover,
702 | .close:focus {
703 | color: black;
704 | text-decoration: none;
705 | cursor: pointer;
706 | }
707 | hr {
708 | border: 1px solid lightgray;
709 | }
710 | .search_button {
711 | display: inline;
712 | }
713 | .search_bar {
714 | width: 80%;
715 | }
716 | details, summary {
717 | max-width: 100%;
718 | display: inline-block;
719 | box-sizing: border-box;
720 | }
721 | .dragover, .data_message {
722 | border: 3px solid darkblue;
723 | background-color: lightblue;
724 | }
725 | .data_message {
726 | padding: 10px;
727 | border-radius: 10px;
728 | text-align: center;
729 | }
730 | details {
731 | margin-bottom: 20px;
732 | }
733 | audio {
734 | width: 100%;
735 | margin-top: 15px;
736 | border-radius: 10px;
737 | }
738 | .js {
739 | display: none;
740 | }
741 | .avatar {
742 | width: 50px;
743 | height: 50px;
744 | }
745 | .unread {
746 | box-shadow: 7px 7px lightgreen;
747 | border: 2px solid grey;
748 | }
--------------------------------------------------------------------------------
/static/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/favicon.ico
--------------------------------------------------------------------------------
/static/icons/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/.DS_Store
--------------------------------------------------------------------------------
/static/icons/100.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/100.png
--------------------------------------------------------------------------------
/static/icons/1024.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/1024.png
--------------------------------------------------------------------------------
/static/icons/114.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/114.png
--------------------------------------------------------------------------------
/static/icons/120.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/120.png
--------------------------------------------------------------------------------
/static/icons/128.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/128.png
--------------------------------------------------------------------------------
/static/icons/144.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/144.png
--------------------------------------------------------------------------------
/static/icons/152.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/152.png
--------------------------------------------------------------------------------
/static/icons/16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/16.png
--------------------------------------------------------------------------------
/static/icons/167.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/167.png
--------------------------------------------------------------------------------
/static/icons/172.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/172.png
--------------------------------------------------------------------------------
/static/icons/180.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/180.png
--------------------------------------------------------------------------------
/static/icons/196.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/196.png
--------------------------------------------------------------------------------
/static/icons/20.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/20.png
--------------------------------------------------------------------------------
/static/icons/216.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/216.png
--------------------------------------------------------------------------------
/static/icons/256.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/256.png
--------------------------------------------------------------------------------
/static/icons/29.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/29.png
--------------------------------------------------------------------------------
/static/icons/32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/32.png
--------------------------------------------------------------------------------
/static/icons/40.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/40.png
--------------------------------------------------------------------------------
/static/icons/48.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/48.png
--------------------------------------------------------------------------------
/static/icons/50.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/50.png
--------------------------------------------------------------------------------
/static/icons/512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/512.png
--------------------------------------------------------------------------------
/static/icons/55.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/55.png
--------------------------------------------------------------------------------
/static/icons/57.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/57.png
--------------------------------------------------------------------------------
/static/icons/58.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/58.png
--------------------------------------------------------------------------------
/static/icons/60.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/60.png
--------------------------------------------------------------------------------
/static/icons/64.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/64.png
--------------------------------------------------------------------------------
/static/icons/72.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/72.png
--------------------------------------------------------------------------------
/static/icons/76.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/76.png
--------------------------------------------------------------------------------
/static/icons/80.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/80.png
--------------------------------------------------------------------------------
/static/icons/87.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/87.png
--------------------------------------------------------------------------------
/static/icons/88.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/icons/88.png
--------------------------------------------------------------------------------
/static/icons/bell.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/static/icons/create.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/static/icons/follow.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/static/icons/home.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/static/icons/search.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/static/icons/settings.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/static/images/feed.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/images/feed.png
--------------------------------------------------------------------------------
/static/images/gradient.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/images/gradient.png
--------------------------------------------------------------------------------
/static/images/wood.avif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capjamesg/cinnamon/3dfb2c05dd118c3d7780e91fd05e543e996e7d32/static/images/wood.avif
--------------------------------------------------------------------------------
/static/js/editor.js:
--------------------------------------------------------------------------------
1 | var char_count = 0;
2 | var last_char = null;
3 | var xhr = new XMLHttpRequest();
4 | var all_uploaded_photos = "";
5 |
6 | xhr.open('GET', '/static/emojis.json');
7 |
8 | xhr.send();
9 | var emoji_names = [];
10 | var emojis = [];
11 | var showing_person_tags = [];
12 | var showing_hash_tags = [];
13 |
14 | var is_private = false;
15 |
16 | function hide_all_forms() {
17 | var forms = ["#rating_form", "#rsvp_form", "#gif_search_bar", "#reply_to"];
18 | for (var i = 0; i < forms.length; i++) {
19 | // hide each form
20 | var form_item = document.querySelector(forms[i]);
21 | form_item.style.display = "none";
22 | }
23 | }
24 |
25 | xhr.onload = function() {
26 | if (xhr.status === 200) {
27 | emoji_req = JSON.parse(xhr.responseText);
28 | // split json into two lists
29 | for (var i in emoji_req) {
30 | emojis.push(i);
31 | emoji_names.push(emoji_req[i]);
32 | }
33 | }
34 | };
35 |
36 | var form = document.getElementById("content");
37 |
38 | function establish_focus() {
39 | // the following three lines of code ensures focus is preserved at the end of the line
40 | // the code was taken from https://stackoverflow.com/questions/1125292/how-to-move-cursor-to-end-of-contenteditable-entity
41 | // specifically, Juank's comment
42 | form.focus();
43 | // select all the content in the element
44 | document.execCommand('selectAll', false, null);
45 | // collapse selection to the end
46 | document.getSelection().collapseToEnd();
47 | }
48 |
49 | var character_count_element = document.getElementById("character_count");
50 |
51 | form.onkeydown = function(e) {
52 | character_count_element.innerHTML = form.innerText.replace(" ", "").length;
53 | }
54 |
55 | var post_image = document.getElementById("post_image");
56 |
57 | post_image.addEventListener("change", function() {
58 | var file = this.files[0];
59 | uploadFile(file);
60 | });
61 |
62 | // draggable code derived from https://www.smashingmagazine.com/2018/01/drag-drop-file-uploader-vanilla-js/
63 | ;
64 | ['dragenter', 'dragover', 'dragleave', 'drop'].forEach(eventName => {
65 | form.addEventListener(eventName, preventDefaults, false)
66 | })
67 |
68 | function preventDefaults(e) {
69 | e.preventDefault()
70 | e.stopPropagation()
71 | }
72 |
73 | form.addEventListener('dragover', handleDragOver)
74 | form.addEventListener('dragleave', handleDragLeave)
75 |
76 | function handleDragOver(e) {
77 | e.preventDefault()
78 | e.stopPropagation()
79 |
80 | var data_message = document.getElementById("data_message");
81 |
82 | data_message.innerHTML = "Drop an image into the box above to upload it to your site.";
83 | data_message.className = "data_message";
84 |
85 | form.after(data_message);
86 |
87 | form.classList.add('dragover')
88 | }
89 |
90 | var is_writing_contact = false;
91 | var is_writing_hashtag = false;
92 |
93 | var all_images = "";
94 | var showing_emojis = [];
95 |
96 | function insert_into_editor(text, identifier) {
97 | // delete all text after last identifier sign
98 | var last_position = form.innerHTML.lastIndexOf(identifier);
99 |
100 | console.log(form.innerHTML.substring(0, last_position))
101 |
102 | form.innerHTML = form.innerHTML.substring(0, last_position);
103 | if (identifier == "!") {
104 | form.innerHTML += text;
105 | } else {
106 | form.innerHTML += identifier + text;
107 | }
108 | form.innerHTML = form.innerHTML.replace(" ", "");
109 |
110 | document.getElementById("data_message").style.display = "none";
111 |
112 | if (document.getElementById("in_editor_tooltip")) {
113 | document.getElementById("in_editor_tooltip").style.display = "none";
114 | }
115 |
116 | establish_focus()
117 |
118 | if (identifier == "@") {
119 | showing_person_tags = [];
120 | } else if (identifier == "#") {
121 | showing_hash_tags = [];
122 | } else if (identifier == "!") {
123 | showing_emojis = [];
124 | }
125 |
126 | last_char = null;
127 | }
128 |
129 | var gif_search_box = document.getElementById("gif_search_bar");
130 |
131 | gif_search_box.addEventListener("keyup", function(e) {
132 | if (e.keyCode === 32) {
133 | add_gif();
134 | }
135 | });
136 |
137 | function add_gif() {
138 | var gif_api_url = "https://api.gfycat.com/v1/gfycats/search?search_text=" + gif_search_box.value;
139 |
140 | fetch(gif_api_url)
141 | .then(function(response) {
142 | return response.json();
143 | }).then(function(data) {
144 | var gifs = data.gfycats.slice(0, 10);
145 | var gif_list = document.getElementById("gif_list");
146 | // turn list into a flex item
147 | gif_list.style.display = "flex";
148 | gif_list.style.flexWrap = "wrap";
149 | gif_list.innerHTML = "";
150 | for (var i = 0; i < gifs.length; i++) {
151 | var gif_item = document.createElement("div");
152 | gif_item.className = "gif_item";
153 | gif_item.innerHTML = " ";
154 | gif_item.onclick = function() {
155 | var gif_url = this.querySelector("img").src;
156 | form.innerHTML += " ";
157 | document.getElementById("data_message").style.display = "none";
158 | gif_list.innerHTML = "";
159 | // hide search box
160 | gif_search_box.style.display = "none";
161 | establish_focus();
162 | }
163 | gif_list.appendChild(gif_item);
164 | }
165 | });
166 | }
167 |
168 | // listen for @ in form
169 | form.addEventListener("keydown", function(e) {
170 | var data_message = document.getElementById("data_message");
171 | data_message.style.textAlign = "left";
172 |
173 | // if keycode is #
174 | if (e.keyCode == 51) {
175 | is_writing_hashtag = true;
176 | is_writing_contact = false;
177 |
178 | // show data_message
179 | data_message.style.display = "block";
180 | }
181 |
182 | if (e.keyCode == 50) {
183 | is_writing_contact = true;
184 | is_writing_hashtag = false;
185 |
186 | // show data_message
187 | data_message.style.display = "block";
188 | }
189 |
190 | // if :) or :D in form
191 | if (form.innerHTML.includes(":)")) {
192 | is_writing_contact = false;
193 | is_writing_hashtag = false;
194 |
195 | form.innerHTML = form.innerHTML.replace(":)", "🙂");
196 | } else if (form.innerHTML.includes(":D")) {
197 | is_writing_contact = false;
198 | is_writing_hashtag = false;
199 |
200 | form.innerHTML = form.innerHTML.replace(":D", "😂");
201 | }
202 |
203 | if (e.keyCode == 32) {
204 | // remove all brs
205 | form.innerHTML = form.innerHTML.replace(/ /g, "");
206 | if (is_writing_hashtag) {
207 | // get last hashtag and make it blue
208 | var hashtag = form.innerHTML.split("#")[form.innerHTML.split("#").length - 1];
209 |
210 | form.innerHTML = form.innerHTML.replace("#" + hashtag, "#" + hashtag + " ");
211 | }
212 |
213 | if (is_writing_contact) {
214 | // get last contact and make it blue
215 | var contact = form.innerHTML.split("@")[form.innerHTML.split("@").length - 1];
216 |
217 | form.innerHTML = form.innerHTML.replace("@" + contact, "@" + contact + " ");
218 | }
219 |
220 | // if last word started with http:// or https:// and . in url
221 | if (form.innerHTML.split(" ")[form.innerHTML.split(" ").length - 1].split(".")[0].split("http://").length > 1 ||
222 | form.innerHTML.split(" ")[form.innerHTML.split(" ").length - 1].split(".")[0].split("https://").length > 1) {
223 | // get last word and make it blue
224 | var url = form.innerHTML.split(" http")[-1];
225 |
226 | // get index of url
227 | var url_index = form.innerHTML.lastIndexOf(url);
228 | var previous_char = form.innerHTML.substring(url_index - 1, url_index);
229 |
230 | if (previous_char != '"') {
231 | form.innerHTML = form.innerHTML.replace(url, "" + url + " ");
232 | } else {
233 | form.innerHTML = form.innerHTML.replace(url, "" + url + " ");
234 |
235 | // make http request to get context
236 | fetch("/context", {
237 | method: "POST",
238 | headers: {
239 | "Content-Type": "application/json"
240 | },
241 | body: JSON.stringify({
242 | url: url
243 | })
244 | }).then(function(response) {
245 | return response.json();
246 | }).then(function(data) {
247 | var context_box = document.createElement("div");
248 |
249 | all_images += ` `;
250 |
251 | context_box.className = "context_box";
252 |
253 | context_box.innerHTML = `
254 |
255 |
256 | ${data.author.name}
257 | ${data.content.html}
258 |
259 | `;
260 |
261 | form.append(context_box);
262 | });
263 | }
264 | }
265 | establish_focus();
266 |
267 | is_writing_contact = false;
268 | is_writing_hashtag = false;
269 |
270 | data_message.innerHTML = "";
271 | data_message.classList.remove("data_message");
272 | }
273 |
274 | if (is_writing_hashtag) {
275 | editor_substitution("#", hashtags, []);
276 | }
277 |
278 | if (is_writing_contact) {
279 | editor_substitution("@", names, people_tags);
280 | }
281 |
282 | if (!is_writing_contact && !is_writing_hashtag) {
283 | var data_message = document.getElementById("data_message");
284 |
285 | data_message.innerHTML = "";
286 |
287 | form.after(data_message);
288 | }
289 |
290 | if (is_writing_contact && e.keyCode == 9) {
291 | e.preventDefault();
292 |
293 | // get first person tag
294 | var first_person_tag = showing_person_tags[0];
295 |
296 | insert_into_editor(first_person_tag.username, "@");
297 | } else if (is_writing_hashtag && e.keyCode == 9) {
298 | e.preventDefault();
299 |
300 | // get first hashtag
301 | var first_hashtag = showing_hash_tags[0];
302 |
303 | insert_into_editor(first_hashtag, "#");
304 | } else if (last_char == 49 && e.keyCode == 9) {
305 | e.preventDefault();
306 |
307 | // get first hashtag
308 | var first_emoji = showing_emojis[0];
309 |
310 | insert_into_editor(first_emoji, "!");
311 | }
312 |
313 | if (e.keyCode == 49) {
314 | last_char = 49;
315 | } else if (e.keyCode == 221) {
316 | last_char = e.keyCode;
317 | }
318 |
319 | if (last_char == 49) {
320 | editor_substitution("!", emoji_names, emojis);
321 |
322 | if (e.keyCode == 32 && !is_writing_hashtag && !is_writing_contact) {
323 | // if value in emoji
324 | // get last index of :
325 | var last_index = form.innerHTML.lastIndexOf("!");
326 | // get substring from last index to end
327 | var user_emoji_name = form.innerHTML.substring(last_index + 1);
328 |
329 | var valid_emojis_to_show = emoji_names.map(function(emoji_name, index) {
330 | if (emoji_name.startsWith(user_emoji_name)) {
331 | return emojis[index];
332 | }
333 | });
334 |
335 | valid_emojis_to_show = valid_emojis_to_show.filter(item => item != undefined);
336 |
337 | var exact_match = valid_emojis_to_show.filter(item => item == user_emoji_name);
338 |
339 | if (exact_match.length == 1) {
340 | form.innerHTML = form.innerHTML += exact_match[0];
341 | form.innerHTML = form.innerHTML.replace("!" + exact_match, "");
342 | var possible_emojis = document.getElementById("data_message");
343 | possible_emojis.innerHTML = "";
344 | }
345 |
346 | if (valid_emojis_to_show.length > 0) {
347 | form.innerHTML = form.innerHTML += valid_emojis_to_show[0];
348 | form.innerHTML = form.innerHTML.replace("!" + user_emoji_name, "");
349 | var possible_emojis = document.getElementById("data_message");
350 | possible_emojis.innerHTML = "";
351 | }
352 | }
353 | } else if (last_char == 221) {
354 | var last_index = form.innerHTML.lastIndexOf("[]");
355 | // get substring from last index to end
356 | var link = form.innerHTML.substring(last_index + 2, form.innerHTML.length);
357 |
358 | if (e.keyCode == 32 && !is_writing_hashtag && !is_writing_contact) {
359 | form.innerHTML = form.innerHTML.replace("[]" + link, "" + link + " ");
360 | }
361 | } else if (last_char == 32) {
362 | last_char = null;
363 | }
364 | });
365 |
366 | function opaqueImage(image_url) {
367 | var image = document.getElementById(image_url);
368 | image.style.opacity = "0.5";
369 | }
370 |
371 | function removeOpaqueImageStyle(image_url) {
372 | var image = document.getElementById(image_url);
373 | image.style.opacity = "1";
374 | }
375 |
376 | function removeImage(image_url) {
377 | var image = document.getElementById(image_url);
378 | image.remove();
379 | all_uploaded_photos = all_uploaded_photos.replace(" ", "");
380 | }
381 |
382 | function handleDragLeave(e) {
383 | e.preventDefault()
384 | e.stopPropagation()
385 |
386 | var data_message = document.getElementById("data_message");
387 |
388 | data_message.innerHTML = "";
389 | data_message.classList = "";
390 |
391 | form.classList.remove('dragover')
392 | }
393 |
394 | form.addEventListener('drop', handleDrop, false)
395 |
396 | function handleDrop(e) {
397 | let dt = e.dataTransfer
398 | let files = dt.files
399 |
400 | handleFiles(files)
401 | handleDragLeave(e)
402 | }
403 |
404 | function handleFiles(files) {
405 | ([...files]).forEach(uploadFile)
406 | }
407 |
408 | function uploadFile(file) {
409 | let formData = new FormData()
410 |
411 | formData.append('file', file)
412 |
413 | fetch("/media", {
414 | method: 'POST',
415 | body: formData
416 | })
417 | .then(response => response.json())
418 | .then(function(response) {
419 | var photos = document.getElementById("photos");
420 |
421 | var url = response["result"];
422 |
423 | var new_photo = document.createElement("img");
424 | new_photo.classList = "u-photo";
425 | new_photo.src = url;
426 | new_photo.id = url;
427 | new_photo.setAttribute("onclick", "removeImage('" + url + "')");
428 | new_photo.setAttribute("onmouseover", "opaqueImage('" + url + "')");
429 | new_photo.setAttribute("onmouseout", "removeOpaqueImageStyle('" + url + "')");
430 |
431 | all_uploaded_photos += " ";
432 |
433 | // add image
434 | photos.appendChild(new_photo);
435 |
436 | send_notification("Your photo was successfully uploaded.
")
437 | })
438 | .catch(() => {
439 | send_notification("Your photo could not be uploaded.
")
440 | })
441 | }
--------------------------------------------------------------------------------
/static/js/reader.js:
--------------------------------------------------------------------------------
1 | var js_req = document.getElementsByClassName("js");
2 |
3 | for (var i = 0; i < js_req.length; i++) {
4 | js_req[i].style.display = "inline";
5 | }
6 |
7 | function trigger_modal(id, is_editor_box = false) {
8 | var modal = document.getElementById(id);
9 | if (id == "private") {
10 | is_private = !is_private;
11 | }
12 | if (modal.style.display == "none") {
13 | if (is_editor_box) {
14 | hide_all_forms();
15 | }
16 | modal.style.display = "block";
17 | } else {
18 | modal.style.display = "none";
19 | if (is_editor_box) {
20 | hide_all_forms();
21 | }
22 | }
23 | }
24 |
25 | function close_modal(event) {
26 | var modal = document.getElementsByClassName("modal");
27 | for (var i = 0; i < modal.length; i++) {
28 | if (event.target == modal[i]) {
29 | modal[i].style.display = "none";
30 | }
31 | }
32 | }
33 |
34 | function show_video(url, id) {
35 | var iframe = document.createElement("iframe");
36 | iframe.src = url;
37 | iframe.width = "640";
38 | iframe.height = "480";
39 | iframe.frameborder = "0";
40 | iframe.allowfullscreen = "true";
41 | iframe.style.display = "block";
42 | var to_replace = document.getElementById(id);
43 | to_replace.parentNode.replaceChild(iframe, to_replace);
44 | }
45 |
46 | // replace urls on all embedded videos
47 | var all_videos = document.getElementsByClassName("embedded_video");
48 |
49 | for (var i = 0; i < all_videos.length; i++) {
50 | var id = all_videos[i].id;
51 | all_videos[i].href = "#" + id + "-heading";
52 | }
53 |
54 | var all_reaction_links = document.getElementsByClassName("reaction");
55 |
56 | for (var i = 0; i < all_reaction_links.length; i++) {
57 | var id = all_reaction_links[i].id;
58 | all_reaction_links[i].href = "#";
59 | }
60 |
61 | function submit_micropub(id, url) {
62 | var form = document.getElementById(id + "-form");
63 | fetch('/react?is_reply=true', {
64 | method: 'POST',
65 | body: new URLSearchParams({
66 | "h": "entry",
67 | "in-reply-to": url,
68 | "content": form.value,
69 | "uid": id,
70 | "private": is_private
71 | })
72 | }).then(function(response) {
73 | if (response.ok) {
74 | send_notification("Your reply has been sent.");
75 | } else {
76 | send_notification("There was an error sending your reply.");
77 | }
78 | trigger_modal(id + "-textbox");
79 | });
80 | }
81 |
82 | function send_notification(notification_text) {
83 | var notification = document.createElement("section");
84 | var body = document.getElementsByTagName("body")[0];
85 | notification.className = "notification_bar";
86 | notification.innerHTML = "" + notification_text + "";
87 | // add notification to top of body
88 | body.insertBefore(notification, body.firstChild);
89 |
90 | setTimeout(function() {
91 | body.removeChild(notification);
92 | }, 5000);
93 | }
94 |
95 | function post_note(all_uploaded_photos) {
96 | // send form-encoded response to micropub endpoint
97 | var form = document.getElementById("content");
98 |
99 | var in_reply_to = document.getElementById("reply_to");
100 |
101 | var rsvp = document.getElementById("rsvp");
102 |
103 | var rating = document.getElementById("rating");
104 |
105 | if (form.innerText.length < 10) {
106 | send_notification("Your note must be at least 10 characters long.");
107 | return;
108 | }
109 |
110 | var content = form.innerHTML;
111 |
112 | // remove all html tags that are not p, br, or img
113 | content = content.replace(/<(?!p|br|img).*?>/g, "");
114 |
115 | content += all_uploaded_photos
116 |
117 | if (in_reply_to.value || rsvp.value || rating.value) {
118 | var url = "/react?is_reply=true"
119 |
120 | if (rsvp) {
121 | content += '' + rsvp.value + ' ';
122 | }
123 |
124 | if (rating) {
125 | content += '' + rating.value + ' ';
126 | }
127 |
128 | var post_body = new URLSearchParams({
129 | "h": "entry",
130 | "in-reply-to": in_reply_to.value,
131 | "content": content,
132 | "uid": in_reply_to.value,
133 | "private": is_private
134 | });
135 | } else {
136 | var url = "/react?is_reply=note";
137 |
138 | var post_body = new URLSearchParams({
139 | "h": "entry",
140 | "content": content,
141 | "private": is_private
142 | })
143 | }
144 |
145 | fetch(url, {
146 | method: 'POST',
147 | body: post_body,
148 | }).then(function(response) {
149 | if (response.ok) {
150 | send_notification("Your post has been created.");
151 | form.value = "";
152 | } else {
153 | send_notification("There was an error sending your reply.");
154 | }
155 | });
156 | }
157 |
158 | function send_reaction(reaction, reaction_name, post_url, post_id) {
159 | fetch('/react', {
160 | method: 'POST',
161 | body: new URLSearchParams({
162 | "h": "entry",
163 | "reaction": reaction,
164 | "url": post_url,
165 | "uid": post_id
166 | })
167 | }).then(function(response) {
168 | // if status code == 200
169 | if (response.status == 200) {
170 | send_notification("Your " + reaction_name + " has been sent.");
171 | }
172 | var reaction_link = document.getElementById(post_id + "-" + reaction);
173 | reaction_link.classList.add("reacted");
174 | })
175 | }
176 |
177 | function send_unfollow(url, id) {
178 | fetch('/unfollow', {
179 | method: 'POST',
180 | body: new URLSearchParams({
181 | "channel": id,
182 | "url": url
183 | })
184 | }).then(function(response) {
185 | // if status code == 200
186 | if (response.status == 200) {
187 | send_notification("You have unfollowed the feed.");
188 | }
189 | var list_item = document.getElementById(id);
190 |
191 | list_item.parentNode.removeChild(list_item);
192 | })
193 | }
194 |
195 | function editor_substitution(substitution_character, mapping_list, list_items = []) {
196 | var data_message = document.getElementById("data_message");
197 |
198 | data_message.classList.remove("data_message");
199 |
200 | var last_index = form.innerHTML.lastIndexOf(substitution_character);
201 | // get substring from last index to end
202 | var user_input = form.innerHTML.substring(last_index + 1).replace(" ", "");
203 |
204 | var to_show = [];
205 |
206 | if (user_input.length + 1 > 1) {
207 | mapping_list.map(function(item, index) {
208 | if (item.startsWith(user_input)) {
209 | if (substitution_character == "@") {
210 | var to_add = people_tags[item.toLowerCase()];
211 |
212 | to_add["username"] = item;
213 |
214 | to_show.push(to_add);
215 | } else if (substitution_character == "#") {
216 | to_show.push(item);
217 | } else if (substitution_character == "!") {
218 | var name_item = list_items[index];
219 | to_show.push(item + " (" + name_item + ")")
220 | }
221 | }
222 | });
223 |
224 | var valid_to_show = to_show.filter(item => item != undefined);
225 |
226 | // get first five to show
227 | var to_show_5 = valid_to_show.slice(0, 5);
228 |
229 | data_message.style.textAlign = "left";
230 |
231 | var main_html = "
";
268 | data_message.innerHTML = main_html;
269 | data_message.classList.add("data_message_scroll");
270 |
271 | // display: block
272 | data_message.style.display = "block";
273 |
274 | form.after(data_message);
275 | }
276 | }
--------------------------------------------------------------------------------
/static/robots.txt:
--------------------------------------------------------------------------------
1 | User-agent *
2 | Disallow: /
--------------------------------------------------------------------------------
/templates/404.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block content %}
3 | {% if error == 404 %}
4 |
5 | 404
6 | This page could not be found.
7 |
8 | {% elif error == 405 %}
9 |
10 | 405
11 | This method is not supported.
12 |
13 | {% else %}
14 |
15 | Error
16 | There was an error and this page could not be loaded.
17 |
18 | {% endif %}
19 | {% endblock %}
--------------------------------------------------------------------------------
/templates/auth.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block content %}
3 |
4 | Cinnamon
5 | Sign in is powered by IndieAuth which means you can log in to the Microsub dashboard with your domain name.
6 | Enter your domain name below to log in to the dashboard.
7 | Only approved domains can sign in with IndieAuth below.
8 | {% with messages = get_flashed_messages() %}
9 | {% if messages %}
10 |
11 | {{ messages[0] }}
12 |
13 | {% endif %}
14 | {% endwith %}
15 |
20 |
21 | {% endblock %}
--------------------------------------------------------------------------------
/templates/base.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | {% if title %}{{ title }}{% else %}Cinnamon{% endif %}
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
88 |
89 |
90 | {% include "show_error.html" %}
91 | Skip to main content
92 |
93 |
94 | {% if session.get("me") %}
95 | {% if channel_name %}
96 | {{ channel_name }} Posts
97 | {% elif title %}
98 | {{ title }}
99 | {% else %}
100 |
101 | {% endif %}
102 | {% else %}
103 | Cinnamon
104 | {% endif %}
105 | {% if session.get("me") %}
106 |
116 |
117 | {% else %}
118 |
122 | {% endif %}
123 |
124 |
125 |
126 |
Follow Someone
127 |
Enter a website below to follow the owner's content.
128 |
133 |
134 |
135 |
168 |
169 | {% if session.get("me") %}
170 |
171 |
×
172 |
Settings
173 |
179 |
180 | {% endif %}
181 | {% block content %}
182 | {% endblock %}
183 |
184 |
185 | {% if session.get("me") %}
186 |
197 | {% endif %}
198 | Go Back to the Top
199 |
200 |
201 |
--------------------------------------------------------------------------------
/templates/client/create.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block content %}
3 |
44 | {% endblock %}
--------------------------------------------------------------------------------
/templates/client/discover.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block content %}
3 |
4 | Discover People
5 |
13 |
20 |
22 |
23 |
63 | {% endblock %}
--------------------------------------------------------------------------------
/templates/client/feed_item.html:
--------------------------------------------------------------------------------
1 |
2 | {% if w.get("title") and channels[1]['uid'] != channel_id %}
3 |
4 | {% endif %}
5 | {% if channels[1]['uid'] != channel_id %}
6 | {% if w.get("photo") and w.get("video") and (w["video"][0]["url"].startswith("https://youtube.com") or w["video"][0]["url"].startswith("https://www.youtube.com")) %}
7 |
8 | Click the image above to show the video.
9 | {% elif w.get("photo") and w.get("content") and "img" not in w.get("content").get("html", "") %}
10 |
11 | {% endif %}
12 | {% if w.get("author") %}
13 |
25 | {% elif w.get("published") %}
26 | Published on {{ w["published"] | strftime }}.
27 | {% endif %}
28 | {% endif %}
29 | {% if w.get("video")
30 | and (not w["video"][0]["url"].startswith("https://youtube.com")
31 | and not w["video"][0]["url"].startswith("https://www.youtube.com"))
32 | and w.get("content")
33 | and "video" not in w.get("content").get("html", "") %}
34 |
35 |
36 |
37 | {% elif w.get("audio") %}
38 |
39 |
40 |
41 | {% endif %}
42 | {% if w.get("content") and w["content"].get("html") and show_all_content != True %}
43 | {% set content = w["content"]["html"] %}
44 | {{ content | safe }} {% if content | length == 75 %}...{% endif %} {{ " ".join(w["content"]["text"].split(" ")[75:]) }}
45 | {% elif show_all_content == False %}
46 | Read the full post.
47 | {% else %}
48 |
49 | {{ w["content"]["html"] | safe }}
50 |
51 | {% endif %}
52 | {% if channels[1]['uid'] == channel_id %}
53 | ({{ w["published"] | strftime }})
54 | {% endif %}
55 |
56 | {% if channels[1]['uid'] != channel_id or "replied to" in w.get("content", {}).get("html") or "mentioned in" in w.get("content", {}).get("html") %}
57 | {% if "create" in session.get("scopes", "") and session.get('micropub_url') %}
58 | ❤️
59 | 🔖
60 | 💬
61 | {% endif %}
62 | {% endif %}
63 | {% if channels[1]['uid'] != channel_id %}
64 | 🗑️
65 |
66 | {% if show_all_content != True and not w.get('_id', '').startswith("http") %}
67 | Read Full Post 📚
68 | {% elif show_all_content != True and w.get('_id', '').startswith("http") %}
69 | Read Full Post 📚
70 | {% endif %}
71 |
72 | {% if session.get('micropub_url') %}
73 |
74 |
Reply content:
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 | {% endif %}
83 | {% if w.get("reactions", {}).get("replies") %}
84 | Your Replies
85 | {% for reply in w["reactions"]["replies"] %}
86 |
87 | {{ reply["content"] }}
88 | ({{ reply["url"] }} )
89 |
90 | {% endfor %}
91 | {% endif %}
92 | {% endif %}
93 |
94 |
95 |
--------------------------------------------------------------------------------
/templates/client/preview.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block content %}
3 |
4 | Subscribe to {{ feed["feed"]["title"] }}
5 | {% if feed["feed"].get("icon") %}
6 |
7 | {% endif %}
8 |
21 | Most recent entries
22 | {% if feed.get("items") | length > 0 %}
23 |
24 | {% for w in feed.get("items") %}
25 | {% include "client/feed_item.html" %}
26 | {% endfor %}
27 |
28 | {% else %}
29 | This feed does not currently have any entries.
30 | {% endif %}
31 |
32 | {% endblock %}
--------------------------------------------------------------------------------
/templates/client/read_article.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block content %}
3 |
6 |
7 | {% include "client/feed_item.html" %}
8 |
9 | {% endblock %}
--------------------------------------------------------------------------------
/templates/client/reader.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block content %}
3 |
4 |
5 |
73 | {% else %}
74 |
75 | There are no posts in this channel. Check back later to see if any posts are added.
76 | {% endif %}
77 |
78 |
79 |
116 |
117 | {% endblock %}
--------------------------------------------------------------------------------
/templates/client/search.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block content %}
3 |
4 | Search Your Feed
5 |
13 |
20 |
22 |
23 |
95 | {% endblock %}
--------------------------------------------------------------------------------
/templates/client/settings.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block content %}
3 |
4 |
Your settings ⚙️
5 |
6 | {% if session.get("h-card", {}).get("name") %}
7 | Your name
8 | {{ session.get("h-card", {}).get("name") }}
9 | Your bio
10 | {{ session.get("h-card", {}).get("note") }}
11 | Your website
12 | You are signed in as {{ session.get("me") }}.
13 | {% else %}
14 | Your website
15 | You are signed in as {{ session.get("me") }}.
16 | {% endif %}
17 |
18 | Advanced Options
19 | Server Location
20 | Your Microsub server is located at: {{ session.get("server_url") }}
21 | Scopes
22 | {% if session.get("scopes", "") and session.get("scopes", "").split(" ") | length > 0 %}
23 | You have granted permission for the following scopes: {{ session.get("scopes", "").replace(" ", ", ") }}
24 | {% else %}
25 | You have not granted any scopes.
26 | {% endif %}
27 |
28 |
29 |
30 | Bookmarklets 🔖
31 |
32 | You can use the following bookmarklets to subscribe to a feed or save an article for later reading:
33 |
34 | Subscribe 📝
35 |
36 | Read Later 📚
37 |
38 |
39 | {% endblock %}
--------------------------------------------------------------------------------
/templates/index.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block content %}
3 | {% if session.get("me") %}
4 |
5 | You are logged in as {{ session.get("me").replace("https://", "").replace("http://", "").strip("/") }}.
6 | Go to your social reader.
7 |
8 | {% endif %}
9 |
10 |
11 |
12 | Cinnamon is a social reader and server run by {{ config.ME }}.
13 | This site lets you read and manage your social web subscriptions (for approved users only).
14 | What is Microsub?
15 | Microsub is a draft standard specification that separates the role of a social reader and server. Using the Microsub standard, social readers and servers can be developed independently. This means that users get more choice in terms of the application that they use to look at the content they follow.
16 | James, the creator of this tool, uses this server to subscribe to and manage the blogs and people he follows. He used to use clients such as Monocle (web) and Indigenous (iOS) to follow websites. He recently implemented his own client on this server that he can use to read content from the authors he follows.
17 | How can I learn more about Microsub?
18 | The IndieWeb wiki contains the information you'll need to have about Microsub to get started. The official editor's draft of the specification is also available on the IndieWeb wiki.
19 | Some useful resources to look at are the following pages:
20 |
26 | Can I host my own Microsub client and server?
27 | Yes! You can host your own Microsub endpoint. This client and server is open-sourced so you can use this one if you would like. This is the ideal configuration if you choose to use a static generator like Jekyll on your site.
28 | You can deploy a version of this Microsub client and server by going to the project GitHub page and following the instructions.
29 | You can find other Microsub clients and servers on the IndieWeb wiki .
30 |
31 | {% endblock %}
--------------------------------------------------------------------------------
/templates/server/dashboard.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block content %}
3 |
4 | Lists
5 | Create a new list
6 | {% if feeds %}
7 | Here are the channels in your feed:
8 |
23 | {% else %}
24 | You have not created any channels yet.
25 | {% endif %}
26 |
27 |
28 |
29 |
×
30 |
Create a List
31 |
Lists make it easy to categorise people you follow so you can easily keep track of the information that matters most to you.
32 |
38 |
39 |
40 | {% endblock %}
--------------------------------------------------------------------------------
/templates/server/following.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block content %}
3 |
4 | Discover People
5 |
13 |
20 | {% if feeds %}
21 |
22 | {% for feed in feeds %}
23 |
24 |
35 |
36 |
Unfollow ❌
37 | {% if feed.get("muted") == 0 and "mute" in session.get("scopes", "") %}
38 |
43 | {% elif feed.get("muted") != 0 and "block" in session.get("scopes", "") %}
44 |
50 | {% endif %}
51 | {% if feed.get("blocked") == 0 and "block" in session.get("scopes", "") %}
52 |
58 | {% elif feed.get("blocked") != 0 and "block" in session.get("scopes", "") %}
59 |
64 |
65 | {% endif %}
66 |
67 | {% endfor %}
68 |
69 | {% else %}
70 |
73 | {% endif %}
74 |
121 |
122 | {% endblock %}
--------------------------------------------------------------------------------
/templates/setup.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block content %}
3 | {% if session.get("me") %}
4 |
9 | {% endif %}
10 |
11 |
12 | Cinnamon Setup Instructions
13 | This Microsub endpoint is run by {{ config.ME }}.
14 | This endpoint is not open for registration. Please only follow the instructions below if you have already deployed your own endpoint. Otherwise, consider looking for a public Microsub server and client .
15 | How to use this tool
16 | If you already have a Microsub server set up and connected to your website, you can go ahead and use the reader component of this service immediately. All you have to do is click "Login" and sign in with your domain name.
17 | If you do not have a Microsub server set up, don't worry. You can use this site as a server!
18 | To do so, add the following HTML tag to the head tag on your website home page:
19 | <link rel="microsub" href="https://microsub.jamesg.blog/endpoint">
20 | That tag will let this service know you are using microsub.jamesg.blog as a Microsub server.
21 | Once you have added the tag, you are ready to sign in! Click "Login" in the navigation bar to get started.
22 |
23 | {% endblock %}
--------------------------------------------------------------------------------
/templates/show_error.html:
--------------------------------------------------------------------------------
1 |
2 | {% with messages = get_flashed_messages() %}
3 | {% if messages %}
4 |
5 | {% for message in messages %}
6 | {{ message }}
7 | {% endfor %}
8 |
9 |
14 | {% endif %}
15 | {% endwith %}
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | skipsdist = True
3 | basepython = python3.9
4 | ignore_basepython_conflict = True
5 | envlist = py39, flake8, typecheck, lint
6 | recreate = False
7 |
8 | [testenv]
9 | envdir = {toxworkdir}/env
10 | whitelist_externals=*
11 | passenv =
12 | *
13 | PYTHONPATH
14 | setenv =
15 | ENV_FILE={env:ENV_FILE:.env.tox}
16 | PYTHONPATH = {toxinidir}
17 | deps =
18 | -r requirements_dev.txt
19 |
20 | [testenv:py39]
21 | commands = pytest {posargs}
22 |
23 | [testenv:typecheck]
24 | deps =
25 | {[testenv]deps}
26 | commands =
27 | mypy --config-file=tox.ini .
28 |
29 | [testenv:lint]
30 | deps =
31 | {[testenv]deps}
32 | commands =
33 | flake8
34 | black --check src/ tests/
35 | isort --check-only src/ tests/
36 |
37 | [testenv:fmt]
38 | deps =
39 | {[testenv]deps}
40 | commands =
41 | black src/ tests/
42 | isort src/ tests/
43 |
44 | [flake8]
45 | exclude = .tox,venv
46 | max-line-length = 120
47 | max-complexity = 10
48 | ignore = E203,W503,W504,I001
49 | enable-extensions=G
50 |
51 | [mypy]
52 | python_version = 3.9
53 | ignore_missing_imports = True
54 | incremental = True
55 | check_untyped_defs = True
56 |
--------------------------------------------------------------------------------
/wsgi.py:
--------------------------------------------------------------------------------
1 | from __init__ import create_app
2 |
3 | app = create_app()
4 |
5 | app.run(debug=True)
6 |
--------------------------------------------------------------------------------