├── .devcontainer
│   └── devcontainer.json
├── .github
│   ├── FUNDING.yml
│   └── workflows
│       └── test-build-deploy.yml
├── .gitignore
├── Dockerfile
├── LICENSE
├── README.md
├── app.py
├── blobs
│   └── .gitignore
├── compose.production.yaml
├── docker-compose.tensorflow-serving.yml
├── docker-compose.yml
├── requirements.txt
├── rss_lambda
│   ├── __init__.py
│   ├── image_recog
│   │   ├── abstract_expensive_rss_lambda.py
│   │   ├── file_cache.py
│   │   ├── image_recog.py
│   │   └── yolov3.py
│   ├── merger
│   │   └── merger.py
│   ├── simple_filters
│   │   ├── filter_lambda.py
│   │   └── simple_filters.py
│   ├── test_image_recog.py
│   ├── test_merger.py
│   ├── test_simple_filters.py
│   ├── test_to_image_feed.py
│   ├── test_utils.py
│   ├── to_image_feed
│   │   └── to_image_feed.py
│   └── utils
│       ├── image_utils.py
│       ├── process_rss_text.py
│       └── rss_lambda_error.py
├── run.sh
├── static
│   ├── favicon.png
│   ├── icon.png
│   ├── index.html
│   └── style.css
└── test-rss.xml
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | // For format details, see https://aka.ms/devcontainer.json. For config options, see the
2 | // README at: https://github.com/devcontainers/templates/tree/main/src/python
3 | {
4 | "name": "rss-lambda",
5 | // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
6 | "image": "mcr.microsoft.com/devcontainers/python:1-3.12-bullseye",
7 |
8 | // Features to add to the dev container. More info: https://containers.dev/features.
9 | // "features": {},
10 |
11 | // Use 'forwardPorts' to make a list of ports inside the container available locally.
12 | // "forwardPorts": [],
13 |
14 | // Use 'postCreateCommand' to run commands after the container is created.
15 | "postCreateCommand": "pip3 install --user -r requirements.txt",
16 | "features": {
17 | "ghcr.io/devcontainers/features/docker-in-docker:2": {}
18 | },
19 | "customizations": {
20 | "vscode": {
21 | "extensions": [
22 | "pcbowers.alpine-intellisense"
23 | ]
24 | }
25 | }
26 |
27 | // Configure tool-specific properties.
28 | // "customizations": {},
29 |
30 | // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
31 | // "remoteUser": "root"
32 | }
33 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | patreon: sekaisoft
4 | buy_me_a_coffee: sekaisoft
5 |
--------------------------------------------------------------------------------
/.github/workflows/test-build-deploy.yml:
--------------------------------------------------------------------------------
1 | name: Test, build and deploy
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 | pull_request:
8 | branches:
9 | - master
10 |
11 | jobs:
12 | test:
13 | runs-on: ubuntu-latest
14 | steps:
15 | - name: Check out code
16 | uses: actions/checkout@v4
17 | - name: Set up Python
18 | uses: actions/setup-python@v5
19 | with:
20 | python-version: 3.12
21 | - name: Install dependencies
22 | run: |
23 | python -m pip install --upgrade pip
24 | pip install -r requirements.txt
25 | - name: Run tests
26 | run: |
27 | python -m unittest discover
28 |
29 | build:
30 | runs-on: ubuntu-latest
31 | needs: test
32 | if: github.event_name == 'push'
33 | steps:
34 | - name: Checkout code
35 | uses: actions/checkout@v4
36 |       - name: Log in to the GitHub Container registry
37 | uses: docker/login-action@v3
38 | with:
39 | registry: ghcr.io
40 | username: ${{ github.actor }}
41 | password: ${{ secrets.GITHUB_TOKEN }}
42 | - name: Build and push container
43 | id: build
44 | uses: docker/build-push-action@v5
45 | with:
46 | context: .
47 | platforms: linux/amd64
48 | tags: |
49 | ghcr.io/${{ github.repository }}:latest-amd64
50 | ghcr.io/${{ github.repository }}:${{ github.sha }}-amd64
51 | push: true
52 | - name: Export digest
53 | run: |
54 | mkdir -p /tmp/digests
55 | digest="${{ steps.build.outputs.digest }}"
56 | touch "/tmp/digests/${digest#sha256:}"
57 | - name: Upload digest
58 | uses: actions/upload-artifact@v4
59 | with:
60 | name: digests-amd64
61 | path: /tmp/digests/*
62 | if-no-files-found: error
63 | retention-days: 1
64 |
65 | build-arm:
66 | runs-on: ubuntu-24.04-arm
67 | needs: test
68 | if: github.event_name == 'push'
69 | steps:
70 | - name: Checkout code
71 | uses: actions/checkout@v4
72 |       - name: Log in to the GitHub Container registry
73 | uses: docker/login-action@v3
74 | with:
75 | registry: ghcr.io
76 | username: ${{ github.actor }}
77 | password: ${{ secrets.GITHUB_TOKEN }}
78 | - name: Build and push
79 | id: build
80 | uses: docker/build-push-action@v5
81 | with:
82 | context: .
83 | platforms: linux/arm64
84 | tags: |
85 | ghcr.io/${{ github.repository }}:latest-arm64
86 | ghcr.io/${{ github.repository }}:${{ github.sha }}-arm64
87 | push: true
88 | - name: Export digest
89 | run: |
90 | mkdir -p /tmp/digests
91 | digest="${{ steps.build.outputs.digest }}"
92 | touch "/tmp/digests/${digest#sha256:}"
93 | - name: Upload digest
94 | uses: actions/upload-artifact@v4
95 | with:
96 | name: digests-arm64
97 | path: /tmp/digests/*
98 | if-no-files-found: error
99 | retention-days: 1
100 |
101 | merge:
102 | runs-on: ubuntu-latest
103 | needs:
104 | - build
105 | - build-arm
106 | if: github.event_name == 'push'
107 | steps:
108 | - name: Download digests
109 | uses: actions/download-artifact@v4
110 | with:
111 | path: /tmp/digests
112 | pattern: digests-*
113 | merge-multiple: true
114 | - name: Set up Docker Buildx
115 | uses: docker/setup-buildx-action@v3
116 |       - name: Log in to the GitHub Container registry
117 | uses: docker/login-action@v3
118 | with:
119 | registry: ghcr.io
120 | username: ${{ github.actor }}
121 | password: ${{ secrets.GITHUB_TOKEN }}
122 | - name: Create manifest list and push
123 | working-directory: /tmp/digests
124 | run: |
125 | docker buildx imagetools create -t ghcr.io/${{ github.repository }}:latest $(printf 'ghcr.io/${{ github.repository }}@sha256:%s ' *)
126 | docker buildx imagetools create -t ghcr.io/${{ github.repository }}:${{ github.sha }} \
127 | ghcr.io/${{ github.repository }}:${{ github.sha }}-amd64 \
128 | ghcr.io/${{ github.repository }}:${{ github.sha }}-arm64
129 | deploy:
130 | runs-on: ubuntu-latest
131 | needs: merge
132 | if: github.event_name == 'push'
133 | steps:
134 | - name: Setup Tailscale
135 | id: tailscale
136 | uses: tailscale/github-action@v2
137 | with:
138 | oauth-client-id: ${{ secrets.TS_OAUTH_CLIENT_ID }}
139 | oauth-secret: ${{ secrets.TS_OAUTH_SECRET }}
140 | tags: tag:ops
141 | - name: Pull and restart container
142 | run: |
143 | ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null "nixos@gibraltar" \
144 | "mkdir -p /home/nixos/rss-lambda && \
145 | wget -O /home/nixos/rss-lambda/compose.yaml https://raw.githubusercontent.com/sekai-soft/rss-lambda/${{ github.sha }}/compose.production.yaml && \
146 | sed -i 's|ghcr.io/sekai-soft/rss-lambda:latest|ghcr.io/sekai-soft/rss-lambda:${{ github.sha }}|g' /home/nixos/rss-lambda/compose.yaml && \
147 | docker compose -f /home/nixos/rss-lambda/compose.yaml pull rss-lambda && \
148 | docker compose -f /home/nixos/rss-lambda/compose.yaml up -d"
149 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Created by https://www.toptal.com/developers/gitignore/api/windows,macos,linux,python,visualstudiocode
2 | # Edit at https://www.toptal.com/developers/gitignore?templates=windows,macos,linux,python,visualstudiocode
3 |
4 | ### Linux ###
5 | *~
6 |
7 | # temporary files which can be created if a process still has a handle open of a deleted file
8 | .fuse_hidden*
9 |
10 | # KDE directory preferences
11 | .directory
12 |
13 | # Linux trash folder which might appear on any partition or disk
14 | .Trash-*
15 |
16 | # .nfs files are created when an open file is removed but is still being accessed
17 | .nfs*
18 |
19 | ### macOS ###
20 | # General
21 | .DS_Store
22 | .AppleDouble
23 | .LSOverride
24 |
25 | # Icon must end with two \r
26 | Icon
27 |
28 |
29 | # Thumbnails
30 | ._*
31 |
32 | # Files that might appear in the root of a volume
33 | .DocumentRevisions-V100
34 | .fseventsd
35 | .Spotlight-V100
36 | .TemporaryItems
37 | .Trashes
38 | .VolumeIcon.icns
39 | .com.apple.timemachine.donotpresent
40 |
41 | # Directories potentially created on remote AFP share
42 | .AppleDB
43 | .AppleDesktop
44 | Network Trash Folder
45 | Temporary Items
46 | .apdisk
47 |
48 | ### macOS Patch ###
49 | # iCloud generated files
50 | *.icloud
51 |
52 | ### Python ###
53 | # Byte-compiled / optimized / DLL files
54 | __pycache__/
55 | *.py[cod]
56 | *$py.class
57 |
58 | # C extensions
59 | *.so
60 |
61 | # Distribution / packaging
62 | .Python
63 | build/
64 | develop-eggs/
65 | dist/
66 | downloads/
67 | eggs/
68 | .eggs/
69 | lib/
70 | lib64/
71 | parts/
72 | sdist/
73 | var/
74 | wheels/
75 | share/python-wheels/
76 | *.egg-info/
77 | .installed.cfg
78 | *.egg
79 | MANIFEST
80 |
81 | # PyInstaller
82 | # Usually these files are written by a python script from a template
83 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
84 | *.manifest
85 | *.spec
86 |
87 | # Installer logs
88 | pip-log.txt
89 | pip-delete-this-directory.txt
90 |
91 | # Unit test / coverage reports
92 | htmlcov/
93 | .tox/
94 | .nox/
95 | .coverage
96 | .coverage.*
97 | .cache
98 | nosetests.xml
99 | coverage.xml
100 | *.cover
101 | *.py,cover
102 | .hypothesis/
103 | .pytest_cache/
104 | cover/
105 |
106 | # Translations
107 | *.mo
108 | *.pot
109 |
110 | # Django stuff:
111 | *.log
112 | local_settings.py
113 | db.sqlite3
114 | db.sqlite3-journal
115 |
116 | # Flask stuff:
117 | instance/
118 | .webassets-cache
119 |
120 | # Scrapy stuff:
121 | .scrapy
122 |
123 | # Sphinx documentation
124 | docs/_build/
125 |
126 | # PyBuilder
127 | .pybuilder/
128 | target/
129 |
130 | # Jupyter Notebook
131 | .ipynb_checkpoints
132 |
133 | # IPython
134 | profile_default/
135 | ipython_config.py
136 |
137 | # pyenv
138 | # For a library or package, you might want to ignore these files since the code is
139 | # intended to run in multiple environments; otherwise, check them in:
140 | # .python-version
141 |
142 | # pipenv
143 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
144 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
145 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
146 | # install all needed dependencies.
147 | #Pipfile.lock
148 |
149 | # poetry
150 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
151 | # This is especially recommended for binary packages to ensure reproducibility, and is more
152 | # commonly ignored for libraries.
153 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
154 | #poetry.lock
155 |
156 | # pdm
157 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
158 | #pdm.lock
159 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
160 | # in version control.
161 | # https://pdm.fming.dev/#use-with-ide
162 | .pdm.toml
163 |
164 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
165 | __pypackages__/
166 |
167 | # Celery stuff
168 | celerybeat-schedule
169 | celerybeat.pid
170 |
171 | # SageMath parsed files
172 | *.sage.py
173 |
174 | # Environments
175 | .env
176 | .venv
177 | env/
178 | venv/
179 | ENV/
180 | env.bak/
181 | venv.bak/
182 |
183 | # Spyder project settings
184 | .spyderproject
185 | .spyproject
186 |
187 | # Rope project settings
188 | .ropeproject
189 |
190 | # mkdocs documentation
191 | /site
192 |
193 | # mypy
194 | .mypy_cache/
195 | .dmypy.json
196 | dmypy.json
197 |
198 | # Pyre type checker
199 | .pyre/
200 |
201 | # pytype static type analyzer
202 | .pytype/
203 |
204 | # Cython debug symbols
205 | cython_debug/
206 |
207 | # PyCharm
208 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
209 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
210 | # and can be added to the global gitignore or merged into this file. For a more nuclear
211 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
212 | #.idea/
213 |
214 | ### Python Patch ###
215 | # Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
216 | poetry.toml
217 |
218 | # ruff
219 | .ruff_cache/
220 |
221 | # LSP config files
222 | pyrightconfig.json
223 |
224 | ### VisualStudioCode ###
225 | .vscode/*
226 | !.vscode/settings.json
227 | !.vscode/tasks.json
228 | !.vscode/launch.json
229 | !.vscode/extensions.json
230 | !.vscode/*.code-snippets
231 |
232 | # Local History for Visual Studio Code
233 | .history/
234 |
235 | # Built Visual Studio Code Extensions
236 | *.vsix
237 |
238 | ### VisualStudioCode Patch ###
239 | # Ignore all local history of files
240 | .history
241 | .ionide
242 |
243 | ### Windows ###
244 | # Windows thumbnail cache files
245 | Thumbs.db
246 | Thumbs.db:encryptable
247 | ehthumbs.db
248 | ehthumbs_vista.db
249 |
250 | # Dump file
251 | *.stackdump
252 |
253 | # Folder config file
254 | [Dd]esktop.ini
255 |
256 | # Recycle Bin used on file shares
257 | $RECYCLE.BIN/
258 |
259 | # Windows Installer files
260 | *.cab
261 | *.msi
262 | *.msix
263 | *.msm
264 | *.msp
265 |
266 | # Windows shortcuts
267 | *.lnk
268 |
269 | # End of https://www.toptal.com/developers/gitignore/api/windows,macos,linux,python,visualstudiocode
270 |
271 | cache
272 | file_cache
273 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.12-slim-bullseye
2 |
3 | WORKDIR /app
4 |
5 | COPY ./requirements.txt /app/requirements.txt
6 |
7 | RUN pip install --no-cache-dir -r requirements.txt
8 |
9 | RUN pip install gunicorn==22.0.0
10 |
11 | COPY . /app
12 |
13 | ENTRYPOINT ["/app/run.sh"]
14 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Sekaisoft LLC
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # RSS-lambda
2 |
3 | RSS-lambda transforms RSS feeds without RSS client lock-in
4 |
5 | ## Motivation
6 |
7 | There are RSS clients that can perform transformations on RSS feeds, e.g. only keeping entries with certain keywords, or translating the text of entries
8 | 
9 | However, relying on those features of a particular RSS client creates lock-in that prevents you from moving to another RSS client if you ever want to
10 | 
11 | RSS-lambda is an application that performs the transformations on the server side instead, so you can freely move to another RSS client while keeping the transformations. It's also self-hostable, so you don't even need to rely on the official server instance!
12 |
13 | ## Usage
14 |
15 | There is an official server instance available at [rss-lambda.xyz](https://rss-lambda.xyz)
16 |
17 | From the web UI, you can tell it what you want to do with the RSS feed, and it will generate an RSS feed URL that applies your specified transformation (an example is sketched after the list below)
18 | 
19 | Transformations include:
20 | * Filter an RSS feed by including entries with certain keywords in their titles
21 | * Filter an RSS feed by excluding entries with certain keywords in their titles
22 | * Filter an RSS feed by including entries with certain keywords in their contents
23 | * Filter an RSS feed by excluding entries with certain keywords in their contents
24 | * Filter an RSS feed by only including entries with image(s) in their contents
25 | * Merge with other feeds
26 | * Convert a feed to an image-only feed
27 | * (BETA) Filter an RSS feed by only including entries with human image(s) in their contents
28 | * (BETA) Filter an RSS feed by only including entries with cat image(s) in their contents
29 | * (BETA) Filter an RSS feed by only including entries with dog image(s) in their contents
30 |
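For a concrete picture of what the web UI generates, here are two hand-written examples based on the `/rss` and `/rss_merger` endpoints in `app.py` (the example.com feeds are placeholders; the `url` parameter must be URL-encoded):

```
# keep only entries whose title contains "keyword"
https://rss-lambda.xyz/rss?url=https%3A%2F%2Fexample.com%2Ffeed.xml&op=filter_title_incl_substrs&param=keyword

# merge two feeds, de-duplicating entries by guid
https://rss-lambda.xyz/rss_merger?url=https%3A%2F%2Fexample.com%2Fa.xml&url=https%3A%2F%2Fexample.com%2Fb.xml
```
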
31 | ## Self-hosting
32 |
33 | You can use the following `docker-compose.yml` to self-host the application
34 | ```yaml
35 | services:
36 | app:
37 | restart: always
38 | ports:
39 | - "5000:5000"
40 | image: ghcr.io/sekai-soft/rss-lambda:latest
41 | ```
42 |
43 | The web UI will be exposed at port 5000
44 |
45 | The image recognition endpoints are not enabled by default. In order to enable them, you need to
46 |
47 | 1. Create a `blobs` folder under docker compose root folder
48 |
49 | 2. Download [the yolo v3 inference model](https://s3.us-west-1.wasabisys.com/rss-lambda-blobs/yolov3.zip) and unzip it in the `blobs` folder, e.g. the folder structure should look like `./blobs/yolov3/1/...` after unzipping
50 |
51 | 3. Use the following `docker-compose.yml` file instead to run the application
52 | ```yaml
53 | services:
54 |   app:
55 |     restart: always
56 |     ports:
57 |       - "5000:5000"
58 |     image: ghcr.io/sekai-soft/rss-lambda:latest
59 |     volumes:
60 |       - ./blobs:/app/blobs
61 |       - ./cache:/app/cache
62 |       - ./file_cache:/app/file_cache
63 |     environment:
64 |       - TFSERVING_ROOT=http://tfserving:8501
65 |   tfserving:
66 |     restart: always
67 |     ports:
68 |       - '8501:8501'
69 |     image: bitnami/tensorflow-serving:latest
70 |     volumes:
71 |       - ./blobs/yolov3:/bitnami/model-data
72 |     environment:
73 |       - TENSORFLOW_SERVING_MODEL_NAME=yolov3
74 | ```
73 |
74 | ## Development
75 |
76 | Open in VSCode, then run `flask run --reload`
77 |
78 | The webapp will be available at [localhost:5000](http://localhost:5000)
79 |
80 | To enable image recognition endpoints
81 |
82 | 1. Create a `blobs` folder under project root folder
83 |
84 | 2. Download [the yolo v3 inference model](https://s3.us-west-1.wasabisys.com/rss-lambda-blobs/yolov3.zip) and unzip it in the `blobs` folder, e.g. the folder structure should look like `./blobs/yolov3/1/...` after unzipping
85 |
86 | 3. Run `docker compose -f docker-compose.tensorflow-serving.yml up` in another terminal window
87 |
88 | Run unit tests with `python -m unittest discover`
89 |
--------------------------------------------------------------------------------
/app.py:
--------------------------------------------------------------------------------
1 | import os
2 | import requests
3 | import sentry_sdk
4 | from typing import Union
5 | from flask import Flask, request, Response, send_from_directory, send_file
6 | from urllib.parse import unquote, urlparse
7 | from rss_lambda.simple_filters.simple_filters import \
8 | filter_by_title_including_substrings,\
9 | filter_by_title_excluding_substrings,\
10 | filter_by_description_including_substrings,\
11 | filter_by_description_excluding_substrings,\
12 | filter_by_description_containing_image
13 | from rss_lambda.utils.rss_lambda_error import RSSLambdaError
14 | from rss_lambda.image_recog.image_recog import image_recog
15 | from rss_lambda.merger.merger import merger
16 | from rss_lambda.to_image_feed.to_image_feed import to_image_feed
17 |
18 |
19 | if os.getenv('SENTRY_DSN'):
20 | print("Sentry enabled")
21 | sentry_sdk.init(dsn=os.getenv('SENTRY_DSN'))
22 | else:
23 | print("Sentry disabled")
24 |
25 |
26 | max_params = 50
27 |
28 | app = Flask(__name__)
29 |
30 | @app.route("/")
31 | def index():
32 | return send_from_directory('static', 'index.html')
33 |
34 |
35 | def download_feed(rss_url: str, headers) -> Union[str, Response]:
36 | try:
37 | res = requests.get(rss_url, headers={
38 | 'User-Agent': headers.get('User-Agent', '')
39 | })
40 | if res.status_code >= 400 and res.status_code < 600:
41 | return Response(res.content, res.status_code)
42 | return res.text
43 |     except Exception:
44 | return Response("Failed to download the feed", 500)
45 |
46 |
47 | @app.route("/rss")
48 | def _rss():
49 | # parse url
50 | url = request.args.get('url', default=None)
51 | if not url:
52 | return "No url provided", 400
53 | url = unquote(url)
54 | parsed_url = urlparse(url)
55 | if not all([parsed_url.scheme, parsed_url.netloc]):
56 | return "Invalid url", 400
57 |
58 | # parse op
59 | op = request.args.get('op', default=None)
60 | if not op:
61 | return "No op provided", 400
62 |
63 | params = request.args.getlist('param')
64 | if len(params) > max_params:
65 | return f"Too many params, max {max_params} params allowed", 400
66 | try:
67 | if op == "filter_title_incl_substrs":
68 | if not params:
69 | return "No param provided", 400
70 | rss_text_or_res = download_feed(url, request.headers)
71 | if isinstance(rss_text_or_res, str):
72 | return Response(filter_by_title_including_substrings(rss_text_or_res, params), mimetype='application/xml')
73 | return rss_text_or_res
74 | elif op == "filter_title_excl_substrs":
75 | if not params:
76 | return "No param provided", 400
77 | rss_text_or_res = download_feed(url, request.headers)
78 | if isinstance(rss_text_or_res, str):
79 | return Response(filter_by_title_excluding_substrings(rss_text_or_res, params), mimetype='application/xml')
80 | return rss_text_or_res
81 | elif op == "filter_desc_incl_substrs":
82 | if not params:
83 | return "No param provided", 400
84 | rss_text_or_res = download_feed(url, request.headers)
85 | if isinstance(rss_text_or_res, str):
86 | return Response(filter_by_description_including_substrings(rss_text_or_res, params), mimetype='application/xml')
87 | return rss_text_or_res
88 | elif op == "filter_desc_excl_substrs":
89 | if not params:
90 | return "No param provided", 400
91 | rss_text_or_res = download_feed(url, request.headers)
92 | if isinstance(rss_text_or_res, str):
93 | return Response(filter_by_description_excluding_substrings(rss_text_or_res, params), mimetype='application/xml')
94 | return rss_text_or_res
95 | elif op == "filter_desc_cont_img":
96 | if params:
97 | return "No param expected", 400
98 | rss_text_or_res = download_feed(url, request.headers)
99 | if isinstance(rss_text_or_res, str):
100 | return Response(filter_by_description_containing_image(rss_text_or_res), mimetype='application/xml')
101 | return rss_text_or_res
102 | else:
103 | return f"Unknown op {op}", 400
104 | except RSSLambdaError as e:
105 | return e.message, 500
106 |
107 |
108 | @app.route("/rss_image_recog")
109 | def _rss_image_recog():
110 | # parse url
111 | url = request.args.get('url', default=None)
112 | if not url:
113 | return "No url provided", 400
114 | url = unquote(url)
115 | parsed_url = urlparse(url)
116 | if not all([parsed_url.scheme, parsed_url.netloc]):
117 | return "Invalid url", 400
118 | rss_text_or_res = download_feed(url, request.headers)
119 | if not isinstance(rss_text_or_res, str):
120 | return rss_text_or_res
121 |
122 | # parse class_id
123 | class_id = request.args.get('class_id', default=None)
124 | if not class_id:
125 | return "No class_id provided", 400
126 |
127 | # Hack for Reeder (iOS)
128 | if class_id.endswith("/rss"):
129 | class_id = class_id[:-4]
130 | if class_id.endswith("/feed"):
131 | class_id = class_id[:-5]
132 |
133 |     try:
134 |         class_id = int(class_id)
135 |     except ValueError:
136 |         return "Invalid class_id", 400
134 |
135 | try:
136 | return Response(image_recog(rss_text_or_res, class_id, url), mimetype='application/xml')
137 | except RSSLambdaError as e:
138 | return e.message, 500
139 |
140 |
141 | @app.route("/rss_merger")
142 | def _rss_merger():
143 | # parse urls
144 | urls = request.args.getlist('url')
145 | if not urls:
146 | return "No url provided", 400
147 | if len(urls) == 1:
148 | return "At least two urls are required", 400
149 | urls = [unquote(url) for url in urls]
150 | for url in urls:
151 | parsed_url = urlparse(url)
152 | if not all([parsed_url.scheme, parsed_url.netloc]):
153 | return "Invalid url", 400
154 |
155 | rss_texts = [download_feed(url, request.headers) for url in urls]
156 | for rss_text_or_res in rss_texts:
157 | if not isinstance(rss_text_or_res, str):
158 | return rss_text_or_res
159 |
160 | try:
161 | return Response(merger(rss_texts), mimetype='application/xml')
162 | except RSSLambdaError as e:
163 | return e.message, 500
164 |
165 |
166 | @app.route("/to_image_feed")
167 | def _to_image_feed():
168 | # parse url
169 | url = request.args.get('url', default=None)
170 | if not url:
171 | return "No url provided", 400
172 | url = unquote(url)
173 | parsed_url = urlparse(url)
174 | if not all([parsed_url.scheme, parsed_url.netloc]):
175 | return "Invalid url", 400
176 | rss_text_or_res = download_feed(url, request.headers)
177 | if not isinstance(rss_text_or_res, str):
178 | return rss_text_or_res
179 |
180 | try:
181 | return Response(to_image_feed(rss_text_or_res), mimetype='application/xml')
182 | except RSSLambdaError as e:
183 | return e.message, 500
184 |
185 |
186 | @app.route("/test_rss")
187 | def _test_rss():
188 | return send_file('test-rss.xml', 'application/xml')
189 |
--------------------------------------------------------------------------------
/blobs/.gitignore:
--------------------------------------------------------------------------------
1 | yolov3
2 |
--------------------------------------------------------------------------------
/compose.production.yaml:
--------------------------------------------------------------------------------
1 | name: rss-lambda
2 | services:
3 | rss-lambda:
4 | image: ghcr.io/sekai-soft/rss-lambda:latest
5 | container_name: rss-lambda
6 | restart: unless-stopped
7 | volumes:
8 | - ./blobs:/app/blobs
9 | - ./cache:/app/cache
10 | - ./file_cache:/app/file_cache
11 | environment:
12 | - TFSERVING_ROOT=http://tfserving:8501
13 | env_file:
14 | - env
15 | tfserving:
16 | image: bitnami/tensorflow-serving:latest
17 | restart: unless-stopped
18 | volumes:
19 | - ./blobs/yolov3:/bitnami/model-data
20 | environment:
21 | - TENSORFLOW_SERVING_MODEL_NAME=yolov3
22 | cloudflared:
23 | image: cloudflare/cloudflared
24 | restart: unless-stopped
25 | command: tunnel run rss-lambda
26 | env_file:
27 | - env
28 |
--------------------------------------------------------------------------------
/docker-compose.tensorflow-serving.yml:
--------------------------------------------------------------------------------
1 | services:
2 | tensorflow-serving:
3 | image: bitnami/tensorflow-serving:latest
4 | ports:
5 | - "8501:8501"
6 | volumes:
7 | - ./blobs/yolov3:/bitnami/model-data
8 | environment:
9 | - TENSORFLOW_SERVING_MODEL_NAME=yolov3
10 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | services:
2 | app:
3 | # restart: always
4 | ports:
5 | - "5000:5000"
6 | # image: ghcr.io/sekai-soft/rss-lambda:latest
7 | build: .
8 | volumes:
9 | - ./blobs:/app/blobs
10 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask==3.0.0
2 | feedparser==6.0.11
3 | requests==2.31.0
4 | lxml==5.0.0
5 | beautifulsoup4==4.12.3
6 | sentry-sdk[flask]==1.43.0
7 | pillow==11.0.0
8 | tensorflow==2.17.0
--------------------------------------------------------------------------------
/rss_lambda/__init__.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | logging.basicConfig(
4 | level=logging.INFO,
5 | format='%(asctime)s %(levelname)s: %(message)s',
6 | datefmt='%Y-%m-%d %H:%M:%S'
7 | )
8 |
--------------------------------------------------------------------------------
/rss_lambda/image_recog/abstract_expensive_rss_lambda.py:
--------------------------------------------------------------------------------
1 | import os
2 | import os.path
3 | import logging
4 | import hashlib
5 | import datetime
6 | from typing import Any, List
7 | from multiprocessing import Process
8 | from lxml import etree
9 | from ..utils.process_rss_text import process_rss_text, ParsedRssText
10 |
11 | stale_cache_threshold_seconds = 5 * 60 # 5 minutes
12 |
13 | _cache_root_path = os.path.join('cache')
14 | os.makedirs(_cache_root_path, exist_ok=True)
15 |
16 | def _get_cache_path(hash_key: str, suffix: str) -> str:
17 | return os.path.join(_cache_root_path, f"{hash_key}-{suffix}")
18 |
19 | def _cache_exists(hash_key: str, suffix: str) -> bool:
20 | return os.path.isfile(_get_cache_path(hash_key, suffix))
21 |
22 | def _write_cache(hash_key: str, suffix: str, content: str):
23 | with open(_get_cache_path(hash_key, suffix), 'w') as f:
24 | f.write(content)
25 |
26 | def _read_cache(hash_key: str, suffix: str) -> str:
27 | with open(_get_cache_path(hash_key, suffix)) as f:
28 | return f.read()
29 |
30 | def _remove_cache(hash_key: str, suffix: str):
31 | os.remove(_get_cache_path(hash_key, suffix))
32 |
33 | def _cache_is_stale(hash_key: str, suffix: str) -> bool:
34 | creation_time = datetime.datetime.fromtimestamp(os.path.getmtime(_get_cache_path(hash_key, suffix)))
35 | return (datetime.datetime.now() - creation_time).total_seconds() > stale_cache_threshold_seconds
36 |
37 | def _empty_list(rss_text: str) -> str:
38 | def processor(parsed_rss_text: ParsedRssText):
39 | parent = parsed_rss_text.parent
40 | items = parsed_rss_text.items
41 |
42 | # remove all items
43 | for item in items:
44 | parent.remove(item)
45 |
46 | # add item for notice
47 | if items:
48 | notice_item_element = etree.Element(items[0].tag)
49 |
50 | title_element = etree.Element('title')
51 | title_element.text = 'Processing, please refresh later...'
52 | notice_item_element.append(title_element)
53 |
54 | guid_element = etree.Element('guid')
55 | guid_element.text = "Processing, please refresh later..."
56 | notice_item_element.append(guid_element)
57 |
58 | parent.append(notice_item_element)
59 |
60 | return process_rss_text(rss_text, processor)
61 |
62 | ORIGINAL_CACHE_SUFFIX = 'original'
63 | PROCESSED_CACHE_SUFFIX = 'processed'
64 | PROCESSING_LOCK_CACHE_SUFFIX = 'processing-lock'
65 |
66 | def abstract_expensive_rss_lambda(rss_text: str, expensive_operation, hash: str, extra_args: List[Any]) -> str:
67 |     # obtain hash key
68 | h = hashlib.new('sha256')
69 | h.update(hash.encode())
70 | hash_key = h.hexdigest()
71 |
72 | if not _cache_exists(hash_key, ORIGINAL_CACHE_SUFFIX):
73 | # original cache does not exist, start processing (use absence of processed cache as lock)
74 | logging.info(f"(first processing) original cache does not exist for {hash}, start processing")
75 | _write_cache(hash_key, ORIGINAL_CACHE_SUFFIX, rss_text)
76 |
77 | def _process():
78 | processed_rss_text = expensive_operation(rss_text, *extra_args)
79 | _write_cache(hash_key, PROCESSED_CACHE_SUFFIX, processed_rss_text)
80 | logging.info(f"(first processing) processed and cached {hash}")
81 | Process(target=_process).start()
82 |
83 | return _empty_list(rss_text)
84 |
85 | if not _cache_exists(hash_key, PROCESSED_CACHE_SUFFIX):
86 | if _cache_is_stale(hash_key, ORIGINAL_CACHE_SUFFIX):
87 | # original cache is stale, remove and reprocess
88 | logging.info(f"(first processing) original cache is stale for {hash}, removing")
89 | _remove_cache(hash_key, ORIGINAL_CACHE_SUFFIX)
90 | return _empty_list(rss_text)
91 |
92 | # original cache exists but processed cache does not exist. it is being processed, return empty list.
93 | logging.info(f"(first processing) processed cache does not exist for {hash} so it's still processing")
94 | return _empty_list(rss_text)
95 |
96 | processed_cache = _read_cache(hash_key, PROCESSED_CACHE_SUFFIX)
97 | if _read_cache(hash_key, ORIGINAL_CACHE_SUFFIX) == rss_text:
98 | # original cache exists and was not updated, return processed cache
99 | logging.info(f"original cache exists for {hash} and was not updated, returning processed cache")
100 | return processed_cache
101 |
102 | if _cache_exists(hash_key, PROCESSING_LOCK_CACHE_SUFFIX):
103 | if _cache_is_stale(hash_key, PROCESSING_LOCK_CACHE_SUFFIX):
104 |             # original cache exists but was updated and the processing lock is stale; remove the lock so a later request can reprocess
105 | logging.info(f"original cache exists for {hash} but was updated and processing lock is stale, removing")
106 | _remove_cache(hash_key, PROCESSING_LOCK_CACHE_SUFFIX)
107 | return processed_cache
108 |
109 | # original cache exists but was updated and is still processing, return processed cache
110 | logging.info(f"original cache exists for {hash} but was updated and is still processing")
111 | return processed_cache
112 |
113 | # original cache exists but was updated and hasn't been processed yet, start processing and return processed cache
114 | logging.info(f"original cache exists for {hash} but was updated, start processing")
115 | _write_cache(hash_key, PROCESSING_LOCK_CACHE_SUFFIX, 'locked')
116 | def _process():
117 | processed_rss_text = expensive_operation(rss_text, *extra_args)
118 | _write_cache(hash_key, ORIGINAL_CACHE_SUFFIX, rss_text)
119 | _write_cache(hash_key, PROCESSED_CACHE_SUFFIX, processed_rss_text)
120 | _remove_cache(hash_key, PROCESSING_LOCK_CACHE_SUFFIX)
121 | logging.info(f"processed and cached {hash}")
122 | Process(target=_process).start()
123 |
124 | return processed_cache
125 |
--------------------------------------------------------------------------------
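The caching state machine above is easiest to follow end-to-end. Below is a minimal sketch, assuming a fork-based platform (Linux), since the module itself passes a closure to `multiprocessing.Process`; the `slow_upper` operation, the demo feed, and the sleep timing are illustrative only, standing in for a real expensive operation such as image recognition:

```python
import time
from rss_lambda.image_recog.abstract_expensive_rss_lambda import abstract_expensive_rss_lambda

rss = "<rss><channel><title>demo</title><item><title>hi</title></item></channel></rss>"

def slow_upper(text: str) -> str:
    # stands in for a genuinely expensive operation
    time.sleep(0.5)
    return text.upper()

# 1st call: caches the original text, kicks off a background Process,
# and immediately returns a feed whose only item is the
# "Processing, please refresh later..." notice.
abstract_expensive_rss_lambda(rss, slow_upper, "demo-hash", [])

time.sleep(1)  # give the background process time to write the processed cache

# 2nd call: the original cache matches the input, so the processed
# result is returned without recomputing.
abstract_expensive_rss_lambda(rss, slow_upper, "demo-hash", [])
```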
/rss_lambda/image_recog/file_cache.py:
--------------------------------------------------------------------------------
1 | import hashlib
2 | import inspect
3 | import os
4 | import pickle
5 | import logging
6 |
7 | # https://github.com/sweepai/sweep/blob/main/docs/public/file_cache.py
8 |
9 | DISABLE_CACHE = False
10 |
11 | MAX_DEPTH = 6
12 | if DISABLE_CACHE:
13 | print("File cache is disabled.")
14 |
15 |
16 | def recursive_hash(value, depth=0, ignore_params=[]):
17 | """Hash primitives recursively with maximum depth."""
18 | if depth > MAX_DEPTH:
19 | return hashlib.md5("max_depth_reached".encode()).hexdigest()
20 |
21 | if isinstance(value, (int, float, str, bool, bytes)):
22 | return hashlib.md5(str(value).encode()).hexdigest()
23 | elif isinstance(value, (list, tuple)):
24 | return hashlib.md5(
25 | "".join(
26 | [recursive_hash(item, depth + 1, ignore_params) for item in value]
27 | ).encode()
28 | ).hexdigest()
29 | elif isinstance(value, dict):
30 | return hashlib.md5(
31 | "".join(
32 | [
33 | recursive_hash(key, depth + 1, ignore_params)
34 | + recursive_hash(val, depth + 1, ignore_params)
35 | for key, val in value.items()
36 | if key not in ignore_params
37 | ]
38 | ).encode()
39 | ).hexdigest()
40 | elif hasattr(value, "__dict__") and value.__class__.__name__ not in ignore_params:
41 | return recursive_hash(value.__dict__, depth + 1, ignore_params)
42 | else:
43 | return hashlib.md5("unknown".encode()).hexdigest()
44 |
45 |
46 | def hash_code(code):
47 | return hashlib.md5(code.encode()).hexdigest()
48 |
49 |
50 | def file_cache(ignore_params=[], verbose=False):
51 | """Decorator to cache function output based on its inputs, ignoring specified parameters.
52 | Ignore parameters are used to avoid caching on non-deterministic inputs, such as timestamps.
53 | We can also ignore parameters that are slow to serialize/constant across runs, such as large objects.
54 | """
55 |
56 | def decorator(func):
57 | if DISABLE_CACHE:
58 | if verbose:
59 | print("Cache is disabled for function: " + func.__name__)
60 | return func
61 | func_source_code_hash = hash_code(inspect.getsource(func))
62 |
63 | def wrapper(*args, **kwargs):
64 | cache_dir = "file_cache"
65 | os.makedirs(cache_dir, exist_ok=True)
66 |
67 | # Convert args to a dictionary based on the function's signature
68 | args_names = func.__code__.co_varnames[: func.__code__.co_argcount]
69 | args_dict = dict(zip(args_names, args))
70 |
71 | # Remove ignored params
72 | kwargs_clone = kwargs.copy()
73 | for param in ignore_params:
74 | args_dict.pop(param, None)
75 | kwargs_clone.pop(param, None)
76 |
77 | # Create hash based on argument names, argument values, and function source code
78 | arg_hash = (
79 | recursive_hash(args_dict, ignore_params=ignore_params)
80 | + recursive_hash(kwargs_clone, ignore_params=ignore_params)
81 | + func_source_code_hash
82 | )
83 | cache_file = os.path.join(
84 | cache_dir, f"{func.__module__}_{func.__name__}_{arg_hash}.pickle"
85 | )
86 |
87 | try:
88 | # If cache exists, load and return it
89 | if os.path.exists(cache_file):
90 | if verbose:
91 | print("Used cache for function: " + func.__name__ + " and args hash: " + arg_hash)
92 | with open(cache_file, "rb") as f:
93 | return pickle.load(f)
94 | except Exception:
95 | logging.info("Unpickling failed")
96 |
97 | # Otherwise, call the function and save its result to the cache
98 | result = func(*args, **kwargs)
99 | try:
100 | with open(cache_file, "wb") as f:
101 | pickle.dump(result, f)
102 | except Exception as e:
103 | logging.info(f"Pickling failed: {e}")
104 | return result
105 |
106 | return wrapper
107 |
108 | return decorator
109 |
--------------------------------------------------------------------------------
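A usage sketch for the decorator above (the `classify` function and its parameters are made up for illustration): results are pickled under `./file_cache/`, keyed by a hash of the arguments plus the function's own source code, so editing the function invalidates old entries.

```python
from rss_lambda.image_recog.file_cache import file_cache

@file_cache(ignore_params=["timestamp"], verbose=True)
def classify(image_url: str, timestamp: float) -> bool:
    # pretend this is a slow model call
    return image_url.endswith(".jpg")

classify("https://example.com/a.jpg", timestamp=1.0)  # computed, then pickled to file_cache/
classify("https://example.com/a.jpg", timestamp=2.0)  # cache hit: "timestamp" is in ignore_params
```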
/rss_lambda/image_recog/image_recog.py:
--------------------------------------------------------------------------------
1 | from ..utils.process_rss_text import process_rss_text, ParsedRssText
2 | from ..utils.image_utils import extract_images_from_description, create_item_element_with_image, extract_link
3 | from .yolov3 import yolov3
4 | from .abstract_expensive_rss_lambda import abstract_expensive_rss_lambda
5 |
6 | def _expensive_operation(rss_text: str, class_id: int) -> str:
7 | def processor(parsed_rss_text: ParsedRssText):
8 | root = parsed_rss_text.root
9 | parent = parsed_rss_text.parent
10 | items = parsed_rss_text.items
11 |
12 | matched_images = []
13 | for item in items:
14 | images = extract_images_from_description(item, root.nsmap)
15 | for image in images:
16 | img_src = image.get('src')
17 | if yolov3(img_src, class_id):
18 | matched_images.append(create_item_element_with_image(
19 | img_src,
20 | item.tag,
21 | extract_link(item, root.nsmap)))
22 |
23 |         # remove all items and append the matched items
24 | for item in items:
25 | parent.remove(item)
26 | for item in matched_images:
27 | parent.append(item)
28 |
29 | return process_rss_text(rss_text, processor)
30 |
31 | def image_recog(rss_text: str, class_id: int, url: str) -> str:
32 | hash = url + ":" + str(class_id) + ":tf"
33 |
34 | return abstract_expensive_rss_lambda(
35 | rss_text,
36 | _expensive_operation,
37 | hash,
38 | [class_id])
39 |
--------------------------------------------------------------------------------
/rss_lambda/image_recog/yolov3.py:
--------------------------------------------------------------------------------
1 | import os
2 | import logging
3 | import time
4 | import requests
5 | import json
6 | from ..utils.image_utils import download_image
7 | from .file_cache import file_cache
8 |
9 | os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
10 | import tensorflow as tf
11 |
12 | tfserving_root = os.getenv("TFSERVING_ROOT", "http://localhost:8501")
13 | size = 320
14 |
15 |
16 | @file_cache(verbose=True)
17 | def yolov3(image_url: str, desired_class_id: int) -> bool:
18 | start_time = time.time()
19 |
20 |     # Download image
21 |     image_path = download_image(image_url)
22 |     if image_path is None:
23 |         logging.error(f"failed to download image from {image_url}")
24 | return False
25 |
26 | # Decode image
27 | image = tf.image.decode_image(open(image_path, 'rb').read(), channels=3)
28 | image = tf.expand_dims(image, axis=0)
29 | image = tf.image.resize(image, (size, size))
30 | image = image / 255
31 |
32 | # Make request
33 | data = {
34 | "signature_name": "serving_default",
35 | "instances": image.numpy().tolist()
36 | }
37 | resp = requests.post(f"{tfserving_root}/v1/models/yolov3:predict", json=data)
38 | resp = json.loads(resp.content.decode('utf-8'))['predictions'][0]
39 |
40 | res = False
41 | valid_predictions = resp['yolo_nms_3']
42 | for i in range(valid_predictions):
43 | clazz = resp['yolo_nms_2'][i]
44 | if clazz == desired_class_id:
45 | res = True
46 | break
47 |
48 | logging.info(f"yolov3 tf took {time.time() - start_time} seconds")
49 |
50 | return res
51 |
--------------------------------------------------------------------------------
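Before exercising `yolov3()`, it can help to confirm the serving side is up. A small sketch using TF Serving's standard model-status REST endpoint (the localhost address matches the `TFSERVING_ROOT` default above):

```python
import requests

# Should report a version with state "AVAILABLE" once ./blobs/yolov3 is mounted
print(requests.get("http://localhost:8501/v1/models/yolov3").json())
```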
/rss_lambda/merger/merger.py:
--------------------------------------------------------------------------------
1 | from typing import List, Optional
2 | from ..utils.process_rss_text import parse_rss_text, wrap_items_to_rss_text
3 |
4 |
5 | def _get_guid(item) -> Optional[str]:
6 | guid_e = item.find('guid')
7 | if guid_e is not None:
8 | return guid_e.text
9 | return None
10 |
11 |
12 | def merger(rss_texts: List[str]) -> str:
13 | parsed_rss_texts = list(map(parse_rss_text, rss_texts))
14 |
15 | final_items = []
16 | appeared_guids = set()
17 | for parsed_rss_text in parsed_rss_texts:
18 | for item in parsed_rss_text.items:
19 | guid = _get_guid(item)
20 | if guid is None:
21 | final_items.append(item)
22 | elif guid not in appeared_guids:
23 | final_items.append(item)
24 | appeared_guids.add(guid)
25 |
26 | first_parsed_rss_text = parsed_rss_texts[0]
27 | for item in first_parsed_rss_text.items:
28 | first_parsed_rss_text.parent.remove(item)
29 | for item in final_items:
30 | first_parsed_rss_text.parent.append(item)
31 |
32 | return wrap_items_to_rss_text(rss_texts[0], first_parsed_rss_text)
33 |
--------------------------------------------------------------------------------
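In short, `merger` keeps the first feed's envelope (channel metadata), concatenates the items of all feeds in order, and drops any later item whose `<guid>` has already been seen; items without a `<guid>` are always kept. A minimal sketch (`a.xml` / `b.xml` are placeholder feed files):

```python
from rss_lambda.merger.merger import merger

feed_a_xml = open("a.xml").read()  # placeholder feeds
feed_b_xml = open("b.xml").read()

# Order matters: the merged feed reuses feed_a_xml's channel metadata.
merged_xml = merger([feed_a_xml, feed_b_xml])
```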
/rss_lambda/simple_filters/filter_lambda.py:
--------------------------------------------------------------------------------
1 | from typing import Callable, Optional, Dict
2 | from lxml import etree
3 | from ..utils.process_rss_text import ParsedRssText, process_rss_text
4 |
5 | def filter_lambda(
6 | rss_text: str,
7 | rss_item_lambda: Callable[[etree.Element, Dict], Optional[etree.Element]]
8 | ) -> str:
9 | def processor(parsed_rss_text: ParsedRssText):
10 | root = parsed_rss_text.root
11 | parent = parsed_rss_text.parent
12 | items = parsed_rss_text.items
13 |
14 | # Filter the items or entries
15 | transformed_items = list(map(lambda item: rss_item_lambda(item, root.nsmap), items))
16 |
17 |         # Remove all original items and append the kept items
18 | for item in items:
19 | parent.remove(item)
20 | for item in transformed_items:
21 | if item is not None:
22 | parent.append(item)
23 |
24 | return process_rss_text(rss_text, processor)
25 |
--------------------------------------------------------------------------------
/rss_lambda/simple_filters/simple_filters.py:
--------------------------------------------------------------------------------
1 | from typing import Optional, List, Dict, Callable
2 | from lxml import etree
3 | from .filter_lambda import filter_lambda
4 | from ..utils.image_utils import extract_images_from_description
5 |
6 | def _filter_by_title_including_substrings(e: etree.Element, root_nsmap: Dict, included_substrings: List[str]) -> Optional[etree.Element]:
7 | title_e = e.find('title', root_nsmap)
8 | if title_e is None:
9 | return e
10 | title = title_e.text
11 | if title is None:
12 | return e
13 | for substr in included_substrings:
14 | if substr in title:
15 | return e
16 | return None
17 |
18 | def filter_by_title_including_substrings(rss_text: str, included_substrings: List[str]) -> str:
19 | return filter_lambda(rss_text, lambda e, root_nsmap: _filter_by_title_including_substrings(e, root_nsmap, included_substrings))
20 |
21 | def _filter_by_title_excluding_substrings(e: etree.Element, root_nsmap: Dict, excluded_substrings: List[str]) -> Optional[etree.Element]:
22 | title_e = e.find('title', root_nsmap)
23 | if title_e is None:
24 | return e
25 | title = title_e.text
26 | if title is None:
27 | return e
28 | for substr in excluded_substrings:
29 | if substr in title:
30 | return None
31 | return e
32 |
33 | def filter_by_title_excluding_substrings(rss_text: str, excluded_substrings: List[str]) -> str:
34 | return filter_lambda(rss_text, lambda e, root_nsmap: _filter_by_title_excluding_substrings(e, root_nsmap, excluded_substrings))
35 |
36 | def _filter_by_description_including_substrings(e: etree.Element, root_nsmap: Dict, included_substrings: List[str]) -> Optional[etree.Element]:
37 | description_e = e.find('description', root_nsmap)
38 |
39 | media_description_e = None
40 | media_group_e = e.find('media:group', root_nsmap)
41 | if media_group_e is not None:
42 | media_description_e = media_group_e.find('media:description', root_nsmap)
43 |
44 | description = None
45 | if media_description_e is not None:
46 | description = media_description_e.text
47 | elif description_e is not None:
48 | description = description_e.text
49 |
50 | if description is None:
51 | return e
52 | for substr in included_substrings:
53 | if substr in description:
54 | return e
55 | return None
56 |
57 | def filter_by_description_including_substrings(rss_text: str, included_substrings: List[str]) -> str:
58 | return filter_lambda(rss_text, lambda e, root_nsmap: _filter_by_description_including_substrings(e, root_nsmap, included_substrings))
59 |
60 | def _filter_by_description_excluding_substrings(e: etree.Element, root_nsmap: Dict, excluded_substrings: List[str]) -> Optional[etree.Element]:
61 | description_e = e.find('description', root_nsmap)
62 | if description_e is None:
63 | return e
64 | description = description_e.text
65 | if description is None:
66 | return e
67 | for substr in excluded_substrings:
68 | if substr in description:
69 | return None
70 | return e
71 |
72 | def filter_by_description_excluding_substrings(rss_text: str, excluded_substrings: List[str]) -> str:
73 | return filter_lambda(rss_text, lambda e, root_nsmap: _filter_by_description_excluding_substrings(e, root_nsmap, excluded_substrings))
74 |
75 | def _filter_by_description_containing_image(e: etree.Element, root_nsmap: Dict) -> Optional[etree.Element]:
76 | images = extract_images_from_description(e, root_nsmap)
77 | if len(images) == 0:
78 | return None
79 | return e
80 |
81 | def filter_by_description_containing_image(rss_text: str) -> str:
82 | return filter_lambda(rss_text, _filter_by_description_containing_image)
83 |
--------------------------------------------------------------------------------
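One behavioral detail worth noting: the substring filters above fail open, i.e. an item with no `<title>`/`<description>` element (or empty text) is kept rather than dropped. A minimal usage sketch (`feed.xml` is a placeholder):

```python
from rss_lambda.simple_filters.simple_filters import (
    filter_by_title_including_substrings,
    filter_by_description_containing_image,
)

rss_xml = open("feed.xml").read()  # placeholder feed

# keep items whose title contains any of the substrings (title-less items are kept)
only_news = filter_by_title_including_substrings(rss_xml, ["news", "NEWS"])

# keep only items whose description embeds at least one image
only_images = filter_by_description_containing_image(rss_xml)
```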
/rss_lambda/test_image_recog.py:
--------------------------------------------------------------------------------
1 | import os.path
2 | import shutil
3 | import time
4 | import unittest
5 | from unittest.mock import patch
6 | from .image_recog.image_recog import image_recog
7 | from .test_utils import nitter_rss20_response
8 |
9 | rss_text = nitter_rss20_response([
10 | '
some random text
',
11 | 'also some random texts
but without images haha
',
12 | 'also some random texts
but without images haha 2222
',
13 | 'also some random texts but with images hahahaha

',
14 | 'also some random texts but with images hahahaha
',
15 | ])
16 |
17 | nitter_rss20_processing_response = """
18 |
19 |
20 |
21 | twitter_handle / @twitter_handle
22 | http://nitter.example.com/twitter_handle
23 | Twitter feed for: @twitter_handle. Generated by nitter.example.com
24 |
25 | en-us
26 | 40
27 | - Processing, please refresh later...Processing, please refresh later...
28 | """
29 |
30 | nitter_rss20_processed_response = """
31 |
32 |
33 |
34 | twitter_handle / @twitter_handle
35 | http://nitter.example.com/twitter_handle
36 | Twitter feed for: @twitter_handle. Generated by nitter.example.com
37 |
38 | en-us
39 | 40
40 | - Image]]>https://nitter.example.com/twitter_handle/pic/pic1.jpghttp://nitter.example.com/twitter_handle/status/-1#m
- Image]]>https://nitter.example.com/twitter_handle/pic/pic3.jpghttp://nitter.example.com/twitter_handle/status/-1#m
41 | """
42 |
43 | rss_text_2 = nitter_rss20_response([
44 | 'some random text
',
45 | 'also some random texts
but without images haha
',
46 | 'also some random texts
but without images haha 2222
',
47 | 'also some random texts but with images hahahaha

',
48 | 'also some random texts but with images hahahaha
',
49 | 'also some random texts but with images hahahaha
',
50 | ])
51 |
52 | nitter_rss20_processed_response_2 = """
53 |
54 |
55 |
56 | twitter_handle / @twitter_handle
57 | http://nitter.example.com/twitter_handle
58 | Twitter feed for: @twitter_handle. Generated by nitter.example.com
59 |
60 | en-us
61 | 40
62 | - Image]]>https://nitter.example.com/twitter_handle/pic/pic1.jpghttp://nitter.example.com/twitter_handle/status/-1#m
- Image]]>https://nitter.example.com/twitter_handle/pic/pic3.jpghttp://nitter.example.com/twitter_handle/status/-1#m
- Image]]>https://nitter.example.com/twitter_handle/pic/pic4.jpghttp://nitter.example.com/twitter_handle/status/-1#m
63 | """
64 |
65 | def fake_yolov3(image_path: str, desired_class_id: int):
66 | time.sleep(0.1)
67 | return image_path != 'https://nitter.example.com/twitter_handle/pic/pic2.jpg'
68 |
69 |
70 | class ImageRecognitionTestCase(unittest.TestCase):
71 | def setUp(self):
72 | if os.path.exists('cache'):
73 | shutil.rmtree('cache')
74 | os.makedirs('cache')
75 | self.maxDiff = None
76 |
77 | @patch('rss_lambda.image_recog.image_recog.yolov3', wraps=fake_yolov3)
78 | def test_image_recognition(self, _):
79 | with self.assertLogs('root', level='INFO') as log_context_manager:
80 | # first call should return processing response but processing had been kicked off
81 | self.assertEqual(
82 | image_recog(rss_text, 0, 'http://nitter.example.com/twitter_handle'),
83 | nitter_rss20_processing_response)
84 |
85 | # subsequent calls should return processing response; we'll assert that no duplicate processing is kicked off later via log lines
86 | for _ in range(5):
87 | self.assertEqual(
88 | image_recog(rss_text, 0, 'http://nitter.example.com/twitter_handle'),
89 | nitter_rss20_processing_response)
90 |
91 | time.sleep(1)
92 | # by this time processing should have been completed; should return processed response
93 | self.assertEqual(
94 | image_recog(rss_text, 0, 'http://nitter.example.com/twitter_handle'),
95 | nitter_rss20_processed_response)
96 |
97 | # subsequent calls should return the same processed response as long as input rss text doesn't change
98 | for _ in range(5):
99 | self.assertEqual(
100 | image_recog(rss_text, 0, 'http://nitter.example.com/twitter_handle'),
101 | nitter_rss20_processed_response)
102 |
103 | # first call with updated rss text should return the first processed response but processing had been kicked off
104 | self.assertEqual(
105 | image_recog(rss_text_2, 0, 'http://nitter.example.com/twitter_handle'),
106 | nitter_rss20_processed_response)
107 |
108 | # subsequent calls should return the first processed response; we'll assert that no duplicate processing is kicked off later via log lines
109 | for _ in range(5):
110 | self.assertEqual(
111 | image_recog(rss_text_2, 0, 'http://nitter.example.com/twitter_handle'),
112 | nitter_rss20_processed_response)
113 | time.sleep(1)
114 | # by this time processing should have been completed; should return second processed response
115 | self.assertEqual(
116 | image_recog(rss_text_2, 0, 'http://nitter.example.com/twitter_handle'),
117 | nitter_rss20_processed_response_2)
118 |
119 | # processing should only be triggered twice, once for each rss text
120 |             processing_triggered = 0
121 |             for log in log_context_manager.output:
122 |                 if 'start processing' in log:
123 |                     processing_triggered += 1
124 |             self.assertEqual(processing_triggered, 2)
125 |
--------------------------------------------------------------------------------
/rss_lambda/test_merger.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from typing import List, Tuple
3 | from .merger.merger import merger
4 |
5 |
6 | def _nyt_rss_response(guid_and_pub_dates: List[Tuple[str, str]]):
7 | def guid_and_pub_date_to_xml(guid_and_pub_date: Tuple[str, str]) -> str:
8 | guid, pub_date = guid_and_pub_date[0], guid_and_pub_date[1]
9 | return f"""-
10 | {guid}
11 | {guid}
12 | {pub_date}
13 |
"""
14 |
15 | return f"""
16 |
17 |
18 | NYT > Most Popular
19 | {'\n'.join(map(guid_and_pub_date_to_xml, guid_and_pub_dates))}
20 |
21 | """
22 |
23 |
24 | class RssMergerTestCase(unittest.TestCase):
25 | def setUp(self):
26 | self.maxDiff = None
27 |
28 | def test_rss_merger(self):
29 | rss_text_1 = _nyt_rss_response([
30 | ('https://nyt.example.com/trump1.html', '2024-01-06T07:07:14+0000'),
31 | ('https://nyt.example.com/harris1.html', '2024-01-05T07:07:14+0000'),
32 | ])
33 | rss_text_2 = _nyt_rss_response([
34 | ('https://nyt.example.com/harris2.html', '2024-01-06T06:07:14+0000'),
35 | ('https://nyt.example.com/trump1.html', '2024-01-06T07:07:14+0000'),
36 | ])
37 | self.assertEqual(
38 | merger([rss_text_1, rss_text_2]),
39 | _nyt_rss_response([
40 | ('https://nyt.example.com/trump1.html', '2024-01-06T07:07:14+0000'),
41 | ('https://nyt.example.com/harris1.html', '2024-01-05T07:07:14+0000'),
42 | ('https://nyt.example.com/harris2.html', '2024-01-06T06:07:14+0000'),
43 | ])
44 | )
45 |
--------------------------------------------------------------------------------
/rss_lambda/test_simple_filters.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from typing import List
3 | from .simple_filters.simple_filters import \
4 | filter_by_title_including_substrings,\
5 | filter_by_title_excluding_substrings,\
6 | filter_by_description_including_substrings,\
7 | filter_by_description_excluding_substrings,\
8 | filter_by_description_containing_image
9 | from .test_utils import nitter_rss20_response
10 |
11 |
12 | def _youtube_atom_response(titles: List[str]):
13 | def title_to_xml(title: str) -> str:
14 | return f"""
15 | yt:video:bbbbbb
16 | bbbbbb
17 | aaaaaa
18 | {title}
19 |
20 |
21 | channel title
22 | https://www.youtube.com/channel/aaaaaa
23 |
24 | {'2024-01-06T07:07:14+0000'}
25 | {'2024-01-06T07:07:14+0000'}
26 |
27 | {title}
28 |
29 |
30 | description
31 |
32 |
33 |
34 |
35 |
36 | """
37 |
38 | return f"""
39 |
40 |
41 | yt:channel:aaaaaa
42 | aaaaaa
43 | channel title
44 |
45 |
46 | channel title
47 | https://www.youtube.com/channel/aaaaaa
48 |
49 | {'2024-01-06T07:07:14+0000'}
50 | {'\n'.join(map(title_to_xml, titles))}
51 | """
52 |
53 |
54 | class SimpleFiltersTestCase(unittest.TestCase):
55 | def setUp(self):
56 | self.maxDiff = None
57 |
58 | def test_filter_by_title_including_substrings(self):
59 | rss_text = _youtube_atom_response([
60 | 'title 1',
61 | 'title 2 but INCLUDE ME',
62 | ])
63 | self.assertEqual(
64 | filter_by_title_including_substrings(rss_text, ['INCLUDE ME']),
65 | _youtube_atom_response([
66 | 'title 2 but INCLUDE ME',
67 | ])
68 | )
69 |
70 | def test_filter_by_title_excluding_substrings(self):
71 | rss_text = _youtube_atom_response([
72 | 'title 1',
73 | 'title 2 but EXCLUDE ME',
74 | ])
75 | self.assertEqual(
76 | filter_by_title_excluding_substrings(rss_text, ['EXCLUDE ME']),
77 | _youtube_atom_response([
78 | 'title 1',
79 | ])
80 | )
81 |
82 | # def test_filter_by_description_including_substrings(self):
83 | # rss_text = nitter_rss20_response([
84 | # 'some random text
',
85 | # 'also some random texts but INCLUDE ME hahaha
',
86 | # ])
87 | # self.assertEqual(
88 | # filter_by_description_including_substrings(rss_text, ['INCLUDE ME']),
89 | # nitter_rss20_response([
90 | # 'also some random texts but INCLUDE ME hahaha
',
91 | # ])
92 | # )
93 |
94 | def test_filter_by_description_excluding_substrings(self):
95 | rss_text = nitter_rss20_response([
96 | '<p>some random text</p>',
97 | '<p>also some random texts but EXCLUDE ME hahaha</p>',
98 | ])
99 | self.assertEqual(
100 | filter_by_description_excluding_substrings(rss_text, ['EXCLUDE ME']),
101 | nitter_rss20_response([
102 | '<p>some random text</p>',
103 | ])
104 | )
105 |
106 | def test_filter_by_description_containing_image(self):
107 | rss_text = nitter_rss20_response([
108 | '<p>some random text</p>',
109 | '<p>also some random texts</p><p>but without images haha</p>',
110 | '<p>also some random texts</p><p>but without images haha 2222</p>',
111 | '<p>also some random texts but with images hahahaha</p><img src="https://example.com/image.jpg"/>',
112 | ])
113 | self.assertEqual(
114 | filter_by_description_containing_image(rss_text),
115 | nitter_rss20_response([
116 | '<p>also some random texts but with images hahahaha</p><img src="https://example.com/image.jpg"/>',
117 | ])
118 | )
119 |
--------------------------------------------------------------------------------
/rss_lambda/test_to_image_feed.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from typing import List, Tuple
3 | from .to_image_feed.to_image_feed import to_image_feed
4 | from .test_utils import nitter_rss20_response
5 |
6 |
7 | def nitter_rss20_response_with_guid(description_html_and_guids: List[Tuple[str, str]]):
8 | def description_html_to_xml(description_html_and_guid: Tuple[str, str]) -> str:
9 | return f"""<item>
10 | <title>title</title>
11 | <dc:creator>@twitter_handle</dc:creator>
12 | <description><![CDATA[{description_html_and_guid[0]}]]></description>
13 | <pubDate>{"Sat, 06 Jan 2024 07:06:54 GMT"}</pubDate>
14 | <guid>{description_html_and_guid[1]}</guid>
15 | <link>http://nitter.example.com/twitter_handle/status/-1#m</link>
16 | </item>"""
17 |
18 | return f"""<?xml version="1.0" encoding="UTF-8"?>
19 | <rss xmlns:atom="http://www.w3.org/2005/Atom" xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0">
20 | <channel>
21 | <atom:link href="http://nitter.example.com/twitter_handle/rss" rel="self" type="application/rss+xml" />
22 | <title>twitter_handle / @twitter_handle</title>
23 | <link>http://nitter.example.com/twitter_handle</link>
24 | <description>Twitter feed for: @twitter_handle. Generated by nitter.example.com</description>
25 |
26 | <language>en-us</language>
27 | <ttl>40</ttl>
28 | {'\n'.join(map(description_html_to_xml, description_html_and_guids))}
29 | </channel>
30 | </rss>"""
31 |
32 |
33 | class ToImageFeedTestCase(unittest.TestCase):
34 | def setUp(self):
35 | self.maxDiff = None
36 |
37 | def test_to_image_feed(self):
38 | rss_text = nitter_rss20_response([
39 | '<p>some random text</p>',
40 | '<p>also some random texts</p><p>but without images haha</p>',
41 | '<p>also some random texts</p><p>but without images haha 2222</p>',
42 | '<p>also some random texts but with images hahahaha</p><img src="..."/>',
43 | '<p>also some random texts but with images hahahaha</p><img src="..."/><img src="..."/>',
44 | '<p>also some random texts but with a lot of images hahahaha</p><img src="..."/><img src="..."/><img src="..."/><img src="..."/>',
45 | ])
46 | self.assertEqual(
47 | to_image_feed(rss_text),
48 | nitter_rss20_response_with_guid([
49 | ('<img src="..."/>', "http://nitter.example.com/twitter_handle/status/-1#m#0082835d2bff049a1bc59e9b84ff09b80ab63957d6d7281ce2def79789848e42"),
50 | ('<img src="..."/>', "http://nitter.example.com/twitter_handle/status/-1#m#f1a353fb4ea5acda598a4f7883e83c17771cf876e2a63b3969d441e4679279d7"),
51 | ('<img src="..."/>', "http://nitter.example.com/twitter_handle/status/-1#m#50e91838b86fadd360fe9d17e896f08da8fece8e9f3b8256789f3ae3941e720e"),
52 | ('<img src="..."/>', "http://nitter.example.com/twitter_handle/status/-1#m#cbc0f68a50eee9919e567c817816c2caa96fcfd7d2732e9c4ed9f48331a7f449"),
53 | ('<img src="..."/>', "http://nitter.example.com/twitter_handle/status/-1#m#8f3ad6b81214c231a6f53a37dac1455d4b04e9be0f7e2bdfadf1ac5698cb2f96"),
54 | ('<img src="..."/>', "http://nitter.example.com/twitter_handle/status/-1#m#23c5c34f8122b70044e28a2902e666e51d6baf92360b39e39cf6114b621ca626"),
55 | ('<img src="..."/>', "http://nitter.example.com/twitter_handle/status/-1#m#362a27ca9602d2cdf012faeeae96594aa2080f2070b6f072a00a58917ee2139c"),
56 | ])
57 | )
58 |
--------------------------------------------------------------------------------
/rss_lambda/test_utils.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 |
3 |
4 | def nitter_rss20_response(description_htmls: List[str]):
5 | def description_html_to_xml(description_html: str) -> str:
6 | return f"""<item>
7 | <title>title</title>
8 | <dc:creator>@twitter_handle</dc:creator>
9 | <description><![CDATA[{description_html}]]></description>
10 | <pubDate>{"Sat, 06 Jan 2024 07:06:54 GMT"}</pubDate>
11 | <guid>http://nitter.example.com/twitter_handle/status/-1#m</guid>
12 | <link>http://nitter.example.com/twitter_handle/status/-1#m</link>
13 | </item>"""
14 |
15 | return f"""<?xml version="1.0" encoding="UTF-8"?>
16 | <rss xmlns:atom="http://www.w3.org/2005/Atom" xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0">
17 | <channel>
18 | <atom:link href="http://nitter.example.com/twitter_handle/rss" rel="self" type="application/rss+xml" />
19 | <title>twitter_handle / @twitter_handle</title>
20 | <link>http://nitter.example.com/twitter_handle</link>
21 | <description>Twitter feed for: @twitter_handle. Generated by nitter.example.com</description>
22 |
23 | <language>en-us</language>
24 | <ttl>40</ttl>
25 | {'\n'.join(map(description_html_to_xml, description_htmls))}
26 | </channel>
27 | </rss>"""
28 |
--------------------------------------------------------------------------------
/rss_lambda/to_image_feed/to_image_feed.py:
--------------------------------------------------------------------------------
1 | import copy
2 | import hashlib
3 | from lxml import etree
4 | from ..utils.process_rss_text import ParsedRssText, process_rss_text
5 | from ..utils.image_utils import extract_images_from_description
6 |
7 | MAX_IMAGES_PER_ITEM = 4
8 |
9 | def to_image_feed(rss_text: str) -> str:
10 | def processor(parsed_rss_text: ParsedRssText):
11 | root = parsed_rss_text.root
12 | parent = parsed_rss_text.parent
13 | items = parsed_rss_text.items
14 |
15 | def handle_image(item, image):
16 | image_link = image.get('src')
17 |
18 | new_item = copy.deepcopy(item)
19 | new_description = new_item.find('description', root.nsmap)
20 | new_description.text = etree.CDATA(f'<img src="{image_link}"/>')
21 |
22 | sha256_hash = hashlib.sha256()
23 | sha256_hash.update(image_link.encode('utf-8'))
24 | hashed_image_link = sha256_hash.hexdigest()
25 |
26 | new_guid = new_item.find('guid', root.nsmap)
27 | new_guid.text += f"#{hashed_image_link}"
28 |
29 | parent.append(new_item)
30 |
31 | def handle_item(item):
32 | description_e = item.find('description', root.nsmap)
33 | if description_e is None:
34 | return
35 | description = description_e.text
36 | if description is None:
37 | return
38 | images = extract_images_from_description(item, root.nsmap)
39 | if not images:
40 | return
41 | for image in images[: MAX_IMAGES_PER_ITEM]:
42 | handle_image(item, image)
43 |
44 | for item in items:
45 | handle_item(item)
46 | parent.remove(item)
47 |
48 | return process_rss_text(rss_text, processor)
49 |
--------------------------------------------------------------------------------
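
to_image_feed fans each item out into one new item per embedded image (capped at MAX_IMAGES_PER_ITEM) and keeps the copies' guids unique by appending a SHA-256 digest of the image URL to the original guid. A minimal sketch of that guid derivation, assuming guid and image-URL values shaped like the ones in the tests; the helper name is illustrative, not part of the module:

import hashlib

def guid_for_image(guid: str, image_link: str) -> str:
    # Same derivation as handle_image above: hex-encoded SHA-256 of the
    # image URL, appended to the original guid after a '#'.
    digest = hashlib.sha256(image_link.encode('utf-8')).hexdigest()
    return f"{guid}#{digest}"

# Illustrative values only:
print(guid_for_image('http://nitter.example.com/twitter_handle/status/-1#m',
                     'https://example.com/a.jpg'))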
/rss_lambda/utils/image_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import os.path
3 | import logging
4 | import tempfile
5 | import requests
6 | from typing import Optional, Dict, List
7 | from urllib.parse import urlparse
8 | from lxml import etree
9 | from bs4 import BeautifulSoup
10 |
11 | def is_cdata(s: str) -> bool:
12 | return s.startswith('<![CDATA[') and s.endswith(']]>')
13 |
14 | def extract_images_from_description(e: etree.Element, root_nsmap: Dict) -> List[etree.Element]:
15 | description_e = e.find('description', root_nsmap)
16 | if description_e is None:
17 | return []
18 | try:
19 | description_text = description_e.text
20 | if is_cdata(description_text):
21 | description_text = description_text[9:-3]
22 | soup = BeautifulSoup(description_text, 'html.parser')
23 | return soup.find_all('img')
24 | except Exception as ex:
25 | logging.error(f'failed to parse description text: {description_e.text}, error: {ex}')
26 | return []
27 |
28 | def download_image(src: str) -> Optional[str]:
29 | # Parse the URL to get the file extension
30 | parsed_url = urlparse(src)
31 | file_extension = os.path.splitext(parsed_url.path)[1]
32 | if not file_extension:
33 | file_extension = '.jpg'
34 |
35 | # Create a temporary file with the correct extension
36 | with tempfile.NamedTemporaryFile(delete=False, suffix=file_extension) as temp_file:
37 | # Download the image
38 | response = requests.get(src)
39 | if response.status_code == 200:
40 | temp_file.write(response.content)
41 | return temp_file.name
42 | else:
43 | logging.error(f"failed to download image from {src}: HTTP status {response.status_code}")
44 | return None
45 |
46 | def create_item_element_with_image(img_src: str, item_element_tag: str, original_link: Optional[str]=None) -> etree.Element:
47 | item_element = etree.Element(item_element_tag)
48 |
49 | title_element = etree.Element('title')
50 | title_element.text = 'Image'
51 | item_element.append(title_element)
52 |
53 | description_element = etree.Element('description')
54 | description_element.text = etree.CDATA(f'<img src="{img_src}"/>')
55 | item_element.append(description_element)
56 |
57 | guid_element = etree.Element('guid')
58 | guid_element.text = img_src
59 | item_element.append(guid_element)
60 |
61 | link_element = etree.Element('link')
62 | link_element.text = original_link if original_link else img_src
63 | item_element.append(link_element)
64 |
65 | return item_element
66 |
67 | def extract_link(e: etree.Element, root_nsmap: Dict) -> Optional[str]:
68 | link_e = e.find('link', root_nsmap)
69 | if link_e is None:
70 | return None
71 | return link_e.text
--------------------------------------------------------------------------------
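
download_image above calls requests.get with no timeout, so a single slow host can stall a worker indefinitely. A hedged variant that adds a timeout and error handling might look like the following; the helper name and the 10-second default are assumptions, not part of the module:

import os
import tempfile
from typing import Optional
from urllib.parse import urlparse

import requests

def download_image_with_timeout(src: str, timeout_s: float = 10.0) -> Optional[str]:
    # Derive the file extension from the URL path, defaulting to .jpg as above.
    file_extension = os.path.splitext(urlparse(src).path)[1] or '.jpg'
    try:
        response = requests.get(src, timeout=timeout_s)
    except requests.RequestException:
        return None
    if response.status_code != 200:
        return None
    # delete=False, as in download_image: the caller owns cleanup of the file.
    with tempfile.NamedTemporaryFile(delete=False, suffix=file_extension) as temp_file:
        temp_file.write(response.content)
        return temp_file.name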
/rss_lambda/utils/process_rss_text.py:
--------------------------------------------------------------------------------
1 | import feedparser
2 | from typing import List, Callable
3 | from dataclasses import dataclass
4 | from lxml import etree
5 | from .rss_lambda_error import RSSLambdaError
6 |
7 | supported_feed_versions = ["rss20", "atom10", "atom03"]
8 | xml_declaration = '<?xml version="1.0" encoding="UTF-8"?>'
9 |
10 |
11 | @dataclass
12 | class ParsedRssText:
13 | root: etree.Element
14 | parent: etree.Element
15 | items: List[etree.Element]
16 |
17 |
18 | def parse_rss_text(rss_text: str) -> ParsedRssText:
19 | # Determine if it's a valid and supported feed
20 | feed = feedparser.parse(rss_text)
21 | if not feed.entries:
22 | raise RSSLambdaError(f"Did not detect any entries in feed")
23 | if feed.version not in supported_feed_versions:
24 | raise RSSLambdaError(f"Unsupported feed version: {feed.version}")
25 |
26 | # Parse the feed and find the parent element of the items or entries
27 | lxml_parser = etree.XMLParser(strip_cdata=False)
28 | root = etree.fromstring(rss_text.encode('utf-8'), parser=lxml_parser)
29 | if feed.version == 'rss20':
30 | parent = root.find('./channel')
31 | items = parent.findall('item')
32 | elif feed.version in ['atom10', 'atom03']:
33 | parent = root
34 | items = parent.findall('{http://www.w3.org/2005/Atom}entry')
35 | else:
36 | raise RSSLambdaError(f"Escaped unsupported feed version: {feed.version}")
37 |
38 | return ParsedRssText(
39 | root=root,
40 | parent=parent,
41 | items=items)
42 |
43 |
44 | def wrap_items_to_rss_text(rss_text: str, parsed_rss_text: ParsedRssText) -> str:
45 | # Return processed feed
46 | if xml_declaration in rss_text:
47 | return xml_declaration + '\n' + etree.tostring(parsed_rss_text.root, encoding='unicode')
48 | else:
49 | return etree.tostring(parsed_rss_text.root, encoding='unicode')
50 |
51 |
52 | def process_rss_text(rss_text: str, processor: Callable[[ParsedRssText], None]) -> str:
53 | parsed_rss_text = parse_rss_text(rss_text)
54 | processor(parsed_rss_text)
55 | return wrap_items_to_rss_text(rss_text, parsed_rss_text)
56 |
--------------------------------------------------------------------------------
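
process_rss_text is the seam the transformations above go through: parse_rss_text validates the feed with feedparser, the processor mutates the lxml tree in place, and wrap_items_to_rss_text re-serializes it. A minimal sketch of a custom processor, assuming a well-formed RSS 2.0 string; the feed text and processor below are illustrative:

from rss_lambda.utils.process_rss_text import ParsedRssText, process_rss_text

def keep_first_item_only(parsed: ParsedRssText) -> None:
    # Mutations happen in place; process_rss_text re-serializes the tree.
    for item in parsed.items[1:]:
        parsed.parent.remove(item)

rss_text = """<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"><channel>
<title>t</title><link>http://example.com</link><description>d</description>
<item><title>a</title><guid>1</guid></item>
<item><title>b</title><guid>2</guid></item>
</channel></rss>"""

print(process_rss_text(rss_text, keep_first_item_only))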
/rss_lambda/utils/rss_lambda_error.py:
--------------------------------------------------------------------------------
1 | class RSSLambdaError(Exception):
2 | def __init__(self, message):
3 | self.message = message
4 | super().__init__(self.message)
5 |
--------------------------------------------------------------------------------
/run.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 |
4 | PORT="${PORT:=5000}"
5 | gunicorn --bind 0.0.0.0:${PORT} --workers 1 app:app --timeout 300
6 |
--------------------------------------------------------------------------------
/static/favicon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sekai-soft/rss-lambda/14bff39704f49b9f456c8ab1ef202bf796e1321f/static/favicon.png
--------------------------------------------------------------------------------
/static/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sekai-soft/rss-lambda/14bff39704f49b9f456c8ab1ef202bf796e1321f/static/icon.png
--------------------------------------------------------------------------------
/static/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | RSS-lambda
8 |
9 |
10 |
11 |
12 |
13 |
107 |
RSS-lambda transforms RSS feeds without RSS client lock-in
108 | 1. Enter feed URL
109 |
115 | 2.
116 |
151 |
152 |
153 | item !== param)"
157 | x-text="param + ' ⌫'">
158 |
159 |
160 |
161 |
169 | 3. This is the feed URL after operation
170 |
177 |
178 | copied = false, 3000)"
184 | >
185 | Open in Inoreader
190 |
193 |
194 |
--------------------------------------------------------------------------------
/static/style.css:
--------------------------------------------------------------------------------
1 | body {
2 | font-family: Helvetica, Arial, sans-serif;
3 | background: #1A1A1A;
4 | color: #F5F5F5;
5 | margin-left: 1rem;
6 | margin-right: 1rem;
7 | }
8 |
9 | /* Hide scrollbar for Chrome, Safari, and Opera */
10 | body::-webkit-scrollbar {
11 | display: none;
12 | }
13 |
14 | /* Hide scrollbar for Firefox */
15 | body {
16 | scrollbar-width: none;
17 | -ms-overflow-style: none; /* IE and Edge */
18 | }
19 |
20 | .fullscreen {
21 | height: 100vh;
22 | }
23 |
24 | .centered-container {
25 | display: flex;
26 | flex-direction: column;
27 | justify-content: center;
28 | align-items: center;
29 | }
30 |
31 | .code-font {
32 | font-family: 'Courier New', monospace;
33 | }
34 |
35 | p {
36 | font-size: 2em;
37 | line-height: 1.5;
38 | }
39 |
40 | .logo {
41 | display: inline-block;
42 | vertical-align: middle;
43 | height: 0.75em;
44 | width: auto;
45 | }
46 |
47 | input {
48 | font-size: 2em;
49 | line-height: 1.5;
50 | }
51 |
52 | select {
53 | font-size: 1em;
54 | }
55 |
56 | .tag {
57 | font-size: 2em;
58 | margin-bottom: 0.5em;
59 | }
60 |
61 | a {
62 | font-size: 2em;
63 | line-height: 1.5;
64 | }
65 |
66 | /** font sizes for tablet **/
67 | @media screen and (max-width: 768px) {
68 | p {
69 | font-size: 2em;
70 | line-height: 1.5;
71 | }
72 |
73 | .logo {
74 | display: inline-block;
75 | vertical-align: middle;
76 | height: 0.75em;
77 | width: auto;
78 | }
79 |
80 | input {
81 | font-size: 2em;
82 | line-height: 1.5;
83 | }
84 |
85 | .tag {
86 | font-size: 2em;
87 | }
88 |
89 | a {
90 | font-size: 2em;
91 | line-height: 1.5;
92 | }
93 | }
94 |
95 | /** font sizes for phone **/
96 | @media screen and (max-width: 414px) {
97 | p {
98 | font-size: 1.5em;
99 | line-height: 1.5;
100 | }
101 |
102 | .logo {
103 | display: inline-block;
104 | vertical-align: middle;
105 | height: 0.5em;
106 | width: auto;
107 | }
108 |
109 | input {
110 | font-size: 1.5em;
111 | line-height: 1.5;
112 | }
113 |
114 | .tag {
115 | font-size: 1.5em;
116 | }
117 |
118 | a {
119 | font-size: 1.5em;
120 | line-height: 1.5;
121 | }
122 | }
123 |
124 | input {
125 | color: #F5F5F5;
126 | border: none;
127 | border-bottom: 2px solid #999999;
128 | background-color: #1A1A1A;
129 | width: 75%;
130 | }
131 |
132 | .short-input {
133 | width: 25%;
134 | }
135 |
136 | input::placeholder {
137 | color: #999999;
138 | }
139 |
140 | input:focus {
141 | outline: none;
142 | }
143 |
144 | select {
145 | color: #F5F5F5;
146 | border: none;
147 | background-color: #1A1A1A;
148 | max-width: 75%;
149 | }
150 |
151 | select:focus {
152 | outline: none;
153 | }
154 |
155 | .tag {
156 | color: #F5F5F5;
157 | background-color: #1A1A1A;
158 | border: 2px solid #F5F5F5;
159 | border-radius: 8px;
160 | padding: 0.5em;
161 | margin-right: 0.5em;
162 | display: inline-block;
163 | cursor: pointer;
164 | }
165 |
166 | a {
167 | color: #F5F5F5;
168 | }
169 |
170 | .long-url {
171 | overflow-wrap: anywhere;
172 | }
--------------------------------------------------------------------------------
/test-rss.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | コスプレ / @KTachibana_M
6 | http://nitter.example.com/i/lists/1846091864849764752
7 | Twitter feed for: コスプレ by @KTachibana_M. Generated by nitter.example.com
8 |
9 | en-us
10 | 40
11 | -
12 | RT by @miinyan73: バニーのふとももをどうぞ🐰🖤
13 | @miinyan73
14 | バニーのふとももをどうぞ🐰🖤
15 |
]]>
16 | Fri, 18 Oct 2024 10:10:25 GMT
17 | http://nitter.example.com/miinyan73/status/1847218669413404699#m
18 | http://nitter.example.com/miinyan73/status/1847218669413404699#m
19 |
20 | -
21 | おはマリン🏴☠️
22 |
23 | 今日明日は久々の大井コスイベですね✨
24 | 残念ながらわたしは行けないのですが、天気もなんとかもちそうだし(今日はめちゃめちゃ暑いが)素敵なイベントになりますように!
25 | 次回は参加するぞー🐴
26 |
27 | ホロライブ/宝鐘マリン
28 | @cosplay_110
29 | おはマリン🏴☠️
30 |
31 | 今日明日は久々の大井コスイベですね✨
32 | 残念ながらわたしは行けないのですが、天気もなんとかもちそうだし(今日はめちゃめちゃ暑いが)素敵なイベントになりますように!
33 | 次回は参加するぞー🐴
34 |
35 | ホロライブ/宝鐘マリン
36 |
]]>
37 | Fri, 18 Oct 2024 23:31:12 GMT
38 | http://nitter.example.com/cosplay_110/status/1847420195142586385#m
39 | http://nitter.example.com/cosplay_110/status/1847420195142586385#m
40 |
41 | -
42 | おはようございます☀️
43 | @uchino0212uk
44 | おはようございます☀️
45 |
]]>
46 | Fri, 18 Oct 2024 23:24:32 GMT
47 | http://nitter.example.com/uchino0212uk/status/1847418517165134067#m
48 | http://nitter.example.com/uchino0212uk/status/1847418517165134067#m
49 |
50 | -
51 | サンキャッチャー用意しなきゃいけないのがいちばん大変かも😇😇😇
52 |
53 | 銀テならあるけど金テ無いんだが〜!😵💫
54 | @milktea_new
55 | サンキャッチャー用意しなきゃいけないのがいちばん大変かも😇😇😇
56 |
57 | 銀テならあるけど金テ無いんだが〜!😵💫
58 |
59 |
]]>
60 | Fri, 18 Oct 2024 23:20:40 GMT
61 | http://nitter.example.com/milktea_new/status/1847417542782177329#m
62 | http://nitter.example.com/milktea_new/status/1847417542782177329#m
63 |
64 | -
65 | 真剣にプリキュアの衣装作れるようになりたい
66 | @gon_pyon
67 | 真剣にプリキュアの衣装作れるようになりたい]]>
68 | Fri, 18 Oct 2024 23:13:16 GMT
69 | http://nitter.example.com/gon_pyon/status/1847415681408782619#m
70 | http://nitter.example.com/gon_pyon/status/1847415681408782619#m
71 |
72 | -
73 | RT by @gon_pyon: 【Cosplay】
74 | ᧔💜᧓••┈┈┈┈••᧔💜᧓••┈┈┈┈••᧔💜᧓
75 |
76 | みんなの笑顔で彩る世界!
77 | キュアフレンディ!
78 |
79 | ᧔💜᧓••┈┈┈┈••᧔💜᧓••┈┈┈┈••᧔💜᧓
80 |
81 | Studio: @studio_ha_n_na
82 | @Kotono__cos
83 | 【Cosplay】
84 | ᧔💜᧓••┈┈┈┈••᧔💜᧓••┈┈┈┈••᧔💜᧓
85 |
86 | みんなの笑顔で彩る世界!
87 | キュアフレンディ!
88 |
89 | ᧔💜᧓••┈┈┈┈••᧔💜᧓••┈┈┈┈••᧔💜᧓
90 |
91 | Studio: @studio_ha_n_na
92 |
]]>
93 | Fri, 11 Oct 2024 10:30:04 GMT
94 | http://nitter.example.com/Kotono__cos/status/1844686902550786149#m
95 | http://nitter.example.com/Kotono__cos/status/1844686902550786149#m
96 |
97 | -
98 | 最近自作が多いから自作レイヤーかと思われるかもしれないですが自作始めたてレイヤーですね
99 |
100 | 既製品を改造するところから始めています…
101 | 完全自作はまだです
102 | @milktea_new
103 | 最近自作が多いから自作レイヤーかと思われるかもしれないですが自作始めたてレイヤーですね
104 |
105 | 既製品を改造するところから始めています…
106 | 完全自作はまだです]]>
107 | Fri, 18 Oct 2024 23:12:36 GMT
108 | http://nitter.example.com/milktea_new/status/1847415514697769394#m
109 | http://nitter.example.com/milktea_new/status/1847415514697769394#m
110 |
111 | -
112 | RT by @gon_pyon: 【Cosplay】
113 |
114 | 『 あなたの声をきかせて 』
115 |
116 | わんだふるぷりきゅあ!
117 | /キュアフレンディ
118 |
119 | Studio / @studio_ha_n_na
120 | Photo / @makiko_camera
121 | #つながるわんだふる0901
122 | @Kotono__cos
123 | 【Cosplay】
124 |
125 | 『 あなたの声をきかせて 』
126 |
127 | わんだふるぷりきゅあ!
128 | /キュアフレンディ
129 |
130 | Studio / @studio_ha_n_na
131 | Photo / @makiko_camera
132 | #つながるわんだふる0901
133 |
134 |
]]>
135 | Fri, 18 Oct 2024 11:14:19 GMT
136 | http://nitter.example.com/Kotono__cos/status/1847234751171805256#m
137 | http://nitter.example.com/Kotono__cos/status/1847234751171805256#m
138 |
139 | -
140 | 🐈⬛🧶
141 | @tsunacos
142 | 🐈⬛🧶
143 |
]]>
144 | Fri, 18 Oct 2024 10:58:59 GMT
145 | http://nitter.example.com/tsunacos/status/1847230892453871698#m
146 | http://nitter.example.com/tsunacos/status/1847230892453871698#m
147 |
148 | -
149 | R to @tsunacos: おはよ~~^⁔ ܸ. ̫ .⁔^
150 | @tsunacos
151 | おはよ~~^⁔ ܸ. ̫ .⁔^]]>
152 | Fri, 18 Oct 2024 23:12:14 GMT
153 | http://nitter.example.com/tsunacos/status/1847415421366128683#m
154 | http://nitter.example.com/tsunacos/status/1847415421366128683#m
155 |
156 | -
157 | お仕事なので返信遅れます!!
158 | @milktea_new
159 | お仕事なので返信遅れます!!]]>
160 | Fri, 18 Oct 2024 23:11:00 GMT
161 | http://nitter.example.com/milktea_new/status/1847415112946372952#m
162 | http://nitter.example.com/milktea_new/status/1847415112946372952#m
163 |
164 | -
165 | おはよう🐓❤️https://fantia.jp/posts/3060297
166 | @raimu22118
167 | おはよう🐓❤️fantia.jp/posts/3060297
168 |
]]>
169 | Fri, 18 Oct 2024 23:03:59 GMT
170 | http://nitter.example.com/raimu22118/status/1847413346120978813#m
171 | http://nitter.example.com/raimu22118/status/1847413346120978813#m
172 |
173 | -
174 | 明日のむくみ取りの為にレモン系のドリンク買って帰ろ!
175 | @milktea_new
176 | 明日のむくみ取りの為にレモン系のドリンク買って帰ろ!]]>
177 | Fri, 18 Oct 2024 23:01:28 GMT
178 | http://nitter.example.com/milktea_new/status/1847412712395198511#m
179 | http://nitter.example.com/milktea_new/status/1847412712395198511#m
180 |
181 | -
182 | RT by @gon_pyon: プレシャスコメコメ🍨
183 | @fumufumuu227
184 | プレシャスコメコメ🍨
185 |
]]>
186 | Fri, 18 Oct 2024 11:27:06 GMT
187 | http://nitter.example.com/fumufumuu227/status/1847237971092914525#m
188 | http://nitter.example.com/fumufumuu227/status/1847237971092914525#m
189 |
190 | -
191 | おはる🌞
192 | 今日はとっても楽しみにしていた日です❣️❣️❣️❣️
193 | @haru_no_inu
194 | おはる🌞
195 | 今日はとっても楽しみにしていた日です❣️❣️❣️❣️]]>
196 | Fri, 18 Oct 2024 22:52:42 GMT
197 | http://nitter.example.com/haru_no_inu/status/1847410506707190117#m
198 | http://nitter.example.com/haru_no_inu/status/1847410506707190117#m
199 |
200 | -
201 | 表紙の構図再現…!!できてうれしい🙋✨️✨️✨️
202 | @tsubasabi
203 | 表紙の構図再現…!!できてうれしい🙋✨️✨️✨️
204 |
nitter.example.com/ksk_johan_cos/status/1847404015094497569#m
]]>
205 | Fri, 18 Oct 2024 22:46:45 GMT
206 | http://nitter.example.com/tsubasabi/status/1847409009722347970#m
207 | http://nitter.example.com/tsubasabi/status/1847409009722347970#m
208 |
209 | -
210 | RT by @tsubasabi: これの再現をやらせていただきました。
211 |
212 | 5年も前から望んでいたものです・・・w
213 | @ksk_johan_cos
214 | これの再現をやらせていただきました。
215 |
216 | 5年も前から望んでいたものです・・・w
217 |
]]>
218 | Fri, 18 Oct 2024 22:28:31 GMT
219 | http://nitter.example.com/ksk_johan_cos/status/1847404419320820210#m
220 | http://nitter.example.com/ksk_johan_cos/status/1847404419320820210#m
221 |
222 | -
223 | RT by @tsubasabi: 【cosplay】
224 |
225 | 無職転生
226 |
227 | 再会
228 |
229 | #1012無職転生大型併せ
230 |
231 | Photo とろ様
232 | @ksk_johan_cos
233 | 【cosplay】
234 |
235 | 無職転生
236 |
237 | 再会
238 |
239 | #1012無職転生大型併せ
240 |
241 | Photo とろ様
242 |
]]>
243 | Fri, 18 Oct 2024 22:26:54 GMT
244 | http://nitter.example.com/ksk_johan_cos/status/1847404015094497569#m
245 | http://nitter.example.com/ksk_johan_cos/status/1847404015094497569#m
246 |
247 | -
248 | お?良くなってきたかな?と思ってからの回復速度はそこそこ早い
249 | @manatsukin
250 | お?良くなってきたかな?と思ってからの回復速度はそこそこ早い]]>
251 | Fri, 18 Oct 2024 22:44:04 GMT
252 | http://nitter.example.com/manatsukin/status/1847408334330351881#m
253 | http://nitter.example.com/manatsukin/status/1847408334330351881#m
254 |
255 | -
256 | やっと少しずつ外出れるくらいになってきた…!長い戦いだったなぁ、まだ戦うけど!!
257 |
258 | 止まってた通販の発送、ゆっくりですが再開します!長らくお待たせしてしまってすみませんでした😭
259 | @manatsukin
260 | やっと少しずつ外出れるくらいになってきた…!長い戦いだったなぁ、まだ戦うけど!!
261 |
262 | 止まってた通販の発送、ゆっくりですが再開します!長らくお待たせしてしまってすみませんでした😭]]>
263 | Fri, 18 Oct 2024 22:43:37 GMT
264 | http://nitter.example.com/manatsukin/status/1847408219146375557#m
265 | http://nitter.example.com/manatsukin/status/1847408219146375557#m
266 |
267 | -
268 | おはよ!沖縄の海楽しむぞー🏝️✨
269 | @ymnnkn_okura
270 | おはよ!沖縄の海楽しむぞー🏝️✨
271 |
nitter.example.com/ymnnkn_okura/status/1847225495341215982#m
]]>
272 | Fri, 18 Oct 2024 22:36:21 GMT
273 | http://nitter.example.com/ymnnkn_okura/status/1847406392552145310#m
274 | http://nitter.example.com/ymnnkn_okura/status/1847406392552145310#m
275 |
276 | -
277 | RT by @XaiabP: よそ見してると逃げちゃうよ。
278 |
279 | @LALKA_THE_EDEN
280 | @XaiabP
281 | よそ見してると逃げちゃうよ。
282 |
283 | @LALKA_THE_EDEN
284 |
285 |
]]>
286 | Fri, 18 Oct 2024 10:58:19 GMT
287 | http://nitter.example.com/XaiabP/status/1847230725743153501#m
288 | http://nitter.example.com/XaiabP/status/1847230725743153501#m
289 |
290 | -
291 | おはよう!!!!!
292 | #ホロリック のある朝!!!!
293 | @uki_kabo
294 | おはよう!!!!!
295 | #ホロリック のある朝!!!!]]>
296 | Fri, 18 Oct 2024 22:32:02 GMT
297 | http://nitter.example.com/uki_kabo/status/1847405303220097138#m
298 | http://nitter.example.com/uki_kabo/status/1847405303220097138#m
299 |
300 | -
301 | RT by @uki_kabo: 【Cosplay】
302 |
303 | hololive / 星街すいせい☄️
304 | 東京タワー×ホロライブコラボ衣装
305 |
306 | Photo : みやびさん(@tnsk6400 )
307 | #うきcos
308 | @uki_kabo
309 | 【Cosplay】
310 |
311 | hololive / 星街すいせい☄️
312 | 東京タワー×ホロライブコラボ衣装
313 |
314 | Photo : みやびさん(@tnsk6400 )
315 | #うきcos
316 |
]]>
317 | Fri, 18 Oct 2024 09:29:11 GMT
318 | http://nitter.example.com/uki_kabo/status/1847208292269060479#m
319 | http://nitter.example.com/uki_kabo/status/1847208292269060479#m
320 |
321 | -
322 | RT by @uki_kabo: ⚠️コスプレ動画注意
323 |
324 | #ホロリック パフォ動画✨
325 | ※保存・転載禁止
326 |
327 | KING / 一条莉々華🌃
328 | 🎥ソラさん(@RASLPQTYfan55 )
329 | @uki_kabo
330 | ⚠️コスプレ動画注意
331 |
332 | #ホロリック パフォ動画✨
333 | ※保存・転載禁止
334 |
335 | KING / 一条莉々華🌃
336 | 🎥ソラさん(@RASLPQTYfan55 )
337 |
]]>
338 | Fri, 18 Oct 2024 12:42:12 GMT
339 | http://nitter.example.com/uki_kabo/status/1847256870513442871#m
340 | http://nitter.example.com/uki_kabo/status/1847256870513442871#m
341 |
342 | -
343 | RT by @yukaaaaa0: .
344 | 白猫さん🐈🐾
345 | @yukaaaaa0
346 | .
347 | 白猫さん🐈🐾
348 |
]]>
349 | Fri, 18 Oct 2024 11:20:33 GMT
350 | http://nitter.example.com/yukaaaaa0/status/1847236320391053434#m
351 | http://nitter.example.com/yukaaaaa0/status/1847236320391053434#m
352 |
353 | -
354 | 今日は船長の撮影頑張ってくるよ〜🏴☠️💘
355 | 朝早い☀️☀️
356 | @tennensui_cos
357 | 今日は船長の撮影頑張ってくるよ〜🏴☠️💘
358 | 朝早い☀️☀️]]>
359 | Fri, 18 Oct 2024 22:13:15 GMT
360 | http://nitter.example.com/tennensui_cos/status/1847400580010168555#m
361 | http://nitter.example.com/tennensui_cos/status/1847400580010168555#m
362 |
363 | -
364 | RT by @banko003: ꧁─────────────────꧂
365 |
366 | 🥀Bisquedoll OSAKA 🥀
367 |
368 | 大阪のみなさん、ごきげんよう。
369 |
370 | ꧁─────────────────꧂
371 | @BisquedollOSAKA
372 | ꧁─────────────────꧂
373 |
374 | 🥀Bisquedoll OSAKA 🥀
375 |
376 | 大阪のみなさん、ごきげんよう。
377 |
378 | ꧁─────────────────꧂
379 |
]]>
380 | Fri, 18 Oct 2024 12:29:51 GMT
381 | http://nitter.example.com/BisquedollOSAKA/status/1847253762064409000#m
382 | http://nitter.example.com/BisquedollOSAKA/status/1847253762064409000#m
383 |
384 | -
385 | RT by @haru_no_inu: 先日、撮影した写真で自作Tシャツを作り、ご本人に見せることがでしました。
386 |
387 | 欲しい!って言ってもらえて嬉しかったです!
388 |
389 | やっぱ推し活って楽しいわ…
390 | (新作作成中)
391 |
392 | モデル:はるの犬さん @haru_no_inu
393 | @someone19960703
394 | 先日、撮影した写真で自作Tシャツを作り、ご本人に見せることがでしました。
395 |
396 | 欲しい!って言ってもらえて嬉しかったです!
397 |
398 | やっぱ推し活って楽しいわ…
399 | (新作作成中)
400 |
401 | モデル:はるの犬さん @haru_no_inu
402 |
]]>
403 | Fri, 18 Oct 2024 11:14:43 GMT
404 | http://nitter.example.com/someone19960703/status/1847234854246867352#m
405 | http://nitter.example.com/someone19960703/status/1847234854246867352#m
406 |
407 | -
408 | アホみたいに寝まくってた
409 | @K_hum03_D
410 | アホみたいに寝まくってた]]>
411 | Fri, 18 Oct 2024 21:58:30 GMT
412 | http://nitter.example.com/K_hum03_D/status/1847396864334975390#m
413 | http://nitter.example.com/K_hum03_D/status/1847396864334975390#m
414 |
415 | -
416 | 可愛すぎる〜🥹🐿️🐿️❣️RP
417 | @_hcmrO_
418 | 可愛すぎる〜🥹🐿️🐿️❣️RP]]>
419 | Fri, 18 Oct 2024 21:36:50 GMT
420 | http://nitter.example.com/_hcmrO_/status/1847391411899158653#m
421 | http://nitter.example.com/_hcmrO_/status/1847391411899158653#m
422 |
423 | -
424 | RT by @_hcmrO_: ⚠️擬人化
425 |
426 | 🥜Chip 'n Dale🐿️
427 |
428 | #Dハロ仮装 #Dハロ仮装2024
429 | @MFscuf
430 | ⚠️擬人化
431 |
432 | 🥜Chip 'n Dale🐿️
433 |
434 | #Dハロ仮装 #Dハロ仮装2024
435 |
]]>
436 | Thu, 17 Oct 2024 09:19:52 GMT
437 | http://nitter.example.com/MFscuf/status/1846843563914809452#m
438 | http://nitter.example.com/MFscuf/status/1846843563914809452#m
439 |
440 | -
441 | おはようございます(🍠•᎑•🍠)
442 | @riku_cos_9
443 | おはようございます(🍠•᎑•🍠)]]>
444 | Fri, 18 Oct 2024 20:55:32 GMT
445 | http://nitter.example.com/riku_cos_9/status/1847381021916090569#m
446 | http://nitter.example.com/riku_cos_9/status/1847381021916090569#m
447 |
448 | -
449 | ちょっとだけowやってから寝るわ!😄
450 | @enako_cos
451 | ちょっとだけowやってから寝るわ!😄
452 |
nitter.example.com/jpPlayOverwatch/status/1846946331061113121#m
]]>
453 | Fri, 18 Oct 2024 19:41:21 GMT
454 | http://nitter.example.com/enako_cos/status/1847362352532115959#m
455 | http://nitter.example.com/enako_cos/status/1847362352532115959#m
456 |
457 | -
458 | 今日(昨日?)は雑誌の撮影、そして夜からさっきまで収録でした🙏
459 |
460 | 長い1日だった〜!明るくなる前に帰る🏃♂️🏃♂️🏃♂️
461 | @enako_cos
462 | 今日(昨日?)は雑誌の撮影、そして夜からさっきまで収録でした🙏
463 |
464 | 長い1日だった〜!明るくなる前に帰る🏃♂️🏃♂️🏃♂️]]>
465 | Fri, 18 Oct 2024 19:34:03 GMT
466 | http://nitter.example.com/enako_cos/status/1847360515485671477#m
467 | http://nitter.example.com/enako_cos/status/1847360515485671477#m
468 |
469 | -
470 | さむぃ。けど上着洗ってるからない。
471 | @mirai_cos_
472 | さむぃ。けど上着洗ってるからない。]]>
473 | Fri, 18 Oct 2024 19:25:14 GMT
474 | http://nitter.example.com/mirai_cos_/status/1847358294828208365#m
475 | http://nitter.example.com/mirai_cos_/status/1847358294828208365#m
476 |
477 | -
478 | RT by @mirai_cos_: ⚠️コスプレ
479 | ⚠️カラコン加工
480 |
481 | 艦これ/大井
482 | @ao_okiao
483 | ⚠️コスプレ
484 | ⚠️カラコン加工
485 |
486 | 艦これ/大井
487 |
]]>
488 | Fri, 18 Oct 2024 15:45:52 GMT
489 | http://nitter.example.com/ao_okiao/status/1847303088010661967#m
490 | http://nitter.example.com/ao_okiao/status/1847303088010661967#m
491 |
492 | -
493 | ソーダちゃんのおっppが大きすぎて衣装改良しなきゃはち切れる😭😭でもここだけは譲れない…😭がんばる…!!
494 | @muunya66
495 | ソーダちゃんのおっppが大きすぎて衣装改良しなきゃはち切れる😭😭でもここだけは譲れない…😭がんばる…!!]]>
496 | Fri, 18 Oct 2024 17:18:40 GMT
497 | http://nitter.example.com/muunya66/status/1847326442532851764#m
498 | http://nitter.example.com/muunya66/status/1847326442532851764#m
499 |
500 | -
501 | 地雷伸び良いな
502 | @abab_abab_0
503 | 地雷伸び良いな]]>
504 | Fri, 18 Oct 2024 16:53:43 GMT
505 | http://nitter.example.com/abab_abab_0/status/1847320164058222915#m
506 | http://nitter.example.com/abab_abab_0/status/1847320164058222915#m
507 |
508 | -
509 | RT by @gngn_P: ブルアカ水着ミユ…
510 |
511 | むずかしすぎたので夜中にこっそり🐰
512 | #鴨コス #ブルアカ
513 | photo @marienyanco
514 | @chiarincle
515 | ブルアカ水着ミユ…
516 |
517 | むずかしすぎたので夜中にこっそり🐰
518 | #鴨コス #ブルアカ
519 | photo @marienyanco
520 |
]]>
521 | Fri, 18 Oct 2024 15:57:51 GMT
522 | http://nitter.example.com/chiarincle/status/1847306104998637953#m
523 | http://nitter.example.com/chiarincle/status/1847306104998637953#m
524 |
525 | -
526 | 準備も終わったし部屋も片付けたーっ!!!!!
527 | @gngn_P
528 | 準備も終わったし部屋も片付けたーっ!!!!!]]>
529 | Fri, 18 Oct 2024 15:58:11 GMT
530 | http://nitter.example.com/gngn_P/status/1847306187907444903#m
531 | http://nitter.example.com/gngn_P/status/1847306187907444903#m
532 |
533 | -
534 | 化かすつもりが化かされるかんじのたぬき
535 | @manatsukin
536 | 化かすつもりが化かされるかんじのたぬき
537 |
]]>
538 | Fri, 18 Oct 2024 15:38:59 GMT
539 | http://nitter.example.com/manatsukin/status/1847301355976982582#m
540 | http://nitter.example.com/manatsukin/status/1847301355976982582#m
541 |
542 | -
543 | わいが、明日までに体調を整えてかつ、9時過ぎに家に帰宅し、ウィッグをちょんぎることを成功させることが出来るかどうかに全てがかかってる。
544 | @amimutam
545 | わいが、明日までに体調を整えてかつ、9時過ぎに家に帰宅し、ウィッグをちょんぎることを成功させることが出来るかどうかに全てがかかってる。]]>
546 | Fri, 18 Oct 2024 15:37:11 GMT
547 | http://nitter.example.com/amimutam/status/1847300905152237784#m
548 | http://nitter.example.com/amimutam/status/1847300905152237784#m
549 |
550 | -
551 | progressive Asuna cosplay
552 | @iruka_inzm
553 | progressive Asuna cosplay
554 |
]]>
555 | Fri, 18 Oct 2024 15:35:17 GMT
556 | http://nitter.example.com/iruka_inzm/status/1847300425915232310#m
557 | http://nitter.example.com/iruka_inzm/status/1847300425915232310#m
558 |
559 | -
560 | 帰って、ウィッグします🫰🏻
561 | @srmmm03
562 | 帰って、ウィッグします🫰🏻]]>
563 | Fri, 18 Oct 2024 15:34:25 GMT
564 | http://nitter.example.com/srmmm03/status/1847300209929589203#m
565 | http://nitter.example.com/srmmm03/status/1847300209929589203#m
566 |
567 | -
568 | RT by @azu_kichi_cos: コスプレ
569 |
570 | 全身
571 |
572 | 彼方ちゃんだよ~🐏
573 |
574 | #影の写真館
575 | #シャチはじ1018可愛くていいね
576 | @_game_name_
577 | コスプレ
578 |
579 | 全身
580 |
581 | 彼方ちゃんだよ~🐏
582 |
583 | #影の写真館
584 | #シャチはじ1018可愛くていいね
585 |
]]>
586 | Fri, 18 Oct 2024 15:15:16 GMT
587 | http://nitter.example.com/_game_name_/status/1847295389323632685#m
588 | http://nitter.example.com/_game_name_/status/1847295389323632685#m
589 |
590 | -
591 | ありえないほど金欠なのに学マス供給もシャニマス供給もすごくて金欠が加速している
592 |
593 | 本気でやばい
594 |
595 | ボーナス全部消えるレベルで金欠
596 | @milktea_new
597 | ありえないほど金欠なのに学マス供給もシャニマス供給もすごくて金欠が加速している
598 |
599 | 本気でやばい
600 |
601 | ボーナス全部消えるレベルで金欠]]>
602 | Fri, 18 Oct 2024 15:23:47 GMT
603 | http://nitter.example.com/milktea_new/status/1847297533397413946#m
604 | http://nitter.example.com/milktea_new/status/1847297533397413946#m
605 |
606 | -
607 | あつ森、ボルトくんが好きすぎて
608 | @milktea_new
609 | あつ森、ボルトくんが好きすぎて]]>
610 | Fri, 18 Oct 2024 15:19:32 GMT
611 | http://nitter.example.com/milktea_new/status/1847296461241008465#m
612 | http://nitter.example.com/milktea_new/status/1847296461241008465#m
613 |
614 | -
615 | 本日はお酒を飲みに行っておりましたので
616 | お写真の投稿はお休みさせて頂きます!
617 |
618 | こちらのお酒はゴッドファーザーです☺️🥃
619 | @azuki_to_milk
620 | 本日はお酒を飲みに行っておりましたので
621 | お写真の投稿はお休みさせて頂きます!
622 |
623 | こちらのお酒はゴッドファーザーです☺️🥃
624 |
]]>
625 | Fri, 18 Oct 2024 15:19:22 GMT
626 | http://nitter.example.com/azuki_to_milk/status/1847296419218272688#m
627 | http://nitter.example.com/azuki_to_milk/status/1847296419218272688#m
628 |
629 | -
630 | RT by @shioringo_227: #杜野凛世生誕祭2024
631 | #杜野凛世誕生祭2024
632 | @WalkingBoring
633 | #杜野凛世生誕祭2024
634 | #杜野凛世誕生祭2024
635 |
]]>
636 | Fri, 18 Oct 2024 15:15:55 GMT
637 | http://nitter.example.com/WalkingBoring/status/1847295554503786738#m
638 | http://nitter.example.com/WalkingBoring/status/1847295554503786738#m
639 |
640 | -
641 | ほのぼの〜☺️🌸て読んでたら最後のページで鳥肌 ううう
642 | @shioringo_227
643 | ほのぼの〜☺️🌸て読んでたら最後のページで鳥肌 ううう]]>
644 | Fri, 18 Oct 2024 15:17:52 GMT
645 | http://nitter.example.com/shioringo_227/status/1847296044369084891#m
646 | http://nitter.example.com/shioringo_227/status/1847296044369084891#m
647 |
648 | -
649 | RT by @shioringo_227: 【最新話更新のお知らせ】
650 | 『アイドルマスター シャイニーカラーズ 事務的光空記録』
651 | 第9.5話が公開されました✨
652 | https://www.sunday-webry.com/episode/2550912964773394965
653 | 感想は #ジムシャニ までお願いします!
654 | #シャニマス #サンデーうぇぶり
655 | @jimushiny_oa
656 | 【最新話更新のお知らせ】
657 | 『アイドルマスター シャイニーカラーズ 事務的光空記録』
658 | 第9.5話が公開されました✨
659 | sunday-webry.com/episode/255…
660 | 感想は #ジムシャニ までお願いします!
661 | #シャニマス #サンデーうぇぶり
662 |
]]>
663 | Fri, 18 Oct 2024 15:01:03 GMT
664 | http://nitter.example.com/jimushiny_oa/status/1847291810131095597#m
665 | http://nitter.example.com/jimushiny_oa/status/1847291810131095597#m
666 |
667 | -
668 | RT by @haru_no_inu: 実際長かったです笑
669 | @clown13_AW
670 | 実際長かったです笑
671 |
nitter.example.com/haru_no_inu/status/1847221032081633575#m
]]>
672 | Fri, 18 Oct 2024 10:32:12 GMT
673 | http://nitter.example.com/clown13_AW/status/1847224151062507719#m
674 | http://nitter.example.com/clown13_AW/status/1847224151062507719#m
675 |
676 | -
677 | RT by @haru_no_inu: 周りが白ってのが良いですね✨
678 | @akatuki_7563
679 | 周りが白ってのが良いですね✨
680 |
nitter.example.com/haru_no_inu/status/1847221032081633575#m
]]>
681 | Fri, 18 Oct 2024 10:25:56 GMT
682 | http://nitter.example.com/akatuki_7563/status/1847222577401671890#m
683 | http://nitter.example.com/akatuki_7563/status/1847222577401671890#m
684 |
685 | -
686 | 土日だけどっかのアラームがあひるに設定されてて癒される時ある(結構終盤の時間なので焦るけど)
687 | @milktea_new
688 | 土日だけどっかのアラームがあひるに設定されてて癒される時ある(結構終盤の時間なので焦るけど)]]>
689 | Fri, 18 Oct 2024 15:07:05 GMT
690 | http://nitter.example.com/milktea_new/status/1847293331027747103#m
691 | http://nitter.example.com/milktea_new/status/1847293331027747103#m
692 |
693 | -
694 | RT by @t_cos_214: ♠配信告知♠
695 | 10/19 21:00-
696 | 1年半ぶりに新衣装お披露目します🥳🤍
697 | さて、どんな衣装か予想しててください。
698 |
699 | 【#空澄新衣装】寒くなってきたね。おにゅ~のお洋服見て~!【空澄セナ/ぶいすぽっ!】 https://www.youtube.com/live/1Ttn_XYSX8A?si=nx689fLIkOnWGODi
700 | @YouTubeより
701 | @sena_asumi
702 | ♠配信告知♠
703 | 10/19 21:00-
704 | 1年半ぶりに新衣装お披露目します🥳🤍
705 | さて、どんな衣装か予想しててください。
706 |
707 | 【#空澄新衣装】寒くなってきたね。おにゅ~のお洋服見て~!【空澄セナ/ぶいすぽっ!】 youtube.com/live/1Ttn_XYSX8A…
708 | @YouTubeより
709 |
]]>
710 | Fri, 18 Oct 2024 15:00:19 GMT
711 | http://nitter.example.com/sena_asumi/status/1847291626345099633#m
712 | http://nitter.example.com/sena_asumi/status/1847291626345099633#m
713 |
714 | -
715 | RT by @ayari_oxo: 新刊の単品販売開始しました♡ ̖́-
716 |
717 | https://ayarioxo.base.shop/
718 | @ayari_oxo
719 | 新刊の単品販売開始しました♡ ̖́-
720 |
721 | ayarioxo.base.shop/
722 |
]]>
723 | Fri, 11 Oct 2024 13:37:52 GMT
724 | http://nitter.example.com/ayari_oxo/status/1844734163490156645#m
725 | http://nitter.example.com/ayari_oxo/status/1844734163490156645#m
726 |
727 | -
728 | ショタと男子の間、やりがちな気がする(伝われ)
729 | @milktea_new
730 | ショタと男子の間、やりがちな気がする(伝われ)]]>
731 | Fri, 18 Oct 2024 15:05:25 GMT
732 | http://nitter.example.com/milktea_new/status/1847292910083183097#m
733 | http://nitter.example.com/milktea_new/status/1847292910083183097#m
734 |
735 | -
736 | x.com/i/spaces/1djGXrZqjoexZ
737 | @srmmm03
738 | x.com/i/spaces/1djGXrZqjoexZ]]>
739 | Fri, 18 Oct 2024 15:03:52 GMT
740 | http://nitter.example.com/srmmm03/status/1847292519127843188#m
741 | http://nitter.example.com/srmmm03/status/1847292519127843188#m
742 |
743 | -
744 | 息を吸うように無意識に園田智代子って言ってる時がある
745 | @milktea_new
746 | 息を吸うように無意識に園田智代子って言ってる時がある]]>
747 | Fri, 18 Oct 2024 15:01:13 GMT
748 | http://nitter.example.com/milktea_new/status/1847291853298872663#m
749 | http://nitter.example.com/milktea_new/status/1847291853298872663#m
750 |
751 | -
752 | 美人ってまじでどんなキャラにもなれるんだ……スゴすぎる
753 | @milktea_new
754 | 美人ってまじでどんなキャラにもなれるんだ……スゴすぎる]]>
755 | Fri, 18 Oct 2024 14:58:59 GMT
756 | http://nitter.example.com/milktea_new/status/1847291292210086393#m
757 | http://nitter.example.com/milktea_new/status/1847291292210086393#m
758 |
759 | -
760 | 私のために集まってくれた天才女たち見てください👊
761 | #シャチはじ1018
762 | @azu_kichi_cos
763 | 私のために集まってくれた天才女たち見てください👊
764 | #シャチはじ1018
765 |
]]>
766 | Fri, 18 Oct 2024 14:57:56 GMT
767 | http://nitter.example.com/azu_kichi_cos/status/1847291027109101922#m
768 | http://nitter.example.com/azu_kichi_cos/status/1847291027109101922#m
769 |
770 | -
771 | R to @azu_kichi_cos: エマちゃんタグ付けNGになってた…(@S_sametaro)
772 | @azu_kichi_cos
773 | エマちゃんタグ付けNGになってた…(@S_sametaro)]]>
774 | Fri, 18 Oct 2024 14:58:24 GMT
775 | http://nitter.example.com/azu_kichi_cos/status/1847291143094214812#m
776 | http://nitter.example.com/azu_kichi_cos/status/1847291143094214812#m
777 |
778 | -
779 | 明日併せやのにほろ酔いなってるの意味わからんすぎる😸ちょっとだけスペやってええか
780 | @srmmm03
781 | 明日併せやのにほろ酔いなってるの意味わからんすぎる😸ちょっとだけスペやってええか]]>
782 | Fri, 18 Oct 2024 14:58:10 GMT
783 | http://nitter.example.com/srmmm03/status/1847291085854495099#m
784 | http://nitter.example.com/srmmm03/status/1847291085854495099#m
785 |
786 | -
787 | 斑鳩ルカのコスプレの為だけにGIVENCHYの黒リップ買ったんだよな
788 |
789 | 勿体ないから他の衣装もやりたい(リップの為だけじゃないです)
790 |
791 | あとは黒リップ使うキャラ居るか????
792 | イミテーションブラックくらいしかないやろ
793 | @milktea_new
794 | 斑鳩ルカのコスプレの為だけにGIVENCHYの黒リップ買ったんだよな
795 |
796 | 勿体ないから他の衣装もやりたい(リップの為だけじゃないです)
797 |
798 | あとは黒リップ使うキャラ居るか????
799 | イミテーションブラックくらいしかないやろ]]>
800 | Fri, 18 Oct 2024 14:57:57 GMT
801 | http://nitter.example.com/milktea_new/status/1847291029348901283#m
802 | http://nitter.example.com/milktea_new/status/1847291029348901283#m
803 |
804 | -
805 | 109のことね、ニットワンピースの時点でハードル高いのに首周りとか腕のフリルとか絶対に市販では被らないお洋服で可愛い〜!って気持ちとレイヤー死にそう!って気持ち
806 | @milktea_new
807 | 109のことね、ニットワンピースの時点でハードル高いのに首周りとか腕のフリルとか絶対に市販では被らないお洋服で可愛い〜!って気持ちとレイヤー死にそう!って気持ち]]>
808 | Fri, 18 Oct 2024 14:56:38 GMT
809 | http://nitter.example.com/milktea_new/status/1847290698036580683#m
810 | http://nitter.example.com/milktea_new/status/1847290698036580683#m
811 |
812 | -
813 | ゆずちゃんにぼしさんのBIS行きたかったすぎる🥲🥲🥲
814 | @srmmm03
815 | ゆずちゃんにぼしさんのBIS行きたかったすぎる🥲🥲🥲]]>
816 | Fri, 18 Oct 2024 14:55:08 GMT
817 | http://nitter.example.com/srmmm03/status/1847290321270706388#m
818 | http://nitter.example.com/srmmm03/status/1847290321270706388#m
819 |
820 | -
821 | RT by @srmmm03: 今日は黒猫従兄妹🐈⬛🐈⬛
822 | @yyzzmmrr
823 | 今日は黒猫従兄妹🐈⬛🐈⬛
824 |
]]>
825 | Fri, 18 Oct 2024 09:02:10 GMT
826 | http://nitter.example.com/yyzzmmrr/status/1847201495990579325#m
827 | http://nitter.example.com/yyzzmmrr/status/1847201495990579325#m
828 |
829 | -
830 | ようさんとりあさんに包まれてる時()いっつも可愛いね〜!赤ちゃんだね〜!って言われるので
831 | もしかして私可愛いばぶ…?って幻覚する
832 | スタイル良すぎる美人達による洗脳(やぶさかでない)
833 | @milktea_new
834 | ようさんとりあさんに包まれてる時()いっつも可愛いね〜!赤ちゃんだね〜!って言われるので
835 | もしかして私可愛いばぶ…?って幻覚する
836 | スタイル良すぎる美人達による洗脳(やぶさかでない)]]>
837 | Fri, 18 Oct 2024 14:54:39 GMT
838 | http://nitter.example.com/milktea_new/status/1847290199606444361#m
839 | http://nitter.example.com/milktea_new/status/1847290199606444361#m
840 |
841 | -
842 | 池ハロ26日……間に合えばりりゃハロウィンやっちゃおうかな…???☺️☺️☺️
843 | (前日なんだから間に合ってるやろ)😃
844 | @milktea_new
845 | 池ハロ26日……間に合えばりりゃハロウィンやっちゃおうかな…???☺️☺️☺️
846 | (前日なんだから間に合ってるやろ)😃]]>
847 | Fri, 18 Oct 2024 14:51:58 GMT
848 | http://nitter.example.com/milktea_new/status/1847289526378713300#m
849 | http://nitter.example.com/milktea_new/status/1847289526378713300#m
850 |
851 | -
852 | ファンティア有料プラン明日2回更新です🪄
853 |
854 | おやすみ💤
855 | @raimu22118
856 | ファンティア有料プラン明日2回更新です🪄
857 |
858 | おやすみ💤
859 |
]]>
860 | Fri, 18 Oct 2024 14:49:48 GMT
861 | http://nitter.example.com/raimu22118/status/1847288982016786474#m
862 | http://nitter.example.com/raimu22118/status/1847288982016786474#m
863 |
864 | -
865 | 20の併せ万全にしたいのに普通に明日仕事なんだよな!!!!
866 |
867 | わはは!!!
868 |
869 | 絶対定時で帰る
870 | 爆速で全て直してやる!!!!!
871 | @milktea_new
872 | 20の併せ万全にしたいのに普通に明日仕事なんだよな!!!!
873 |
874 | わはは!!!
875 |
876 | 絶対定時で帰る
877 | 爆速で全て直してやる!!!!!]]>
878 | Fri, 18 Oct 2024 14:48:57 GMT
879 | http://nitter.example.com/milktea_new/status/1847288767750811913#m
880 | http://nitter.example.com/milktea_new/status/1847288767750811913#m
881 |
882 | -
883 | 男装だけまとめできた…!
884 | 女装増えすぎてるからのんびりやるぞ😇
885 | 電車長い時にやろう
886 | @milktea_new
887 | 男装だけまとめできた…!
888 | 女装増えすぎてるからのんびりやるぞ😇
889 | 電車長い時にやろう
890 |
]]>
891 | Fri, 18 Oct 2024 14:46:18 GMT
892 | http://nitter.example.com/milktea_new/status/1847288099753410995#m
893 | http://nitter.example.com/milktea_new/status/1847288099753410995#m
894 |
895 | -
896 | RT by @nyan_o912: うにプロ写真集(大嘘)
897 | @mochimocchi66
898 | うにプロ写真集(大嘘)
899 |
]]>
900 | Fri, 18 Oct 2024 14:32:38 GMT
901 | http://nitter.example.com/mochimocchi66/status/1847284661191676070#m
902 | http://nitter.example.com/mochimocchi66/status/1847284661191676070#m
903 |
904 | -
905 | マジックプリズムのパンプキンヘッドって今年は買えますか?( ´•ω•` )
906 |
907 | これが欲しくてハロウィンイベントを楽しみにしてたんです( ´•ω•` )知ってる方いたら教えてください( ´•ω•` )
908 | @uniharu0626
909 | マジックプリズムのパンプキンヘッドって今年は買えますか?( ´•ω•` )
910 |
911 | これが欲しくてハロウィンイベントを楽しみにしてたんです( ´•ω•` )知ってる方いたら教えてください( ´•ω•` )]]>
912 | Fri, 18 Oct 2024 14:37:57 GMT
913 | http://nitter.example.com/uniharu0626/status/1847285996423200968#m
914 | http://nitter.example.com/uniharu0626/status/1847285996423200968#m
915 |
916 | -
917 | 廣井きくりさんまだまだやりたい
918 | @2_netcat
919 | 廣井きくりさんまだまだやりたい]]>
920 | Fri, 18 Oct 2024 14:35:12 GMT
921 | http://nitter.example.com/2_netcat/status/1847285304446877799#m
922 | http://nitter.example.com/2_netcat/status/1847285304446877799#m
923 |
924 | -
925 | RT by @aisu_meru3: 今週日曜日となりましたゲストレイヤーとして出させて頂くホロコスオンリーにて出すグッズのお品書きです‼️
926 |
927 | どれも配布部数少なめとなっておりますのでお求めの方はお早めに🙏✨
928 |
929 | #ホロコスオンリー
930 | @Mkspere
931 | 今週日曜日となりましたゲストレイヤーとして出させて頂くホロコスオンリーにて出すグッズのお品書きです‼️
932 |
933 | どれも配布部数少なめとなっておりますのでお求めの方はお早めに🙏✨
934 |
935 | #ホロコスオンリー
936 |
]]>
937 | Fri, 18 Oct 2024 10:16:29 GMT
938 | http://nitter.example.com/Mkspere/status/1847220196316250567#m
939 | http://nitter.example.com/Mkspere/status/1847220196316250567#m
940 |
941 | -
942 | 先日乗った飛行機ロコンジェットだったのが嬉しかったのと、機内ドリンクサービスが九州コラボ?でヨーグルッペがあります言われて初めて飲んだんだけどすごく美味しかった…‼
943 | 地域限定なのかなあれ🤔💭🤍
944 | @miinyan73
945 | 先日乗った飛行機ロコンジェットだったのが嬉しかったのと、機内ドリンクサービスが九州コラボ?でヨーグルッペがあります言われて初めて飲んだんだけどすごく美味しかった…‼
946 | 地域限定なのかなあれ🤔💭🤍]]>
947 | Fri, 18 Oct 2024 14:32:05 GMT
948 | http://nitter.example.com/miinyan73/status/1847284521408078193#m
949 | http://nitter.example.com/miinyan73/status/1847284521408078193#m
950 |
951 | -
952 | お仕事後で疲れてるのに来てくれたり衣装自作勢もいたしせつ菜ちゃんと璃奈ちゃんがめちゃくちゃ本気出してきてくれたしで私なんかが主催の撮影でみんないいの…(T_T)(T_T)になっちゃった。ありがとうみんな大好きだよ(T_T)(T_T)
953 | @azu_kichi_cos
954 | お仕事後で疲れてるのに来てくれたり衣装自作勢もいたしせつ菜ちゃんと璃奈ちゃんがめちゃくちゃ本気出してきてくれたしで私なんかが主催の撮影でみんないいの…(T_T)(T_T)になっちゃった。ありがとうみんな大好きだよ(T_T)(T_T)]]>
955 | Fri, 18 Oct 2024 14:31:48 GMT
956 | http://nitter.example.com/azu_kichi_cos/status/1847284452004864394#m
957 | http://nitter.example.com/azu_kichi_cos/status/1847284452004864394#m
958 |
959 | -
960 | RT by @aisu_meru3: ( *°ー°* )
961 | @Metamorumeta
962 | ( *°ー°* )
963 |
]]>
964 | Fri, 18 Oct 2024 12:24:35 GMT
965 | http://nitter.example.com/Metamorumeta/status/1847252436777664579#m
966 | http://nitter.example.com/Metamorumeta/status/1847252436777664579#m
967 |
968 | -
969 | RT by @aisu_meru3: #ホロコスオンリー
970 |
971 | お品書きです!
972 | 手のひらサイズの自撮りミニフォトブックが新刊となります!
973 | 隣にいるイメージで撮ったよ🌙🛸💜
974 |
975 | 🛸💜既刊も少し持っていきます!
976 | チェキはその場でスマホで撮ったツーショでも🙆♂️
977 |
978 | 基本はスペースにいると思いますが、離れる時はツイートします!
979 | @meee_chi
980 | #ホロコスオンリー
981 |
982 | お品書きです!
983 | 手のひらサイズの自撮りミニフォトブックが新刊となります!
984 | 隣にいるイメージで撮ったよ🌙🛸💜
985 |
986 | 🛸💜既刊も少し持っていきます!
987 | チェキはその場でスマホで撮ったツーショでも🙆♂️
988 |
989 | 基本はスペースにいると思いますが、離れる時はツイートします!
990 |
]]>
991 | Fri, 18 Oct 2024 11:39:59 GMT
992 | http://nitter.example.com/meee_chi/status/1847241212362948861#m
993 | http://nitter.example.com/meee_chi/status/1847241212362948861#m
994 |
995 | -
996 | 𓋫 𓏴𓏴 𓏵 𓏴𓏴 𓏵 𝙾𝙿𝙴𝙽 𓏵 𓏴𓏴 𓏵 𓏴𓏴 𓋫
997 | @uniharu0626
998 | 𓋫 𓏴𓏴 𓏵 𓏴𓏴 𓏵 𝙾𝙿𝙴𝙽 𓏵 𓏴𓏴 𓏵 𓏴𓏴 𓋫
999 |
]]>
1000 | Fri, 18 Oct 2024 09:08:43 GMT
1001 | http://nitter.example.com/uniharu0626/status/1847203145459572830#m
1002 | http://nitter.example.com/uniharu0626/status/1847203145459572830#m
1003 |
1004 | -
1005 | R to @uniharu0626: ご来店、ドリンク・チェキありがとうございました☺️
1006 | お寿司まで食べられてとってもよかった🍣◝✩
1007 |
1008 | 次は29日、ハロウィンイベント中の出勤です!
1009 | チェキいっぱい撮ろう〜!👾↝ᡣ𐭩
1010 | @uniharu0626
1011 | ご来店、ドリンク・チェキありがとうございました☺️
1012 | お寿司まで食べられてとってもよかった🍣◝✩
1013 |
1014 | 次は29日、ハロウィンイベント中の出勤です!
1015 | チェキいっぱい撮ろう〜!👾↝ᡣ𐭩]]>
1016 | Fri, 18 Oct 2024 14:29:07 GMT
1017 | http://nitter.example.com/uniharu0626/status/1847283773182025972#m
1018 | http://nitter.example.com/uniharu0626/status/1847283773182025972#m
1019 |
1020 | -
1021 | 誰か撮り行きませんか
1022 | @sirousa1211
1023 | 誰か撮り行きませんか
1024 |
nitter.example.com/BHB_official/status/1847231163070349543#m
]]>
1025 | Fri, 18 Oct 2024 14:21:42 GMT
1026 | http://nitter.example.com/sirousa1211/status/1847281908276973848#m
1027 | http://nitter.example.com/sirousa1211/status/1847281908276973848#m
1028 |
1029 | -
1030 | RT by @tsubasabi: ⚠︎Cosplay / 艦これ
1031 | 金剛
1032 |
1033 | photo.はるはらさん
1034 |
1035 | #ラグコス
1036 | #ラグコス2024
1037 | @tanaka_164
1038 | ⚠︎Cosplay / 艦これ
1039 | 金剛
1040 |
1041 | photo.はるはらさん
1042 |
1043 | #ラグコス
1044 | #ラグコス2024
1045 |
]]>
1046 | Fri, 18 Oct 2024 08:48:11 GMT
1047 | http://nitter.example.com/tanaka_164/status/1847197977095667926#m
1048 | http://nitter.example.com/tanaka_164/status/1847197977095667926#m
1049 |
1050 | -
1051 | RT by @azu_kichi_cos: せつ菜ちゃんで踊ってきた!🌈
1052 | 大好きをダンスで表現してきました🤗❤️
1053 | スクールアイドル最高👍✨✨
1054 |
1055 | #シャチはじ1018
1056 | @ssotwhmr
1057 | せつ菜ちゃんで踊ってきた!🌈
1058 | 大好きをダンスで表現してきました🤗❤️
1059 | スクールアイドル最高👍✨✨
1060 |
1061 | #シャチはじ1018
1062 |
]]>
1063 | Fri, 18 Oct 2024 14:18:01 GMT
1064 | http://nitter.example.com/ssotwhmr/status/1847280979947474966#m
1065 | http://nitter.example.com/ssotwhmr/status/1847280979947474966#m
1066 |
1067 | -
1068 | 廣井 きくりさん
1069 | @2_netcat
1070 | 廣井 きくりさん]]>
1071 | Fri, 18 Oct 2024 14:20:32 GMT
1072 | http://nitter.example.com/2_netcat/status/1847281616823189962#m
1073 | http://nitter.example.com/2_netcat/status/1847281616823189962#m
1074 |
1075 | -
1076 | RT by @2_netcat: ◤ ◥
1077 | GOTOH FAMILY
1078 | meets 廣井きくり
1079 | ◣ ◢
1080 |
1081 | きららMAX12月号の『#ぼっち・ざ・ろっく!』では
1082 | 再び金沢八景に流れ着いた廣井きくりが
1083 | 後藤家におじゃまします‼
1084 | 美智代と直樹の意外な馴れ初めも
1085 | 明らかに🍺
1086 |
1087 | 【COMIC FUZでも好評連載中】
1088 | https://comic-fuz.com/manga/72
1089 | @mangatimekirara
1090 | ◤ ◥
1091 | GOTOH FAMILY
1092 | meets 廣井きくり
1093 | ◣ ◢
1094 |
1095 | きららMAX12月号の『#ぼっち・ざ・ろっく!』では
1096 | 再び金沢八景に流れ着いた廣井きくりが
1097 | 後藤家におじゃまします‼
1098 | 美智代と直樹の意外な馴れ初めも
1099 | 明らかに🍺
1100 |
1101 | 【COMIC FUZでも好評連載中】
1102 | comic-fuz.com/manga/72
1103 |
1104 |
1105 |
1106 |
]]>
1107 | Fri, 18 Oct 2024 14:01:17 GMT
1108 | http://nitter.example.com/mangatimekirara/status/1847276770741100783#m
1109 | http://nitter.example.com/mangatimekirara/status/1847276770741100783#m
1110 |
1111 | -
1112 | れんくんめでたい〜!!✨✨
1113 | 周年グッズも買いました😼
1114 | @t_cos_214
1115 | れんくんめでたい〜!!✨✨
1116 | 周年グッズも買いました😼]]>
1117 | Fri, 18 Oct 2024 14:19:12 GMT
1118 | http://nitter.example.com/t_cos_214/status/1847281279622119514#m
1119 | http://nitter.example.com/t_cos_214/status/1847281279622119514#m
1120 |
1121 | -
1122 | RT by @t_cos_214: 【⏰如月れん 4周年記念グッズ受注販売決定!⏰】
1123 |
1124 | 如月れん(@ren_kisaragi__)4周年記念グッズの受注販売が決定‼️
1125 |
1126 | ▶️受注期間
1127 | 10月18日(金)〜11月1日(金)23:59まで
1128 |
1129 | ▶️ご購入はこちら
1130 | https://store.vspo.jp/products/kisaragiren-ani4th
1131 |
1132 | ▶️担当イラストレーター
1133 | TAKOLEGS 様
1134 | KIRUTA 様
1135 | ときわた 様
1136 | 生川 様
1137 |
1138 | #ぶいすぽストア
1139 | @Vspo77
1140 | 【⏰如月れん 4周年記念グッズ受注販売決定!⏰】
1141 |
1142 | 如月れん(@ren_kisaragi__)4周年記念グッズの受注販売が決定‼️
1143 |
1144 | ▶️受注期間
1145 | 10月18日(金)〜11月1日(金)23:59まで
1146 |
1147 | ▶️ご購入はこちら
1148 | store.vspo.jp/products/kisar…
1149 |
1150 | ▶️担当イラストレーター
1151 | TAKOLEGS 様
1152 | KIRUTA 様
1153 | ときわた 様
1154 | 生川 様
1155 |
1156 | #ぶいすぽストア
1157 |
]]>
1158 | Fri, 18 Oct 2024 11:21:51 GMT
1159 | http://nitter.example.com/Vspo77/status/1847236649127911431#m
1160 | http://nitter.example.com/Vspo77/status/1847236649127911431#m
1161 |
1162 | -
1163 | おそほ〜歩夢で踊ってきた🎀
1164 | 初センター、初おどみた主催で何もかも不安だったけどみんなが支えてくれてなんとか本番までもってこれました😭
1165 | 優しくて天才で素敵なメンバーと一緒に踊れてめちゃくちゃ楽しかった!みんな今日は一日ありがとうございました🌈🌸
1166 | @azu_kichi_cos
1167 | おそほ〜歩夢で踊ってきた🎀
1168 | 初センター、初おどみた主催で何もかも不安だったけどみんなが支えてくれてなんとか本番までもってこれました😭
1169 | 優しくて天才で素敵なメンバーと一緒に踊れてめちゃくちゃ楽しかった!みんな今日は一日ありがとうございました🌈🌸
1170 |
]]>
1171 | Fri, 18 Oct 2024 14:14:55 GMT
1172 | http://nitter.example.com/azu_kichi_cos/status/1847280201421783238#m
1173 | http://nitter.example.com/azu_kichi_cos/status/1847280201421783238#m
1174 |
1175 | -
1176 | RT by @azu_kichi_cos: ちゃお〜🍞(1つ+3枚目)
1177 | 楽しく踊ってきましたっ!
1178 | 何気に虹自体初出しなエマヴェルデです✨
1179 |
1180 | #シャチはじ1018
1181 | @S_sametaro
1182 | ちゃお〜🍞(1つ+3枚目)
1183 | 楽しく踊ってきましたっ!
1184 | 何気に虹自体初出しなエマヴェルデです✨
1185 |
1186 | #シャチはじ1018
1187 |
]]>
1188 | Fri, 18 Oct 2024 14:04:35 GMT
1189 | http://nitter.example.com/S_sametaro/status/1847277599816614385#m
1190 | http://nitter.example.com/S_sametaro/status/1847277599816614385#m
1191 |
1192 | -
1193 | RT by @shioringo_227: 応援広告でSHHis&CoMETIKの皆さんをお祝いしませんか?枠取りが完了しましたので、正式告知します!
1194 | 2025年1月開催[liminal;marginal;eternal]に
1195 | ご出演されるアイドルの皆様へ向けたポスターを都内某所に提出します!プロデューサーの皆様のご参加お待ちしております!
1196 | https://twipla.jp/events/638758
1197 | @Shiny_bouquets
1198 | 応援広告でSHHis&CoMETIKの皆さんをお祝いしませんか?枠取りが完了しましたので、正式告知します!
1199 | 2025年1月開催[liminal;marginal;eternal]に
1200 | ご出演されるアイドルの皆様へ向けたポスターを都内某所に提出します!プロデューサーの皆様のご参加お待ちしております!
1201 | twipla.jp/events/638758
1202 |
]]>
1203 | Fri, 18 Oct 2024 12:04:02 GMT
1204 | http://nitter.example.com/Shiny_bouquets/status/1847247261534941298#m
1205 | http://nitter.example.com/Shiny_bouquets/status/1847247261534941298#m
1206 |
1207 | -
1208 | RT by @shioringo_227: 来年1月の応援広告企画の主催を務めさせていただきます!SHHisとコメティックのアイドル5人を応援しませんか?ご支援とご協力よろしくお願いいたします・・・!
1209 | @milotot_
1210 | 来年1月の応援広告企画の主催を務めさせていただきます!SHHisとコメティックのアイドル5人を応援しませんか?ご支援とご協力よろしくお願いいたします・・・!
1211 |
nitter.example.com/Shiny_bouquets/status/1847247261534941298#m
]]>
1212 | Fri, 18 Oct 2024 12:06:35 GMT
1213 | http://nitter.example.com/milotot_/status/1847247907201990950#m
1214 | http://nitter.example.com/milotot_/status/1847247907201990950#m
1215 |
1216 | -
1217 | RT by @shioringo_227: cosplay
1218 |
1219 | アイドルマスターシャイニーカラーズ
1220 | 緋田美琴
1221 |
1222 | 🩵ツイーディーハピリー🩵
1223 |
1224 | photo:@Yukkii_SnowMan
1225 | @roze_cos
1226 | cosplay
1227 |
1228 | アイドルマスターシャイニーカラーズ
1229 | 緋田美琴
1230 |
1231 | 🩵ツイーディーハピリー🩵
1232 |
1233 | photo:@Yukkii_SnowMan
1234 |
]]>
1235 | Fri, 18 Oct 2024 11:30:49 GMT
1236 | http://nitter.example.com/roze_cos/status/1847238904593932621#m
1237 | http://nitter.example.com/roze_cos/status/1847238904593932621#m
1238 |
1239 | -
1240 | チャイナ廣井………………………
1241 | @shioringo_227
1242 | チャイナ廣井………………………]]>
1243 | Fri, 18 Oct 2024 14:08:45 GMT
1244 | http://nitter.example.com/shioringo_227/status/1847278650145833239#m
1245 | http://nitter.example.com/shioringo_227/status/1847278650145833239#m
1246 |
1247 | -
1248 | うあ⁉️⁉️⁉️
1249 | @shioringo_227
1250 | うあ⁉️⁉️⁉️]]>
1251 | Fri, 18 Oct 2024 14:08:34 GMT
1252 | http://nitter.example.com/shioringo_227/status/1847278604482380010#m
1253 | http://nitter.example.com/shioringo_227/status/1847278604482380010#m
1254 |
1255 | -
1256 | RT by @Cos_sorena: ✨✨ 開催決定 ✨✨
1257 | #学園アイドルマスター
1258 | POP UP STORE in 渋谷109
1259 | ✨✨ ✨ ✨✨✨✨
1260 |
1261 | 「109ファッション」をテーマにした
1262 | "総勢12名"の描きおろしイラストが登場🤍
1263 | 東京・大阪の2都市で開催いたします🏫
1264 |
1265 | 詳細はこちら👇
1266 | https://idolmaster-official.jp/news/01_13110
1267 | #学マス
1268 | @wallma_store
1269 | ✨✨ 開催決定 ✨✨
1270 | #学園アイドルマスター
1271 | POP UP STORE in 渋谷109
1272 | ✨✨ ✨ ✨✨✨✨
1273 |
1274 | 「109ファッション」をテーマにした
1275 | "総勢12名"の描きおろしイラストが登場🤍
1276 | 東京・大阪の2都市で開催いたします🏫
1277 |
1278 | 詳細はこちら👇
1279 | idolmaster-official.jp/news/…
1280 | #学マス
1281 |
1282 |
]]>
1283 | Fri, 18 Oct 2024 08:10:10 GMT
1284 | http://nitter.example.com/wallma_store/status/1847188407140962560#m
1285 | http://nitter.example.com/wallma_store/status/1847188407140962560#m
1286 |
1287 |
1288 |
1289 |
1290 |
--------------------------------------------------------------------------------