├── .github
├── dependabot.yml
└── workflows
│ ├── automerge.yml
│ ├── codeql.yml
│ └── dev.yml
├── .gitignore
├── .gitlab-ci.yml
├── .prettierrc.yml
├── .yarnrc.yml
├── action.js
├── action.yml
├── action
├── feeds
│ ├── database.test.ts
│ ├── database.ts
│ ├── file.test.ts
│ ├── file.ts
│ ├── index.ts
│ ├── opml.test.ts
│ ├── opml.ts
│ ├── parsers#parseAtom.test.ts
│ ├── parsers#parseRss.test.ts
│ ├── parsers.ts
│ └── stubs
│ │ ├── atom1.xml
│ │ ├── atom2.xml
│ │ ├── neizod.rss
│ │ ├── opml.flat.xml
│ │ ├── opml.mixed.xml
│ │ ├── opml.single.xml
│ │ ├── opml.subcategory.xml
│ │ ├── opml.xml
│ │ └── rss1.xml
└── repository.ts
├── app
├── globals.css
├── layout.tsx
├── not-found.tsx
└── page.tsx
├── components.json
├── feeds.opml
├── index.ts
├── lib
├── components
│ ├── BackButton.tsx
│ ├── CategoryList.tsx
│ ├── ItemContent.tsx
│ ├── ItemList.tsx
│ └── ThemeToggle.tsx
├── fixtures
│ └── contents
│ │ ├── cat1
│ │ ├── site1.json
│ │ └── site2.json
│ │ └── cat2
│ │ └── site3.json
├── page.tsx
├── reducers
│ └── path.ts
├── storage
│ ├── file.ts
│ ├── index.ts
│ ├── sqlite.ts
│ └── types.ts
├── utils.test.ts
└── utils.ts
├── next-env.d.ts
├── next.config.ts
├── package.json
├── postcss.config.mjs
├── public
├── favicon.ico
├── logo.svg
├── site.webmanifest
├── sql-wasm.wasm
├── sqlite.worker.js
├── sqlite.worker.js.map
└── vercel.svg
├── readme.md
├── tailwind.config.ts
├── tsconfig.json
└── yarn.lock
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: 'npm'
4 | directory: '/'
5 | schedule:
6 | interval: 'weekly'
7 | ignore:
8 | - dependency-name: 'node-fetch'
9 |
--------------------------------------------------------------------------------
/.github/workflows/automerge.yml:
--------------------------------------------------------------------------------
1 | # .github/workflows/automerge.yml
2 |
3 | name: Dependabot auto-merge
4 |
5 | on: pull_request
6 |
7 | permissions:
8 | contents: write
9 | pull-requests: write # Needed if in a private repository
10 |
11 | jobs:
12 | dependabot:
13 | runs-on: ubuntu-latest
14 | if: ${{ github.actor == 'dependabot[bot]' }}
15 | steps:
16 | - name: Enable auto-merge for Dependabot PRs
17 | run: gh pr merge --auto --merge "$PR_URL"
18 | env:
19 | PR_URL: ${{github.event.pull_request.html_url}}
20 | # GitHub provides this variable in the CI env. You don't
21 | # need to add anything to the secrets vault.
22 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
23 |
--------------------------------------------------------------------------------
/.github/workflows/codeql.yml:
--------------------------------------------------------------------------------
1 | # For most projects, this workflow file will not need changing; you simply need
2 | # to commit it to your repository.
3 | #
4 | # You may wish to alter this file to override the set of languages analyzed,
5 | # or to provide custom queries or build logic.
6 | #
7 | # ******** NOTE ********
8 | # We have attempted to detect the languages in your repository. Please check
9 | # the `language` matrix defined below to confirm you have the correct set of
10 | # supported CodeQL languages.
11 | #
12 | name: 'CodeQL'
13 |
14 | on:
15 | push:
16 | branches: ['main']
17 | pull_request:
18 | # The branches below must be a subset of the branches above
19 | branches: ['main']
20 | schedule:
21 | - cron: '18 1 * * 4'
22 |
23 | jobs:
24 | analyze:
25 | name: Analyze
26 | runs-on: ubuntu-latest
27 | permissions:
28 | actions: read
29 | contents: read
30 | security-events: write
31 |
32 | strategy:
33 | fail-fast: false
34 | matrix:
35 | language: ['javascript']
36 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
37 | # Use only 'java' to analyze code written in Java, Kotlin or both
38 | # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
39 | # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
40 |
41 | steps:
42 | - name: Checkout repository
43 | uses: actions/checkout@v3
44 |
45 | # Initializes the CodeQL tools for scanning.
46 | - name: Initialize CodeQL
47 | uses: github/codeql-action/init@v2
48 | with:
49 | languages: ${{ matrix.language }}
50 | # If you wish to specify custom queries, you can do so here or in a config file.
51 | # By default, queries listed here will override any specified in a config file.
52 | # Prefix the list here with "+" to use these queries and those in the config file.
53 |
54 | # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
55 | # queries: security-extended,security-and-quality
56 |
57 | # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
58 | # If this step fails, then you should remove it and run the build manually (see below)
59 | - name: Autobuild
60 | uses: github/codeql-action/autobuild@v2
61 |
62 | # ℹ️ Command-line programs to run using the OS shell.
63 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
64 |
65 | # If the Autobuild fails above, remove it and uncomment the following three lines.
66 | # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
67 |
68 | # - run: |
69 | # echo "Run, Build Application using script"
70 | # ./location_of_script_within_repo/buildscript.sh
71 |
72 | - name: Perform CodeQL Analysis
73 | uses: github/codeql-action/analyze@v2
74 | with:
75 | category: '/language:${{matrix.language}}'
76 |
--------------------------------------------------------------------------------
/.github/workflows/dev.yml:
--------------------------------------------------------------------------------
1 | name: Dev
2 |
3 | on: push
4 |
5 | jobs:
6 | test:
7 | runs-on: ubuntu-latest
8 | steps:
9 | - name: Checkout
10 | uses: actions/checkout@v4
11 | - name: Use Node.js 20
12 | uses: actions/setup-node@v4
13 | with:
14 | node-version: 20
15 | - name: Test
16 | run: |
17 | corepack enable
18 | yarn install
19 | yarn test
20 |
21 | sample:
22 | runs-on: ubuntu-latest
23 | needs: [test]
24 | if: github.ref == 'refs/heads/main'
25 | name: Sample
26 | steps:
27 | - name: Run Action
28 | uses: llun/feeds@main
29 | with:
30 | storageType: files
31 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 | .yarn
8 |
9 | # testing
10 | /coverage
11 |
12 | # next.js
13 | /.next/
14 | /out/
15 |
16 | # production
17 | /build
18 |
19 | # misc
20 | .DS_Store
21 | *.pem
22 | tsconfig.tsbuildinfo
23 | target
24 |
25 | # debug
26 | npm-debug.log*
27 | yarn-debug.log*
28 | yarn-error.log*
29 |
30 | # local env files
31 | .env.local
32 | .env.development.local
33 | .env.test.local
34 | .env.production.local
35 |
36 | # vercel
37 | .vercel
38 |
39 | # personal test file
40 | lab.js
41 |
42 | # feeds content
43 | *.sqlite3
44 | public/data
45 | contents
46 |
--------------------------------------------------------------------------------
/.gitlab-ci.yml:
--------------------------------------------------------------------------------
1 | image: node:lts
2 |
3 | pages:
4 | stage: deploy
5 | script:
6 | - yarn install
7 | - INPUT_OPMLFILE='feeds.opml' yarn run swc-node index.ts
8 | - yarn run build -o public
9 | artifacts:
10 | paths:
11 | - public
12 | rules:
13 | - if: '$CI_COMMIT_BRANCH == "main"'
14 |
--------------------------------------------------------------------------------
/.prettierrc.yml:
--------------------------------------------------------------------------------
1 | semi: false
2 | singleQuote: true
3 | arrowParens: "always"
4 | requirePragma: false
5 | trailingComma: "none"
6 |
--------------------------------------------------------------------------------
/.yarnrc.yml:
--------------------------------------------------------------------------------
1 | nodeLinker: node-modules
2 |
--------------------------------------------------------------------------------
/action.js:
--------------------------------------------------------------------------------
1 | // @ts-check
2 | const fs = require('fs')
3 | const path = require('path')
4 | const { spawnSync } = require('child_process')
5 |
6 | // Duplicate code from action/repository, keep this until
7 | // found a better way to include typescript without transpiles
8 | function runCommand(
9 | /** @type {string[]} */ commands,
10 | /** @type {string} */ cwd
11 | ) {
12 | return spawnSync(commands[0], commands.slice(1), {
13 | stdio: 'inherit',
14 | cwd
15 | })
16 | }
17 |
18 | function getGithubActionPath() {
19 | const workSpace = process.env['GITHUB_WORKSPACE']
20 | if (!workSpace) {
21 | return ''
22 | }
23 | const actionPath = '/home/runner/work/_actions/llun/feeds'
24 | try {
25 | const files = fs.readdirSync(actionPath)
26 | const version = files.filter((file) => {
27 | const stat = fs.statSync(path.join(actionPath, file))
28 | return stat.isDirectory()
29 | })
30 | return path.join(actionPath, version.pop() || 'main')
31 | } catch (error) {
32 | return path.join(actionPath, 'main')
33 | }
34 | }
35 |
36 | // Main
37 | console.log('Action: ', process.env['GITHUB_ACTION'])
38 | if (
39 | process.env['GITHUB_ACTION'] === 'llunfeeds' ||
40 | process.env['GITHUB_ACTION'] === '__llun_feeds'
41 | ) {
42 | runCommand(['node', '--version'], getGithubActionPath())
43 | const enableCorepackResult = runCommand(
44 | ['npm', 'install', '-g', 'corepack'],
45 | getGithubActionPath()
46 | )
47 | if (enableCorepackResult.error) {
48 | throw new Error('Fail to enable corepack')
49 | }
50 | const dependenciesResult = runCommand(
51 | ['yarn', 'install'],
52 | getGithubActionPath()
53 | )
54 | if (dependenciesResult.error) {
55 | throw new Error('Fail to run setup')
56 | }
57 | const executeResult = runCommand(
58 | ['node', '-r', '@swc-node/register', 'index.ts'],
59 | getGithubActionPath()
60 | )
61 | if (executeResult.error) {
62 | throw new Error('Fail to site builder')
63 | }
64 | }
65 |
--------------------------------------------------------------------------------
/action.yml:
--------------------------------------------------------------------------------
1 | name: 'FeedsFetcher'
2 | description: 'Fetch websites feed and build static feeds aggregator'
3 | branding:
4 | icon: archive
5 | color: yellow
6 | inputs:
7 | opmlFile:
8 | description: >
9 | OPML file with list of feeds and category
10 | default: 'feeds.opml'
11 | storageType:
12 | description: >
13 | Feed storage type, database or files
14 | default: 'database'
15 | branch:
16 | description: >
17 | Output branch
18 | default: 'contents'
19 | token:
20 | description: >
21 | Github token for pulling repository information
22 | default: ${{ github.token }}
23 | customDomain:
24 | description: >
25 | Enable custom domain name for the site generator
26 | default: ''
27 | runs:
28 | using: 'node20'
29 | main: 'action.js'
30 |
--------------------------------------------------------------------------------
/action/feeds/database.test.ts:
--------------------------------------------------------------------------------
1 | import anyTest, { TestFn } from 'ava'
2 | import fs from 'fs'
3 | import { knex, Knex } from 'knex'
4 | import path from 'path'
5 | import sinon from 'sinon'
6 | import {
7 | createOrUpdateDatabase,
8 | createTables,
9 | deleteCategory,
10 | deleteEntry,
11 | deleteSite,
12 | deleteSiteCategory,
13 | getAllCategories,
14 | getAllSiteEntries,
15 | getCategorySites,
16 | hash,
17 | insertCategory,
18 | insertEntry,
19 | insertSite,
20 | removeOldEntries,
21 | removeOldSites
22 | } from './database'
23 | import { readOpml } from './opml'
24 | import { Entry, Site } from './parsers'
25 |
26 | const test = anyTest as TestFn<{
27 | db: Knex
28 | fixtures: {
29 | site: Site
30 | entry: Entry
31 | entryWithoutDate: Entry
32 | }
33 | }>
34 |
35 | test.beforeEach(async (t) => {
36 | const db = knex({
37 | client: 'sqlite3',
38 | connection: ':memory:',
39 | useNullAsDefault: true
40 | })
41 |
42 | const fixtureEntry: Entry = {
43 | title: 'Sample entry',
44 | link: 'https://www.llun.me/posts/2021-12-30-2021/',
45 | author: 'llun',
46 | content: 'Content',
47 | date: Date.now()
48 | }
49 | const fixtureEntryWithoutDate: Entry = {
50 | title: 'Sample entry',
51 | link: 'https://www.llun.me/posts/2021-12-30-2021/',
52 | author: 'llun',
53 | content: 'Content',
54 | date: null
55 | }
56 | const fixtureSite: Site = {
57 | title: 'Demo Site',
58 | link: 'https://llun.dev',
59 | description: 'Sample site',
60 | updatedAt: Date.now(),
61 | generator: 'Test',
62 | entries: [fixtureEntry]
63 | }
64 |
65 | await createTables(db)
66 | t.context = {
67 | db,
68 | fixtures: {
69 | site: fixtureSite,
70 | entry: fixtureEntry,
71 | entryWithoutDate: fixtureEntryWithoutDate
72 | }
73 | }
74 | })
75 |
76 | test.afterEach(async (t) => {
77 | const db = t.context.db
78 | await db.destroy()
79 | })
80 |
81 | test('#insertCategory', async (t) => {
82 | const { db } = t.context
83 | await insertCategory(db, 'category1')
84 | const count = await db('Categories').count('* as total').first()
85 | t.is(count.total, 1)
86 |
87 | const first = await db('Categories').first()
88 | t.is(first.name, 'category1')
89 | })
90 |
91 | test('#deleteCategory', async (t) => {
92 | const { db, fixtures } = t.context
93 | const { site, entry } = fixtures
94 |
95 | await insertCategory(db, 'category1')
96 | await insertCategory(db, 'category2')
97 | await insertSite(db, 'category1', site)
98 | await insertSite(db, 'category2', site)
99 |
100 | const siteKey = hash(site.title)
101 | await insertEntry(db, siteKey, site.title, 'category1', entry)
102 | await insertEntry(db, siteKey, site.title, 'category2', entry)
103 |
104 | await deleteCategory(db, 'category2')
105 |
106 | const entriesCount = await db('Entries').count('* as total').first()
107 | const sitesCount = await db('Sites').count('* as total').first()
108 | const entryCategoriesCount = await db('EntryCategories')
109 | .count('* as total')
110 | .first()
111 | const siteCategoriesCount = await db('SiteCategories')
112 | .count('* as total')
113 | .first()
114 | const categoriesCount = await db('Categories').count('* as total').first()
115 |
116 | t.is(entriesCount.total, 1)
117 | t.is(sitesCount.total, 1)
118 | t.is(entryCategoriesCount.total, 1)
119 | t.is(siteCategoriesCount.total, 1)
120 | t.is(categoriesCount.total, 1)
121 |
122 | await deleteCategory(db, 'category1')
123 |
124 | const entriesCount2 = await db('Entries').count('* as total').first()
125 | const sitesCount2 = await db('Sites').count('* as total').first()
126 | const entryCategoriesCount2 = await db('EntryCategories')
127 | .count('* as total')
128 | .first()
129 | const siteCategoriesCount2 = await db('SiteCategories')
130 | .count('* as total')
131 | .first()
132 | const categoriesCount2 = await db('Categories').count('* as total').first()
133 | t.is(entriesCount2.total, 0)
134 | t.is(sitesCount2.total, 0)
135 | t.is(entryCategoriesCount2.total, 0)
136 | t.is(siteCategoriesCount2.total, 0)
137 | t.is(categoriesCount2.total, 0)
138 | })
139 |
140 | test('#insertSite', async (t) => {
141 | const { db, fixtures } = t.context
142 | const { site } = fixtures
143 | await insertCategory(db, 'category1')
144 | const siteKey = await insertSite(db, 'category1', site)
145 | t.is(siteKey, hash(site.title))
146 | const persistedSite = await db('Sites').first()
147 | t.deepEqual(persistedSite, {
148 | key: hash(site.title),
149 | title: site.title,
150 | url: site.link,
151 | description: site.description,
152 | createdAt: Math.floor(site.updatedAt / 1000)
153 | })
154 |
155 | const persistedSiteCategory = await db('SiteCategories').first()
156 | t.deepEqual(persistedSiteCategory, {
157 | category: 'category1',
158 | siteKey: hash(site.title),
159 | siteTitle: site.title
160 | })
161 |
162 | // Ignore insertion when category is not exists
163 | await insertSite(db, 'category2', site)
164 | const categoryCount = await db('SiteCategories').count('* as total').first()
165 | t.is(categoryCount.total, 1)
166 | const siteCount = await db('Sites').count('* as total').first()
167 | t.is(siteCount.total, 1)
168 |
169 | // Multiple category but same site
170 | await insertCategory(db, 'category2')
171 | const siteKey2 = await insertSite(db, 'category2', site)
172 | t.is(siteKey, siteKey2)
173 | const categoryCount2 = await db('SiteCategories').count('* as total').first()
174 | t.is(categoryCount2.total, 2)
175 | const siteCount2 = await db('Sites').count('* as total').first()
176 | t.is(siteCount2.total, 1)
177 | })
178 |
179 | test('#deleteSiteCategory', async (t) => {
180 | const { db, fixtures } = t.context
181 | const { entry, site } = fixtures
182 | await insertCategory(db, 'category1')
183 | await insertCategory(db, 'category2')
184 | await insertSite(db, 'category1', site)
185 | await insertSite(db, 'category2', site)
186 |
187 | const siteKey = hash(site.title)
188 | await insertEntry(db, siteKey, site.title, 'category1', entry)
189 | await insertEntry(db, siteKey, site.title, 'category2', entry)
190 | await deleteSiteCategory(db, 'category2', siteKey)
191 |
192 | const entryCount = await db('Entries').count('* as total').first()
193 | const siteCount = await db('Sites').count('* as total').first()
194 | const entryCategoryCount = await db('EntryCategories')
195 | .count('* as total')
196 | .first()
197 | const siteCategoryCount = await db('SiteCategories')
198 | .count('* as total')
199 | .first()
200 |
201 | t.is(entryCount.total, 1)
202 | t.is(siteCount.total, 1)
203 | t.is(entryCategoryCount.total, 1)
204 | t.is(siteCategoryCount.total, 1)
205 |
206 | await deleteSiteCategory(db, 'category1', siteKey)
207 |
208 | const siteCategoryCount2 = await db('SiteCategories')
209 | .count('* as total')
210 | .first()
211 | const entryCategoryCount2 = await db('EntryCategories')
212 | .count('* as total')
213 | .first()
214 | const siteCount2 = await db('Sites').count('* as total').first()
215 | const entryCount2 = await db('Entries').count('* as total').first()
216 | t.is(siteCategoryCount2.total, 0)
217 | t.is(entryCategoryCount2.total, 0)
218 | t.is(siteCount2.total, 0)
219 | t.is(entryCount2.total, 0)
220 | })
221 |
222 | test('#deleteSite', async (t) => {
223 | const { db, fixtures } = t.context
224 | const { entry, site } = fixtures
225 | await insertCategory(db, 'category1')
226 | await insertCategory(db, 'category2')
227 | await insertSite(db, 'category1', site)
228 | await insertSite(db, 'category2', site)
229 |
230 | const siteKey = hash(site.title)
231 | await insertEntry(db, siteKey, site.title, 'category1', entry)
232 | await insertEntry(db, siteKey, site.title, 'category2', entry)
233 | await deleteSite(db, siteKey)
234 |
235 | const siteCategoryCount = await db('SiteCategories')
236 | .count('* as total')
237 | .first()
238 | const entryCategoryCount = await db('EntryCategories')
239 | .count('* as total')
240 | .first()
241 | const sitesCount = await db('Sites').count('* as total').first()
242 | const entriesCount = await db('Entries').count('* as total').first()
243 |
244 | t.is(siteCategoryCount.total, 0)
245 | t.is(entryCategoryCount.total, 0)
246 | t.is(sitesCount.total, 0)
247 | t.is(entriesCount.total, 0)
248 | })
249 |
250 | test('#insertEntry single entry', async (t) => {
251 | const { db, fixtures } = t.context
252 | const { entry, site } = fixtures
253 | await insertCategory(db, 'category1')
254 | await insertEntry(db, 'nonexist', 'nonexists', 'category1', entry)
255 | const countResult = await db('Entries').count('* as total').first()
256 | t.is(countResult.total, 0)
257 |
258 | const siteKey = await insertSite(db, 'category1', site)
259 | await insertEntry(db, siteKey, site.title, 'category2', entry)
260 | const countResult2 = await db('Entries').count('* as total').first()
261 | t.is(countResult2.total, 0)
262 |
263 | const entryKey = await insertEntry(
264 | db,
265 | siteKey,
266 | site.title,
267 | 'category1',
268 | entry
269 | )
270 | t.is(entryKey, hash(`${entry.title}${entry.link}`))
271 | const countResult3 = await db('Entries').count('* as total').first()
272 | t.is(countResult3.total, 1)
273 | const categoryResults = await db('EntryCategories')
274 | .count('* as total')
275 | .first()
276 | t.is(categoryResults.total, 1)
277 | const persistedEntry = await db('Entries').first()
278 | sinon.assert.match(persistedEntry, {
279 | key: hash(`${entry.title}${entry.link}`),
280 | siteKey: hash(site.title),
281 | siteTitle: site.title,
282 | url: entry.link,
283 | content: entry.content,
284 | contentTime: Math.floor(entry.date / 1000),
285 | createdAt: sinon.match.number
286 | })
287 | })
288 |
289 | test('#insertEntry with site in multiple categories', async (t) => {
290 | const { db, fixtures } = t.context
291 | const { entry, site } = fixtures
292 | await insertCategory(db, 'category1')
293 | await insertCategory(db, 'category2')
294 | await insertSite(db, 'category1', site)
295 | await insertSite(db, 'category2', site)
296 | const siteKey = hash(site.title)
297 |
298 | await insertEntry(db, siteKey, site.title, 'category1', entry)
299 | await insertEntry(db, siteKey, site.title, 'category2', entry)
300 | const count1 = await db('Entries').count('* as total').first()
301 | t.is(count1.total, 1)
302 | const count2 = await db('EntryCategories').count('* as total').first()
303 | t.is(count2.total, 2)
304 | })
305 |
306 | test('#insertEntry with empty date', async (t) => {
307 | const { db, fixtures } = t.context
308 | const { entryWithoutDate, site } = fixtures
309 | await insertCategory(db, 'category1')
310 | await insertSite(db, 'category1', site)
311 | const siteKey = hash(site.title)
312 |
313 | await insertEntry(db, siteKey, site.title, 'category1', entryWithoutDate)
314 |
315 | const entriesCount = await db('Entries').count('* as total').first()
316 | const entryCategoryCount = await db('EntryCategories')
317 | .count('* as total')
318 | .first()
319 |
320 | t.is(entriesCount.total, 1)
321 | t.is(entryCategoryCount.total, 1)
322 |
323 | const entry = await db('Entries').first()
324 | const entryCategory = await db('EntryCategories').first()
325 | t.is(
326 | entryCategory.entryContentTime,
327 | entry.createdAt,
328 | 'entryContentTime should use entry createdAt when contentTime is null'
329 | )
330 | t.is(
331 | entry.contentTime,
332 | entry.createdAt,
333 | 'Content time in the entry should be the same as createdAt'
334 | )
335 | })
336 |
337 | test('#deleteEntry', async (t) => {
338 | const { db, fixtures } = t.context
339 | const { entry, site } = fixtures
340 |
341 | await insertCategory(db, 'category1')
342 | const siteKey = await insertSite(db, 'category1', site)
343 | const key = await insertEntry(db, siteKey, site.title, 'category1', entry)
344 |
345 | await deleteEntry(db, key)
346 | const entryCount = await db('Entries').count('* as total').first()
347 | t.is(entryCount.total, 0)
348 |
349 | const entryCategoryCount = await db('EntryCategories')
350 | .count('* as total')
351 | .first()
352 | t.is(entryCategoryCount.total, 0)
353 | })
354 |
355 | test('#removeOldSites delete sites not exists in opml', async (t) => {
356 | const db = knex({
357 | client: 'sqlite3',
358 | connection: ':memory:',
359 | useNullAsDefault: true
360 | })
361 | await createTables(db)
362 | await insertCategory(db, 'Category2')
363 | await insertSite(db, 'Category2', {
364 | title: '@llun story',
365 | description: '',
366 | entries: [],
367 | generator: '',
368 | link: 'https://www.llun.me',
369 | updatedAt: Math.floor(Date.now() / 1000)
370 | })
371 | const site2 = await insertSite(db, 'Category2', {
372 | title: 'cheeaunblog',
373 | description: '',
374 | entries: [],
375 | generator: '',
376 | link: 'https://cheeaun.com/blog',
377 | updatedAt: Math.floor(Date.now() / 1000)
378 | })
379 | const site3 = await insertSite(db, 'Category2', {
380 | title: 'icez network',
381 | description: '',
382 | entries: [],
383 | generator: '',
384 | link: 'https://www.icez.net/blog',
385 | updatedAt: Math.floor(Date.now() / 1000)
386 | })
387 |
388 | const data = fs
389 | .readFileSync(path.join(__dirname, 'stubs', 'opml.xml'))
390 | .toString('utf8')
391 | const opml = await readOpml(data)
392 | await removeOldSites(db, opml[1])
393 | const sites = await getCategorySites(db, 'Category2')
394 | t.deepEqual(sites, [
395 | { siteKey: site2, siteTitle: 'cheeaunblog', category: 'Category2' },
396 | { siteKey: site3, siteTitle: 'icez network', category: 'Category2' }
397 | ])
398 | await db.destroy()
399 | })
400 |
401 | test('#removeOldEntries delete entries not exists in feed site anymore', async (t) => {
402 | const db = knex({
403 | client: 'sqlite3',
404 | connection: ':memory:',
405 | useNullAsDefault: true
406 | })
407 | await createTables(db)
408 | await insertCategory(db, 'Category1')
409 |
410 | const site: Site = {
411 | title: '@llun story',
412 | description: '',
413 | entries: [
414 | {
415 | author: 'llun',
416 | content: 'content1',
417 | date: Math.floor(Date.now() / 1000),
418 | link: 'https://www.llun.me/posts/2021-12-30-2021/',
419 | title: '2021'
420 | },
421 | {
422 | author: 'llun',
423 | content: 'content2',
424 | date: Math.floor(Date.now() / 1000),
425 | link: 'https://www.llun.me/posts/2020-12-31-2020/',
426 | title: '2020'
427 | }
428 | ],
429 | generator: '',
430 | link: 'https://www.llun.me',
431 | updatedAt: Math.floor(Date.now() / 1000)
432 | }
433 | const siteKey = await insertSite(db, 'Category1', site)
434 | await insertEntry(db, siteKey, '@llun story', 'Category1', {
435 | author: 'llun',
436 | content: 'content3',
437 | date: Math.floor(Date.now() / 1000),
438 | link: 'https://www.llun.me/posts/2018-12-31-2018/',
439 | title: '2018'
440 | })
441 | const entryKey = await insertEntry(db, siteKey, '@llun story', 'Category1', {
442 | author: 'llun',
443 | content: 'content2',
444 | date: Math.floor(Date.now() / 1000),
445 | link: 'https://www.llun.me/posts/2020-12-31-2020/',
446 | title: '2020'
447 | })
448 | await removeOldEntries(db, site)
449 | const entries = await getAllSiteEntries(db, siteKey)
450 | t.deepEqual(entries, [{ entryKey, siteKey, category: 'Category1' }])
451 | await db.destroy()
452 | })
453 |
454 | test('#createOrUpdateDatabase add fresh data for empty database', async (t) => {
455 | const db = knex({
456 | client: 'sqlite3',
457 | connection: ':memory:',
458 | useNullAsDefault: true
459 | })
460 | const data = fs
461 | .readFileSync(path.join(__dirname, 'stubs', 'opml.single.xml'))
462 | .toString('utf8')
463 | const opml = await readOpml(data)
464 | const entry1: Entry = {
465 | author: 'llun',
466 | content: 'content1',
467 | date: Math.floor(Date.now() / 1000),
468 | link: 'https://www.llun.me/posts/2021-12-30-2021/',
469 | title: '2021'
470 | }
471 | const entry2: Entry = {
472 | author: 'llun',
473 | content: 'content2',
474 | date: Math.floor(Date.now() / 1000),
475 | link: 'https://www.llun.me/posts/2020-12-31-2020/',
476 | title: '2020'
477 | }
478 | const site: Site = {
479 | title: '@llun story',
480 | description: '',
481 | entries: [entry1, entry2],
482 | generator: '',
483 | link: 'https://www.llun.me',
484 | updatedAt: Math.floor(Date.now() / 1000)
485 | }
486 | await createTables(db)
487 | await createOrUpdateDatabase(
488 | db,
489 | opml,
490 | async (title: string, url: string) => site
491 | )
492 | const categories = await getAllCategories(db)
493 | t.deepEqual(categories, ['default'])
494 | for (const category of categories) {
495 | const sites = await getCategorySites(db, category)
496 | t.deepEqual(sites, [
497 | {
498 | siteKey: hash(site.title),
499 | siteTitle: site.title,
500 | category: 'default'
501 | }
502 | ])
503 |
504 | for (const site of sites) {
505 | const entries = await getAllSiteEntries(db, site.siteKey)
506 | t.deepEqual(entries, [
507 | {
508 | entryKey: hash(`${entry2.title}${entry2.link}`),
509 | siteKey: site.siteKey,
510 | category: 'default'
511 | },
512 | {
513 | entryKey: hash(`${entry1.title}${entry1.link}`),
514 | siteKey: site.siteKey,
515 | category: 'default'
516 | }
517 | ])
518 | }
519 | }
520 | await db.destroy()
521 | })
522 |
523 | test('#createOrUpdateDatabase with old contents in database', async (t) => {
524 | const db = knex({
525 | client: 'sqlite3',
526 | connection: ':memory:',
527 | useNullAsDefault: true
528 | })
529 | const data = fs
530 | .readFileSync(path.join(__dirname, 'stubs', 'opml.single.xml'))
531 | .toString('utf8')
532 | const opml = await readOpml(data)
533 | const entry1: Entry = {
534 | author: 'llun',
535 | content: 'content1',
536 | date: Math.floor(Date.now() / 1000),
537 | link: 'https://www.llun.me/posts/2021-12-30-2021/',
538 | title: '2021'
539 | }
540 | const entry2: Entry = {
541 | author: 'llun',
542 | content: 'content2',
543 | date: Math.floor(Date.now() / 1000),
544 | link: 'https://www.llun.me/posts/2020-12-31-2020/',
545 | title: '2020'
546 | }
547 | const site: Site = {
548 | title: '@llun story',
549 | description: '',
550 | entries: [entry1, entry2],
551 | generator: '',
552 | link: 'https://www.llun.me',
553 | updatedAt: Math.floor(Date.now() / 1000)
554 | }
555 | await createTables(db)
556 | await insertCategory(db, 'default')
557 | await insertCategory(db, 'Category1')
558 | await insertSite(db, 'default', site)
559 | await insertSite(db, 'default', {
560 | title: 'Other site',
561 | description: '',
562 | entries: [],
563 | generator: '',
564 | link: 'https://google.com',
565 | updatedAt: Math.floor(Date.now() / 1000)
566 | })
567 | await insertSite(db, 'Category1', {
568 | title: 'Other site2',
569 | description: '',
570 | entries: [],
571 | generator: '',
572 | link: 'https://youtube.com',
573 | updatedAt: Math.floor(Date.now() / 1000)
574 | })
575 | await insertEntry(db, hash(site.title), site.title, 'default', {
576 | author: 'llun',
577 | content: 'content3',
578 | date: Math.floor(Date.now() / 1000),
579 | link: 'https://www.llun.me/posts/2018-12-31-2018/',
580 | title: '2018'
581 | })
582 | await createOrUpdateDatabase(
583 | db,
584 | opml,
585 | async (title: string, url: string) => site
586 | )
587 | const categories = await getAllCategories(db)
588 | t.deepEqual(categories, ['default'])
589 | for (const category of categories) {
590 | const sites = await getCategorySites(db, category)
591 | t.deepEqual(sites, [
592 | {
593 | siteKey: hash(site.title),
594 | siteTitle: site.title,
595 | category: 'default'
596 | }
597 | ])
598 | for (const site of sites) {
599 | const entries = await getAllSiteEntries(db, site.siteKey)
600 | t.deepEqual(entries, [
601 | {
602 | entryKey: hash(`${entry2.title}${entry2.link}`),
603 | siteKey: site.siteKey,
604 | category: 'default'
605 | },
606 | {
607 | entryKey: hash(`${entry1.title}${entry1.link}`),
608 | siteKey: site.siteKey,
609 | category: 'default'
610 | }
611 | ])
612 | }
613 | }
614 | await db.destroy()
615 | })
616 |
--------------------------------------------------------------------------------
/action/feeds/database.ts:
--------------------------------------------------------------------------------
1 | import crypto from 'crypto'
2 | import fs, { constants } from 'fs'
3 | import { knex, Knex } from 'knex'
4 | import path from 'path'
5 |
6 | import { getWorkspacePath } from '../repository'
7 | import { OpmlCategory } from './opml'
8 | import type { Entry, Site } from './parsers'
9 |
10 | export const DATABASE_FILE = 'data.sqlite3'
11 |
12 | export function hash(input: string) {
13 | return crypto.createHash('sha256').update(input).digest('hex')
14 | }
15 |
16 | export function getDatabase(contentDirectory: string) {
17 | try {
18 | const stats = fs.statSync(contentDirectory)
19 | if (!stats.isDirectory()) {
20 | throw new Error(`${contentDirectory} is not a directory`)
21 | }
22 | } catch (error) {
23 | if (error.code !== 'ENOENT') {
24 | throw new Error(`Fail to access ${contentDirectory}`)
25 | }
26 | fs.mkdirSync(contentDirectory, { recursive: true })
27 | }
28 |
29 | const databasePath = path.join(contentDirectory, DATABASE_FILE)
30 | console.log('Database path', databasePath)
31 | return knex({
32 | client: 'sqlite3',
33 | connection: {
34 | filename: databasePath
35 | },
36 | useNullAsDefault: true
37 | })
38 | }
39 |
40 | export async function createTables(knex: Knex) {
41 | await knex.raw('PRAGMA foreign_keys = ON')
42 | if (!(await knex.schema.hasTable('SchemaVersions'))) {
43 | await knex.schema.dropTableIfExists('Entries')
44 | await knex.schema.dropTableIfExists('EntryCategories')
45 |
46 | if (!(await knex.schema.hasTable('SchemaVersions'))) {
47 | await knex.schema.createTable('SchemaVersions', (table) => {
48 | table.integer('timestamp')
49 | table.integer('version')
50 | })
51 | const now = Math.floor(Date.now() / 1000)
52 | await knex('SchemaVersions').insert({ timestamp: now, version: 1 })
53 | }
54 | }
55 |
56 | if (!(await knex.schema.hasTable('Categories'))) {
57 | await knex.schema.createTable('Categories', (table) => {
58 | table.string('name').primary()
59 | })
60 | }
61 |
62 | if (!(await knex.schema.hasTable('Sites'))) {
63 | await knex.schema.createTable('Sites', (table) => {
64 | table.string('key').primary()
65 | table.string('title').notNullable()
66 | table.string('url').nullable()
67 | table.string('description')
68 | table.integer('createdAt')
69 | })
70 | }
71 |
72 | if (!(await knex.schema.hasTable('SiteCategories'))) {
73 | await knex.schema.createTable('SiteCategories', (table) => {
74 | table.string('category').notNullable()
75 | table.string('siteKey').notNullable()
76 | table.string('siteTitle').notNullable()
77 | table.index(['category', 'siteKey'], 'site_category_idx')
78 | table
79 | .foreign('category')
80 | .references('Categories.name')
81 | .onDelete('cascade')
82 | table.foreign('siteKey').references('Sites.key').onDelete('cascade')
83 | })
84 | }
85 |
86 | if (!(await knex.schema.hasTable('Entries'))) {
87 | await knex.schema.createTable('Entries', (table) => {
88 | table.string('key').primary()
89 | table.string('siteKey').notNullable()
90 | table.string('siteTitle').notNullable()
91 | table.string('title').notNullable()
92 | table.string('url').notNullable()
93 | table.text('content').notNullable()
94 | table.integer('contentTime').nullable()
95 | table.integer('createdAt').notNullable()
96 | table.index(
97 | ['siteKey', 'contentTime', 'createdAt'],
98 | 'site_content_time_created_at_idx'
99 | )
100 | table.foreign('siteKey').references('Sites.key').onDelete('cascade')
101 | })
102 | }
103 |
104 | if (!(await knex.schema.hasTable('EntryCategories'))) {
105 | await knex.schema.createTable('EntryCategories', (table) => {
106 | table.string('category').notNullable()
107 | table.string('entryKey').notNullable()
108 | table.string('entryTitle').notNullable()
109 | table.string('siteKey').notNullable()
110 | table.string('siteTitle').notNullable()
111 | table.integer('entryContentTime').nullable()
112 | table.integer('entryCreatedAt').notNullable()
113 | table.index(
114 | ['category', 'siteKey', 'entryKey', 'entryContentTime'],
115 | 'category_siteKey_entryKey_entryContentTime_idx'
116 | )
117 | table.foreign('entryKey').references('Entries.key').onDelete('cascade')
118 | table.foreign('siteKey').references('Sites.key').onDelete('cascade')
119 | table
120 | .foreign('category')
121 | .references('Categories.name')
122 | .onDelete('cascade')
123 | })
124 | }
125 | }
126 |
127 | export async function insertCategory(knex: Knex, category: string) {
128 | try {
129 | await knex.transaction(async (trx) => {
130 | const record = await trx('Categories').where('name', category).first()
131 | if (record) return
132 | await trx('Categories').insert({ name: category })
133 | })
134 | } catch (error) {
135 | console.error(`Fail to insert ${category}`)
136 | }
137 | }
138 |
139 | export async function deleteCategory(knex: Knex, category: string) {
140 | const sites = (
141 | await knex('SiteCategories').select('siteKey').where('category', category)
142 | ).map((item) => item.siteKey)
143 |
144 | await knex('Categories').where('name', category).delete()
145 | const siteWithoutCategories = (await Promise.all(
146 | sites.map((siteKey) =>
147 | knex('SiteCategories')
148 | .where('siteKey', siteKey)
149 | .select(knex.raw(`'${siteKey}' as key`))
150 | .count('* as total')
151 | .first()
152 | )
153 | )) as { key: string; total: number }[]
154 | await Promise.all(
155 | siteWithoutCategories
156 | .filter((item) => item.total === 0)
157 | .map((item) => deleteSite(knex, item.key))
158 | )
159 | }
160 |
161 | export async function getAllCategories(knex: Knex): Promise {
162 | const categories = await knex('Categories').orderBy('name', 'asc')
163 | return categories.map((item) => item.name)
164 | }
165 |
166 | export async function isEntryExists(knex: Knex, entry: Entry) {
167 | const key = hash(`${entry.title}${entry.link}`)
168 | const count = await knex('Entries')
169 | .where('key', key)
170 | .count<{ total: number }>('* as total')
171 | .first()
172 | return count.total > 0
173 | }
174 |
175 | async function isSiteExists(knex: Knex, siteKey: string) {
176 | const count = await knex('Sites')
177 | .where('key', siteKey)
178 | .count<{ total: number }>('* as total')
179 | .first()
180 | return count.total > 0
181 | }
182 |
183 | async function isSiteCategoryExists(
184 | knex: Knex,
185 | category: string,
186 | siteKey: string
187 | ) {
188 | const count = await knex('SiteCategories')
189 | .where('category', category)
190 | .andWhere('siteKey', siteKey)
191 | .count<{ total: number }>('* as total')
192 | .first()
193 | return count.total > 0
194 | }
195 |
196 | async function isCategoryExists(knex: Knex, category: string) {
197 | const count = await knex('Categories')
198 | .where('name', category)
199 | .count<{ total: number }>('* as total')
200 | .first()
201 | return count.total > 0
202 | }
203 |
204 | export async function insertEntry(
205 | knex: Knex,
206 | siteKey: string,
207 | siteTitle: string,
208 | category: string,
209 | entry: Entry
210 | ) {
211 | if (!(await isSiteExists(knex, siteKey))) return
212 | if (!(await isCategoryExists(knex, category))) return
213 |
214 | const key = hash(`${entry.title}${entry.link}`)
215 | const createdTime = Math.floor(Date.now() / 1000)
216 | const contentTime =
217 | (entry.date && Math.floor(entry.date / 1000)) || createdTime
218 | if (!(await isEntryExists(knex, entry))) {
219 | await knex('Entries').insert({
220 | key,
221 | siteKey,
222 | siteTitle,
223 | title: entry.title,
224 | url: entry.link,
225 | content: entry.content,
226 | contentTime,
227 | createdAt: createdTime
228 | })
229 | }
230 | const isEntryCategoryExists = await knex('EntryCategories')
231 | .where('category', category)
232 | .andWhere('entryKey', key)
233 | .first()
234 | if (!isEntryCategoryExists) {
235 | await knex('EntryCategories').insert({
236 | category,
237 | entryKey: key,
238 | entryTitle: entry.title,
239 | siteKey,
240 | siteTitle,
241 | entryContentTime: contentTime,
242 | entryCreatedAt: createdTime
243 | })
244 | }
245 | return key
246 | }
247 |
248 | export async function deleteEntry(knex: Knex, entryKey: string) {
249 | const counter = await knex('Entries')
250 | .where('key', entryKey)
251 | .count('* as total')
252 | .first()
253 | if (counter.total === 0) return
254 |
255 | await knex('Entries').where('key', entryKey).delete()
256 | await knex('EntryCategories').where('entryKey', entryKey).delete()
257 | }
258 |
259 | export async function getAllSiteEntries(knex: Knex, siteKey: string) {
260 | const entries = await knex('EntryCategories')
261 | .select('entryKey', 'siteKey', 'category')
262 | .where('siteKey', siteKey)
263 | return entries as { entryKey: string; siteKey: string; category: string }[]
264 | }
265 |
266 | export async function insertSite(knex: Knex, category: string, site: Site) {
267 | try {
268 | const key = await knex.transaction(async (trx) => {
269 | const key = hash(site.title)
270 | const updatedAt = site.updatedAt || Date.now()
271 | if (!(await isCategoryExists(trx, category))) return null
272 | if (!(await isSiteExists(trx, key))) {
273 | await trx('Sites').insert({
274 | key,
275 | title: site.title,
276 | url: site.link || null,
277 | description: site.description || null,
278 | createdAt: Math.floor(updatedAt / 1000)
279 | })
280 | }
281 | if (!(await isSiteCategoryExists(trx, category, key))) {
282 | await trx('SiteCategories').insert({
283 | category,
284 | siteKey: key,
285 | siteTitle: site.title
286 | })
287 | }
288 | return key
289 | })
290 | return key
291 | } catch (error) {
292 | console.error(`Fail to insert site ${site.title}`)
293 | console.error(error.message)
294 | return null
295 | }
296 | }
297 |
298 | export async function deleteSiteCategory(
299 | knex: Knex,
300 | category: string,
301 | siteKey: string
302 | ) {
303 | await knex('SiteCategories')
304 | .where('category', category)
305 | .andWhere('siteKey', siteKey)
306 | .delete()
307 | await knex('EntryCategories')
308 | .where('category', category)
309 | .andWhere('siteKey', siteKey)
310 | .delete()
311 |
312 | const siteCategoryCount = await knex('SiteCategories')
313 | .where('siteKey', siteKey)
314 | .count<{ total: number }>('* as total')
315 | .first()
316 | if (siteCategoryCount.total > 0) return
317 | await knex('Sites').where('key', siteKey).delete()
318 | }
319 |
320 | export async function deleteSite(knex: Knex, siteKey: string) {
321 | await knex('Sites').where('key', siteKey).delete()
322 | }
323 |
324 | export async function getCategorySites(knex: Knex, category: string) {
325 | const sites = await knex('SiteCategories')
326 | .select('siteKey', 'siteTitle', 'category')
327 | .where('category', category)
328 | return sites as { siteKey: string; siteTitle: string; category: string }[]
329 | }
330 |
331 | export async function cleanup(knex: Knex) {
332 | await knex.raw('pragma journal_mode = delete')
333 | await knex.raw('pragma page_size = 4096')
334 | await knex.raw('vacuum')
335 | }
336 |
337 | export async function removeOldCategories(db: Knex, opml: OpmlCategory[]) {
338 | const existingCategories = await getAllCategories(db)
339 | const opmlCategories = opml.map((item) => item.category)
340 | const removedCategory = existingCategories.filter(
341 | (category) => !opmlCategories.includes(category)
342 | )
343 | await Promise.all(
344 | removedCategory.map((category) => deleteCategory(db, category))
345 | )
346 | }
347 |
348 | export async function removeOldSites(db: Knex, opmlCategory: OpmlCategory) {
349 | const existingSites = await getCategorySites(db, opmlCategory.category)
350 | const opmlSites = opmlCategory.items.map((item) => hash(`${item.title}`))
351 | const removedCategorySites = existingSites
352 | .map((item) => item.siteKey)
353 | .filter((key) => !opmlSites.includes(key))
354 | await Promise.all(
355 | removedCategorySites.map((siteKey) =>
356 | deleteSiteCategory(db, opmlCategory.category, siteKey)
357 | )
358 | )
359 | }
360 |
361 | export async function removeOldEntries(db: Knex, site: Site) {
362 | const existingEntries = await getAllSiteEntries(db, hash(site.title))
363 | const siteEntries = site.entries.map((item) =>
364 | hash(`${item.title}${item.link}`)
365 | )
366 | const removedEntries = existingEntries
367 | .map((item) => item.entryKey)
368 | .filter((key) => !siteEntries.includes(key))
369 | await Promise.all(removedEntries.map((key) => deleteEntry(db, key)))
370 | }
371 |
372 | export async function createOrUpdateDatabase(
373 | db: Knex,
374 | opmlCategories: OpmlCategory[],
375 | feedLoader: (title: string, url: string) => Promise
376 | ) {
377 | await removeOldCategories(db, opmlCategories)
378 | for (const category of opmlCategories) {
379 | const { category: categoryName, items } = category
380 | if (!items) continue
381 | await insertCategory(db, categoryName)
382 | await removeOldSites(db, category)
383 | for (const item of items) {
384 | const site = await feedLoader(item.title, item.xmlUrl)
385 | if (!site) {
386 | continue
387 | }
388 | console.log(`Load ${site.title}`)
389 | const siteKey = await insertSite(db, categoryName, site)
390 | await removeOldEntries(db, site)
391 | for (const entry of site.entries) {
392 | if (await isEntryExists(db, entry)) {
393 | continue
394 | }
395 | await insertEntry(db, siteKey, site.title, categoryName, entry)
396 | }
397 | }
398 | }
399 | }
400 |
401 | export async function copyExistingDatabase(publicPath: string) {
402 | const workSpace = getWorkspacePath()
403 | if (workSpace) {
404 | const existingDatabase = path.join(workSpace, DATABASE_FILE)
405 | const targetDatabase = path.join(publicPath, DATABASE_FILE)
406 | try {
407 | fs.statSync(existingDatabase)
408 | console.log(`Copying ${existingDatabase} to ${targetDatabase}`)
409 | fs.copyFileSync(existingDatabase, targetDatabase, constants.COPYFILE_EXCL)
410 | } catch (error) {
411 | // Fail to read old database, ignore it
412 | console.log('Skip copy old database because of error: ', error.message)
413 | }
414 | }
415 | }
416 |
--------------------------------------------------------------------------------
/action/feeds/file.test.ts:
--------------------------------------------------------------------------------
1 | import test from 'ava'
2 | import fs from 'fs/promises'
3 | import path from 'path'
4 | import sinon from 'sinon'
5 |
6 | import {
7 | createEntryData,
8 | createHash,
9 | createRepositoryData,
10 | prepareDirectories
11 | } from './file'
12 |
13 | function randomPaths() {
14 | const random = Math.floor(Math.random() * 1000)
15 | const rootPath = path.join('/tmp', random.toString())
16 | const dataPath = path.join(rootPath, 'data')
17 | const paths = /** @type {import('./data').Paths} */ {
18 | feedsContentPath: path.join(rootPath, 'contents'),
19 | dataPath: dataPath,
20 | categoryDataPath: path.join(dataPath, 'categories'),
21 | embeddedDataPath: path.join(dataPath, 'embedded'),
22 | entriesDataPath: path.join(dataPath, 'entries'),
23 | readabilityCachePath: path.join(dataPath, 'cached'),
24 | sitesDataPath: path.join(dataPath, 'sites'),
25 | repositoryDataPath: path.join(rootPath, 'github.json')
26 | }
27 | return paths
28 | }
29 |
30 | test('#createRepositoryData generate repository information in repository file', async (t) => {
31 | const paths = randomPaths()
32 | await fs.mkdir(paths.dataPath, { recursive: true })
33 | t.deepEqual(await createRepositoryData(paths, '', 'feeds.llun.dev'), {
34 | repository: ''
35 | })
36 | t.deepEqual(
37 | JSON.parse(await fs.readFile(paths.repositoryDataPath, 'utf-8')),
38 | { repository: '' }
39 | )
40 |
41 | t.deepEqual(
42 | await createRepositoryData(paths, 'octocat/Hello-World', 'feeds.llun.dev'),
43 | {
44 | repository: ''
45 | }
46 | )
47 | t.deepEqual(await createRepositoryData(paths, 'octocat/Hello-World', ''), {
48 | repository: '/Hello-World'
49 | })
50 | t.deepEqual(
51 | JSON.parse(await fs.readFile(paths.repositoryDataPath, 'utf-8')),
52 | { repository: '/Hello-World' }
53 | )
54 | })
55 |
56 | test('#createEntryData create entry hash and persist entry information in entry hash file', async (t) => {
57 | const paths = randomPaths()
58 | await fs.mkdir(paths.feedsContentPath, { recursive: true })
59 | await prepareDirectories(paths)
60 |
61 | const expected = {
62 | author: 'Site Author',
63 | content: 'Sample Content',
64 | date: sinon.match.number,
65 | link: 'https://llun.dev/',
66 | title: 'Sample Content',
67 | siteTitle: 'Sample Site',
68 | siteHash: '123456',
69 | entryHash: createHash('Sample Content,https://llun.dev/'),
70 | category: 'category1'
71 | }
72 | sinon.assert.match(
73 | await createEntryData(paths, 'category1', 'Sample Site', '123456', {
74 | author: 'Site Author',
75 | content: 'Sample Content',
76 | date: Date.now(),
77 | link: 'https://llun.dev/',
78 | title: 'Sample Content'
79 | }),
80 | expected
81 | )
82 | sinon.assert.match(
83 | JSON.parse(
84 | await fs.readFile(
85 | path.join(paths.entriesDataPath, `${expected.entryHash}.json`),
86 | 'utf-8'
87 | )
88 | ),
89 | expected
90 | )
91 | })
92 |
--------------------------------------------------------------------------------
/action/feeds/file.ts:
--------------------------------------------------------------------------------
1 | import crypto from 'crypto'
2 | import fs from 'fs/promises'
3 | import path from 'path'
4 | import { getGithubActionPath } from '../repository'
5 | import { loadFeed, readOpml } from './opml'
6 | import { Entry, Site } from './parsers'
7 |
8 | interface Paths {
9 | feedsContentPath: string
10 | categoryDataPath: string
11 | sitesDataPath: string
12 | entriesDataPath: string
13 | dataPath: string
14 | repositoryDataPath: string
15 | }
16 |
17 | interface RepositoryData {
18 | repository: string
19 | }
20 |
21 | interface SiteData {
22 | title: string
23 | link: string
24 | updatedAt: number
25 | siteHash: string
26 | totalEntries: number
27 | }
28 |
29 | interface CategoryData {
30 | name: string
31 | sites: SiteData[]
32 | totalEntries: number
33 | }
34 |
35 | interface SiteEntryData {
36 | title: string
37 | link: string
38 | date: number
39 | author: string
40 | category: string
41 | entryHash: string
42 | siteHash: string
43 | siteTitle: string
44 | }
45 |
46 | interface SiteDataWithEntries extends SiteData {
47 | entries: SiteEntryData[]
48 | }
49 |
50 | interface EntryData extends Entry {
51 | siteTitle: string
52 | siteHash: string
53 | entryHash: string
54 | category: string
55 | }
56 |
57 | export async function createCategoryDirectory(
58 | rootDirectory: string,
59 | category: string
60 | ) {
61 | try {
62 | const stats = await fs.stat(path.join(rootDirectory, category))
63 | if (!stats.isDirectory()) {
64 | throw new Error(
65 | `${path.join(rootDirectory, category)} is not a directory`
66 | )
67 | }
68 | } catch (error) {
69 | if (error.code !== 'ENOENT') {
70 | throw new Error(`Fail to access ${rootDirectory}`)
71 | }
72 | await fs.mkdir(path.join(rootDirectory, category), { recursive: true })
73 | }
74 | }
75 |
76 | export async function loadOPMLAndWriteFiles(
77 | contentDirectory: string,
78 | opmlPath: string
79 | ) {
80 | const opmlContent = (await fs.readFile(opmlPath)).toString('utf8')
81 | const opml = await readOpml(opmlContent)
82 | for (const category of opml) {
83 | const { category: title, items } = category
84 | await createCategoryDirectory(contentDirectory, title)
85 | if (!items) continue
86 | console.log(`Load category ${title}`)
87 | for (const item of items) {
88 | const feedData = await loadFeed(item.title, item.xmlUrl)
89 | if (!feedData) {
90 | continue
91 | }
92 | console.log(`Load ${feedData.title}`)
93 | const sha256 = crypto.createHash('sha256')
94 | sha256.update(feedData.title)
95 | const hexTitle = sha256.digest('hex')
96 | await fs.writeFile(
97 | path.join(contentDirectory, title, `${hexTitle}.json`),
98 | JSON.stringify(feedData)
99 | )
100 | }
101 | }
102 | }
103 |
104 | export const GITHUB_ACTION_PATH = getGithubActionPath()
105 | export const FEEDS_CONTENT_PATH = GITHUB_ACTION_PATH
106 | ? path.join(GITHUB_ACTION_PATH, 'contents')
107 | : path.join('contents')
108 | export const DATA_PATH = GITHUB_ACTION_PATH
109 | ? path.join(GITHUB_ACTION_PATH, 'public', 'data')
110 | : path.join('public', 'data')
111 | export const CATEGORY_DATA_PATH = path.join(DATA_PATH, 'categories')
112 | export const SITES_DATA_PATH = path.join(DATA_PATH, 'sites')
113 | export const ENTRIES_DATA_PATH = path.join(DATA_PATH, 'entries')
114 | export const REPOSITORY_DATA_PATH = path.join(DATA_PATH, 'github.json')
115 |
116 | export const DEFAULT_PATHS = {
117 | feedsContentPath: FEEDS_CONTENT_PATH,
118 | categoryDataPath: CATEGORY_DATA_PATH,
119 | sitesDataPath: SITES_DATA_PATH,
120 | entriesDataPath: ENTRIES_DATA_PATH,
121 | dataPath: DATA_PATH,
122 | repositoryDataPath: REPOSITORY_DATA_PATH
123 | }
124 |
125 | export async function prepareDirectories(paths: Paths) {
126 | const { feedsContentPath, categoryDataPath, sitesDataPath, entriesDataPath } =
127 | paths
128 | await fs.stat(feedsContentPath)
129 | await fs.mkdir(categoryDataPath, { recursive: true })
130 | await fs.mkdir(sitesDataPath, { recursive: true })
131 | await fs.mkdir(entriesDataPath, { recursive: true })
132 | }
133 |
134 | export function createHash(input: string) {
135 | const hash = crypto.createHash('sha256')
136 | hash.update(input)
137 | return hash.digest('hex')
138 | }
139 |
140 | export async function createRepositoryData(
141 | paths: Paths,
142 | githubRootName: string,
143 | customDomainName: string
144 | ) {
145 | const { repositoryDataPath } = paths
146 | const isCustomDomainEnabled = !!customDomainName
147 |
148 | const data: RepositoryData = {
149 | repository:
150 | (!isCustomDomainEnabled &&
151 | githubRootName.split('/').length > 1 &&
152 | `/${githubRootName.split('/')[1]}`) ||
153 | ''
154 | }
155 | await fs.writeFile(repositoryDataPath, JSON.stringify(data))
156 | return data
157 | }
158 |
159 | export async function createEntryData(
160 | paths: Paths,
161 | category: string,
162 | siteTitle: string,
163 | siteHash: string,
164 | entry: Entry
165 | ) {
166 | const { entriesDataPath } = paths
167 | const entryHash = createHash(`${entry.title},${entry.link}`)
168 | const data: EntryData = {
169 | ...entry,
170 | siteTitle,
171 | siteHash,
172 | entryHash,
173 | category
174 | }
175 | await fs.writeFile(
176 | path.join(entriesDataPath, `${entryHash}.json`),
177 | JSON.stringify(data)
178 | )
179 | return data
180 | }
181 |
182 | export async function createSitesData(
183 | paths: Paths,
184 | category: string,
185 | sites: string[]
186 | ) {
187 | const { feedsContentPath, sitesDataPath } = paths
188 | const result: SiteDataWithEntries[] = []
189 | for (const site of sites) {
190 | const content = await fs.readFile(
191 | path.join(feedsContentPath, category, site),
192 | 'utf-8'
193 | )
194 | const json: Site = JSON.parse(content)
195 | const siteHash = createHash(site.substring(0, site.length - '.json'.length))
196 |
197 | const entries = await Promise.all(
198 | json.entries.map(async (entry) => {
199 | const entryData = await createEntryData(
200 | paths,
201 | category,
202 | json.title,
203 | siteHash,
204 | entry
205 | )
206 | return {
207 | title: entryData.title,
208 | link: entryData.link,
209 | date: entryData.date,
210 | author: entryData.author,
211 | category,
212 | siteTitle: json.title,
213 | siteHash,
214 | entryHash: entryData.entryHash
215 | }
216 | })
217 | )
218 |
219 | const data: SiteDataWithEntries = {
220 | title: json.title,
221 | link: json.link,
222 | updatedAt: json.updatedAt,
223 | siteHash,
224 | entries: entries.sort((a, b) => b.date - a.date),
225 | totalEntries: entries.length
226 | }
227 | await fs.writeFile(
228 | path.join(sitesDataPath, `${siteHash}.json`),
229 | JSON.stringify(data)
230 | )
231 | result.push(data)
232 | }
233 | return result
234 | }
235 |
236 | export async function createAllEntriesData() {
237 | const entries = await fs.readdir(ENTRIES_DATA_PATH)
238 | const entriesData = (
239 | await Promise.all(
240 | entries.map(async (entryHashFile) => {
241 | const entry = await fs.readFile(
242 | path.join(ENTRIES_DATA_PATH, entryHashFile),
243 | 'utf-8'
244 | )
245 | try {
246 | const json: EntryData = JSON.parse(entry)
247 | const data: SiteEntryData = {
248 | title: json.title,
249 | link: json.link,
250 | date: json.date,
251 | author: json.author,
252 | siteTitle: json.siteTitle,
253 | siteHash: json.siteHash,
254 | entryHash: json.entryHash,
255 | category: json.category
256 | }
257 | return data
258 | } catch {
259 | return null
260 | }
261 | })
262 | )
263 | )
264 | .filter((item) => item)
265 | .sort((a, b) => b.date - a.date)
266 | const text = JSON.stringify(entriesData)
267 | await fs.writeFile(path.join(DATA_PATH, 'all.json'), text)
268 | }
269 |
270 | export async function createCategoryData(paths: Paths) {
271 | const { feedsContentPath, categoryDataPath, dataPath } = paths
272 | const categories = await fs.readdir(feedsContentPath)
273 | const categoriesData: CategoryData[] = []
274 | for (const category of categories) {
275 | const sites = await fs.readdir(path.join(feedsContentPath, category))
276 | const sitesData = await createSitesData(paths, category, sites)
277 | const totalCategoriesEntries = sitesData.reduce(
278 | (sum, item) => sum + item.entries.length,
279 | 0
280 | )
281 | const categoryData: CategoryData = {
282 | name: category,
283 | sites: sitesData.map((data) => ({
284 | title: data.title,
285 | link: data.link,
286 | updatedAt: data.updatedAt,
287 | siteHash: data.siteHash,
288 | totalEntries: data.entries.length
289 | })),
290 | totalEntries: totalCategoriesEntries
291 | }
292 | categoriesData.push(categoryData)
293 |
294 | const categoryEntries = sitesData.reduce(
295 | (entries, site) => [...entries, ...site.entries],
296 | [] as SiteEntryData[]
297 | )
298 | categoryEntries.sort((a, b) => b.date - a.date)
299 | await fs.writeFile(
300 | path.join(categoryDataPath, `${category}.json`),
301 | JSON.stringify(categoryEntries)
302 | )
303 | }
304 | const text = JSON.stringify(categoriesData)
305 | await fs.writeFile(path.join(dataPath, 'categories.json'), text)
306 | }
307 |
--------------------------------------------------------------------------------
/action/feeds/index.ts:
--------------------------------------------------------------------------------
1 | import * as core from '@actions/core'
2 | import fs from 'fs/promises'
3 | import path from 'path'
4 | import { getWorkspacePath } from '../repository'
5 | import {
6 | cleanup,
7 | copyExistingDatabase,
8 | createOrUpdateDatabase,
9 | createTables,
10 | getDatabase
11 | } from './database'
12 | import {
13 | DEFAULT_PATHS,
14 | createAllEntriesData,
15 | createCategoryData,
16 | createRepositoryData,
17 | loadOPMLAndWriteFiles,
18 | prepareDirectories
19 | } from './file'
20 | import { loadFeed, readOpml } from './opml'
21 |
22 | export async function createFeedDatabase(githubActionPath: string) {
23 | try {
24 | const storageType = core.getInput('storageType')
25 | // This feed site uses files
26 | if (storageType !== 'sqlite') return
27 | const feedsFile = core.getInput('opmlFile', { required: true })
28 | const opmlContent = (
29 | await fs.readFile(path.join(getWorkspacePath(), feedsFile))
30 | ).toString('utf8')
31 | const opml = await readOpml(opmlContent)
32 | const publicPath = githubActionPath
33 | ? path.join(githubActionPath, 'public')
34 | : 'public'
35 | await copyExistingDatabase(publicPath)
36 | const database = getDatabase(publicPath)
37 | await createTables(database)
38 | await createOrUpdateDatabase(database, opml, loadFeed)
39 | await cleanup(database)
40 | await database.destroy()
41 | } catch (error) {
42 | console.error(error.message)
43 | console.error(error.stack)
44 | core.setFailed(error)
45 | }
46 | }
47 |
48 | export async function createFeedFiles(githubActionPath: string) {
49 | try {
50 | const storageType = core.getInput('storageType')
51 | // This feed site uses database
52 | if (storageType === 'sqlite') return
53 | const feedsFile = core.getInput('opmlFile', { required: true })
54 | const publicPath = githubActionPath
55 | ? path.join(githubActionPath, 'contents')
56 | : path.join('contents')
57 | await loadOPMLAndWriteFiles(
58 | publicPath,
59 | path.join(getWorkspacePath(), feedsFile)
60 | )
61 | const customDomainName = core.getInput('customDomain')
62 | const githubRootName = process.env['GITHUB_REPOSITORY'] || ''
63 |
64 | await prepareDirectories(DEFAULT_PATHS)
65 | await createRepositoryData(DEFAULT_PATHS, githubRootName, customDomainName)
66 | await createCategoryData(DEFAULT_PATHS)
67 | await createAllEntriesData()
68 | } catch (error) {
69 | console.error(error.message)
70 | console.error(error.stack)
71 | core.setFailed(error)
72 | }
73 | }
74 |
--------------------------------------------------------------------------------
/action/feeds/opml.test.ts:
--------------------------------------------------------------------------------
1 | import test from 'ava'
2 | import fs from 'fs/promises'
3 | import { knex } from 'knex'
4 | import path from 'path'
5 | import sinon from 'sinon'
6 | import {
7 | createTables,
8 | getAllCategories,
9 | insertCategory,
10 | removeOldCategories
11 | } from './database'
12 | import { readOpml } from './opml'
13 |
14 | test('#readOpml returns categories and sites in OPML file', async (t) => {
15 | const data = (
16 | await fs.readFile(path.join(__dirname, 'stubs', 'opml.xml'))
17 | ).toString('utf-8')
18 | const feeds = await readOpml(data)
19 | sinon.assert.match(feeds, [
20 | { category: 'Category1', items: sinon.match.array },
21 | { category: 'Category2', items: sinon.match.array }
22 | ])
23 | sinon.assert.match(feeds[0].items[0], {
24 | type: 'rss',
25 | text: '@llun story',
26 | title: '@llun story',
27 | htmlUrl: 'https://www.llun.me/',
28 | xmlUrl: 'https://www.llun.me/feeds/main'
29 | })
30 | t.is(feeds[0].items.length, 1)
31 | t.is(feeds[1].items.length, 2)
32 | })
33 |
34 | test('#readOpml returns default category for flat opml', async (t) => {
35 | const data = (
36 | await fs.readFile(path.join(__dirname, 'stubs', 'opml.flat.xml'))
37 | ).toString('utf8')
38 | const feeds = await readOpml(data)
39 | sinon.assert.match(feeds, [{ category: 'default', items: sinon.match.array }])
40 | sinon.assert.match(feeds[0].items[0], {
41 | type: 'rss',
42 | text: '@llun story',
43 | title: '@llun story',
44 | htmlUrl: 'https://www.llun.me/',
45 | xmlUrl: 'https://www.llun.me/feeds/main'
46 | })
47 | t.is(feeds[0].items.length, 3)
48 | })
49 |
50 | test('#readOpml returns default category with feed under category for mixed opml', async (t) => {
51 | const data = (
52 | await fs.readFile(path.join(__dirname, 'stubs', 'opml.mixed.xml'))
53 | ).toString('utf8')
54 | const feeds = await readOpml(data)
55 | sinon.assert.match(feeds, [
56 | { category: 'default', items: sinon.match.array },
57 | { category: 'Category1', items: sinon.match.array }
58 | ])
59 | sinon.assert.match(feeds[1].items[0], {
60 | type: 'rss',
61 | text: '@llun story',
62 | title: '@llun story',
63 | htmlUrl: 'https://www.llun.me/',
64 | xmlUrl: 'https://www.llun.me/feeds/main'
65 | })
66 | t.is(feeds[0].items.length, 2)
67 | t.is(feeds[1].items.length, 1)
68 | })
69 |
70 | test('#readOpml ignore sub-category', async (t) => {
71 | const data = (
72 | await fs.readFile(path.join(__dirname, 'stubs', 'opml.subcategory.xml'))
73 | ).toString('utf8')
74 | const feeds = await readOpml(data)
75 | sinon.assert.match(feeds, [
76 | { category: 'default', items: sinon.match.array },
77 | { category: 'Category1', items: sinon.match.array }
78 | ])
79 | sinon.assert.match(feeds[1].items[0], {
80 | type: 'rss',
81 | text: '@llun story',
82 | title: '@llun story',
83 | htmlUrl: 'https://www.llun.me/',
84 | xmlUrl: 'https://www.llun.me/feeds/main'
85 | })
86 | t.is(feeds[0].items.length, 2)
87 | t.is(feeds[1].items.length, 1)
88 | })
89 |
90 | test('#removeOldCategories do nothing for category exists in opml', async (t) => {
91 | const db = knex({
92 | client: 'sqlite3',
93 | connection: ':memory:',
94 | useNullAsDefault: true
95 | })
96 | await createTables(db)
97 | await insertCategory(db, 'Category1')
98 | await insertCategory(db, 'Category2')
99 |
100 | const data = (
101 | await fs.readFile(path.join(__dirname, 'stubs', 'opml.xml'))
102 | ).toString('utf8')
103 | const opml = await readOpml(data)
104 | await removeOldCategories(db, opml)
105 |
106 | const categories = await getAllCategories(db)
107 | t.deepEqual(categories, ['Category1', 'Category2'])
108 | await db.destroy()
109 | })
110 |
111 | test('#removeOldCategories delete category not exists in opml', async (t) => {
112 | const db = knex({
113 | client: 'sqlite3',
114 | connection: ':memory:',
115 | useNullAsDefault: true
116 | })
117 | await createTables(db)
118 | await insertCategory(db, 'Category1')
119 | await insertCategory(db, 'Category2')
120 | await insertCategory(db, 'Category3')
121 |
122 | const data = (
123 | await fs.readFile(path.join(__dirname, 'stubs', 'opml.xml'))
124 | ).toString('utf8')
125 | const opml = await readOpml(data)
126 | await removeOldCategories(db, opml)
127 | const categories = await getAllCategories(db)
128 | t.deepEqual(categories, ['Category1', 'Category2'])
129 | await db.destroy()
130 | })
131 |
--------------------------------------------------------------------------------
/action/feeds/opml.ts:
--------------------------------------------------------------------------------
1 | import { parseAtom, parseRss, parseXML } from './parsers'
2 |
3 | export async function loadFeed(title: string, url: string) {
4 | try {
5 | const response = await fetch(url, {
6 | headers: { 'User-Agent': 'llun/feed' }
7 | })
8 | const text = await response.text()
9 | const xml = await parseXML(text)
10 | if (!('rss' in xml || 'feed' in xml)) {
11 | return null
12 | }
13 |
14 | const site = 'rss' in xml ? parseRss(title, xml) : parseAtom(title, xml)
15 | return site
16 | } catch (error) {
17 | console.error(
18 | `Fail to load - ${title} (${url}) because of ${error.message}`
19 | )
20 | return null
21 | }
22 | }
23 |
24 | export interface OpmlItem {
25 | type: string
26 | text: string
27 | title: string
28 | xmlUrl: string
29 | htmlUrl: string
30 | }
31 | export interface OpmlCategory {
32 | category: string
33 | items: OpmlItem[]
34 | }
35 |
36 | export async function readOpml(opmlContent: string): Promise {
37 | const input = await parseXML(opmlContent)
38 | const body = input.opml.body
39 | const outlines = body[0].outline
40 |
41 | const rootSubscriptions = outlines
42 | .filter((item: any) => item.$.type === 'rss')
43 | .map((item: any) => item.$)
44 | const categories = outlines
45 | .filter((item: any) => item.$.type !== 'rss')
46 | .reduce((out: OpmlCategory[], outline: any) => {
47 | const category = outline.$.title
48 | const items = outline.outline
49 | out.push({
50 | category,
51 | items:
52 | items &&
53 | items
54 | .map((item: any) => item.$)
55 | .filter((item: any) => item.type === 'rss')
56 | })
57 | return out
58 | }, [])
59 | const output: OpmlCategory[] = []
60 | if (rootSubscriptions.length > 0) {
61 | output.push({
62 | category: 'default',
63 | items: rootSubscriptions
64 | })
65 | }
66 | output.push(...categories)
67 | return output
68 | }
69 |
--------------------------------------------------------------------------------
/action/feeds/parsers#parseAtom.test.ts:
--------------------------------------------------------------------------------
1 | import test from 'ava'
2 | import fs from 'fs'
3 | import path from 'path'
4 | import sinon from 'sinon'
5 | import { parseAtom, parseXML } from './parsers'
6 |
7 | test('#parseAtom returns site information with empty string for fields that does not have information', async (t) => {
8 | const data = fs
9 | .readFileSync(path.join(__dirname, 'stubs', 'atom1.xml'))
10 | .toString('utf8')
11 | const xml = await parseXML(data)
12 | const site = parseAtom('llun site', xml)
13 |
14 | t.is(site?.entries.length, 2)
15 | sinon.assert.match(site, {
16 | title: 'llun site',
17 | description: 'Life, Ride and Code',
18 | link: 'https://www.llun.me/',
19 | updatedAt: new Date('2021-02-16T00:00:00Z').getTime(),
20 | generator: '',
21 | entries: sinon.match.array
22 | })
23 | sinon.assert.match(site?.entries, [
24 | {
25 | title: '2020',
26 | link: 'https://www.llun.me/posts/2020-12-31-2020/',
27 | date: new Date('2020-12-31T00:00:00Z').getTime(),
28 | content: sinon.match.string,
29 | author: 'Maythee Anegboonlap'
30 | },
31 | {
32 | title: 'Festive500',
33 | link: 'https://www.llun.me/posts/ride/2021-01-01-festive-500/',
34 | date: new Date('2021-01-01T00:00:00Z').getTime(),
35 | content: sinon.match.string,
36 | author: 'Maythee Anegboonlap'
37 | }
38 | ])
39 | })
40 |
41 | test('#parseAtom uses summary when entry does not have content', async (t) => {
42 | const data = fs
43 | .readFileSync(path.join(__dirname, 'stubs', 'atom2.xml'))
44 | .toString('utf8')
45 | const xml = await parseXML(data)
46 | const site = parseAtom('cheeaun blog', xml)
47 |
48 | t.is(site?.entries.length, 5)
49 | sinon.assert.match(site, {
50 | title: 'cheeaun blog',
51 | description: '',
52 | link: 'https://cheeaun.com/blog',
53 | updatedAt: new Date('2020-12-31T00:00:00Z').getTime(),
54 | generator: '',
55 | entries: sinon.match.array
56 | })
57 | sinon.assert.match(site?.entries[0], {
58 | title: '2020 in review',
59 | link: 'https://cheeaun.com/blog/2020/12/2020-in-review/',
60 | date: new Date('2020-12-31T00:00:00Z').getTime(),
61 | content:
62 | 'Alright, let’s do this. On January, I received my State of JS t-shirt. 👕 On February, I physically attended JavaScript Bangkok. 🎟 On March,…',
63 | author: 'Lim Chee Aun'
64 | })
65 | })
66 |
--------------------------------------------------------------------------------
/action/feeds/parsers#parseRss.test.ts:
--------------------------------------------------------------------------------
1 | import test from 'ava'
2 | import fs from 'fs'
3 | import path from 'path'
4 | import sinon from 'sinon'
5 | import { parseRss, parseXML } from './parsers'
6 |
7 | test('#parseAtom returns site information with empty string for fields that does not have information', async (t) => {
8 | const data = fs
9 | .readFileSync(path.join(__dirname, 'stubs', 'rss1.xml'))
10 | .toString('utf8')
11 | const xml = await parseXML(data)
12 | const site = parseRss('icez blog', xml)
13 | const firstEntry = xml.rss.channel[0].item[0]
14 | t.is(site?.entries.length, 10)
15 | sinon.assert.match(site, {
16 | title: 'icez blog',
17 | description: 'Technical Blog by icez network',
18 | link: 'https://www.icez.net/blog',
19 | updatedAt: new Date('2021-02-08T10:05:50Z').getTime(),
20 | generator: 'https://wordpress.org/?v=5.3.6',
21 | entries: sinon.match.array
22 | })
23 | sinon.assert.match(site?.entries[0], {
24 | title: firstEntry.title.join('').trim(),
25 | link: firstEntry.link.join('').trim(),
26 | date: new Date('2021-02-08T10:05:48Z').getTime(),
27 | content: firstEntry['content:encoded'].join('').trim(),
28 | author: 'icez'
29 | })
30 | })
31 |
--------------------------------------------------------------------------------
/action/feeds/parsers.ts:
--------------------------------------------------------------------------------
1 | import { parseString } from 'xml2js'
2 | import sanitizeHtml from 'sanitize-html'
3 |
4 | export interface Entry {
5 | title: string
6 | link: string
7 | date: number
8 | content: string
9 | author: string
10 | }
11 |
12 | export interface Site {
13 | title: string
14 | link: string
15 | description: string
16 | updatedAt: number
17 | generator: string
18 | entries: Entry[]
19 | }
20 |
21 | type Values = string[] | { _: string; $: { type: 'text' } }[] | null
22 |
23 | function joinValuesOrEmptyString(values: Values) {
24 | if (values && values.length > 0 && typeof values[0] !== 'string') {
25 | return values[0]._
26 | }
27 | return (values && values.join('').trim()) || ''
28 | }
29 |
30 | export async function parseXML(data: string): Promise {
31 | const xml = await new Promise((resolve, reject) =>
32 | parseString(data, (error, result) => {
33 | if (error) return reject(error)
34 | resolve(result)
35 | })
36 | )
37 | return xml
38 | }
39 |
40 | export function parseRss(feedTitle: string, xml: any): Site {
41 | if (!xml.rss) return null
42 | const { channel: channels } = xml.rss
43 | const {
44 | link,
45 | description,
46 | lastBuildDate,
47 | generator,
48 | item: items
49 | } = channels[0]
50 | const feed = {
51 | title: feedTitle,
52 | link: joinValuesOrEmptyString(link),
53 | description: joinValuesOrEmptyString(description),
54 | updatedAt: new Date(
55 | joinValuesOrEmptyString(lastBuildDate || channels[0]['dc:date'])
56 | ).getTime(),
57 | generator: joinValuesOrEmptyString(generator || channels[0]['dc:creator']),
58 | entries:
59 | (items &&
60 | items.map((item) => {
61 | const { title, link, pubDate, description } = item
62 | return {
63 | title: joinValuesOrEmptyString(title).trim(),
64 | link: joinValuesOrEmptyString(link),
65 | date: new Date(
66 | joinValuesOrEmptyString(pubDate || item['dc:date'])
67 | ).getTime(),
68 | content: sanitizeHtml(
69 | joinValuesOrEmptyString(item['content:encoded'] || description),
70 | {
71 | allowedTags: sanitizeHtml.defaults.allowedTags.concat(['img'])
72 | }
73 | ),
74 | author: joinValuesOrEmptyString(item['dc:creator'])
75 | }
76 | })) ||
77 | []
78 | }
79 |
80 | return feed
81 | }
82 |
83 | export function parseAtom(feedTitle: string, xml: any): Site {
84 | if (!xml.feed) return null
85 | const { title, subtitle, link, updated, generator, entry, author } = xml.feed
86 | const siteLink = link && link.find((item) => item.$.rel === 'alternate')
87 | const siteAuthor = (author && joinValuesOrEmptyString(author[0].name)) || ''
88 | const feed = {
89 | title: feedTitle,
90 | description: joinValuesOrEmptyString(subtitle),
91 | link: siteLink && siteLink.$.href,
92 | updatedAt: new Date(joinValuesOrEmptyString(updated)).getTime(),
93 | generator: joinValuesOrEmptyString(generator),
94 | entries: entry.map((item) => {
95 | const { title, link, published, updated, content, author, summary } = item
96 | const itemLink =
97 | link && (link.find((item) => item.$.rel === 'alternate') || link[0])
98 | const feedContent = content ? content[0]._ : summary ? summary[0]._ : ''
99 | return {
100 | title: joinValuesOrEmptyString(title).trim(),
101 | link: itemLink.$.href,
102 | date: new Date(joinValuesOrEmptyString(published || updated)).getTime(),
103 | content: sanitizeHtml(feedContent, {
104 | allowedTags: sanitizeHtml.defaults.allowedTags.concat(['img'])
105 | }),
106 | author:
107 | (author && joinValuesOrEmptyString(author[0].name)) || siteAuthor
108 | }
109 | })
110 | }
111 |
112 | return feed
113 | }
114 |
--------------------------------------------------------------------------------
/action/feeds/stubs/atom1.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | @llun story
4 | Life, Ride and Code
5 | https://www.llun.me/img/favicon-32x32.png
6 |
7 |
8 | Copyright © 2021, Maythee Anegboonlap
9 | 2021-02-16T00:00:00Z
10 | https://www.llun.me/feeds/main
11 |
12 | Maythee Anegboonlap
13 | contact@llun.me
14 |
15 |
16 |
17 | 2020
18 |
19 | 2020-12-31T00:00:00Z
20 | 2020-12-31T00:00:00Z
21 | https://www.llun.me/posts/2020-12-31-2020/
22 |
23 | Content
25 | ]]>
26 |
27 |
28 |
29 |
30 | Festive500
31 |
32 | 2021-01-01T00:00:00Z
33 | 2021-01-05T00:00:00Z
34 | https://www.llun.me/posts/ride/2021-01-01-festive-500/
35 |
36 | Content 2
38 |
39 | Send a comment
40 |
41 | ]]>
42 |
43 |
44 |
45 |
--------------------------------------------------------------------------------
/action/feeds/stubs/atom2.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | cheeaunblog
4 | tag:cheeaun.com,2011:blog
5 |
6 |
7 | 2020-12-31T00:00:00Z
8 |
9 | Lim Chee Aun
10 |
11 |
12 | https://cheeaun.com/blog/2020/12/2020-in-review/
13 | 2020-12-31T00:00:00Z
14 | 2020 in review
15 | Alright, let’s do this. On January, I received my State of JS t-shirt. 👕 On February, I physically attended JavaScript Bangkok. 🎟 On March,…
16 |
17 |
18 |
19 | https://cheeaun.com/blog/2020/01/2019-in-review/
20 | 2020-01-05T00:00:00Z
21 | 2019 in review
22 | As per my previous year-in-reviews (2018, 2017, 2016, 2015, 2014), I always start with the little things, now properly sectioned by month, as…
23 |
24 |
25 |
26 | https://cheeaun.com/blog/2019/07/next-level-visualizations-exploretrees-sg/
27 | 2019-07-10T00:00:00Z
28 | Next-level visualizations with ExploreTrees.SG
29 | Last year, I wrote about one of my most ambitious side project ever, ExploreTrees.SG. It was simply breath-taking.Revisiting the masterpieceI…
30 |
31 |
32 |
33 | https://cheeaun.com/blog/2019/06/what-i-learned-from-printing-custom-swags/
34 | 2019-06-02T00:00:00Z
35 | What I learned from printing custom swags
36 | Apparently, I’ve been called the “swag king” of Singapore 😅, mainly due to my voluntary work on printing swags like stickers and t-shirts for…
37 |
38 |
39 |
40 | https://cheeaun.com/blog/2019/02/building-busrouter-sg/
41 | 2019-02-04T00:00:00Z
42 | Building BusRouter SG
43 | In 2016, I wrote on Building side projects: More than 10 years ago, I used to take buses all the time. At first, it's quite exhilarating because…
44 |
45 |
46 |
47 |
--------------------------------------------------------------------------------
/action/feeds/stubs/opml.flat.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Feeds
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/action/feeds/stubs/opml.mixed.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Feeds
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/action/feeds/stubs/opml.single.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Feeds
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/action/feeds/stubs/opml.subcategory.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Feeds
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/action/feeds/stubs/opml.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Feeds
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/action/feeds/stubs/rss1.xml:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
11 | icez network
12 |
13 | https://www.icez.net/blog
14 | Technical Blog by icez network
15 | Mon, 08 Feb 2021 10:05:50 +0000
16 | en-US
17 |
18 | hourly
19 |
20 | 1
21 | https://wordpress.org/?v=5.3.6
22 | -
23 | 0249 | rsyslog log remote host to separate file
24 | https://www.icez.net/blog/167459/rsyslog-log-remote-host-to-separate-file
25 | https://www.icez.net/blog/167459/rsyslog-log-remote-host-to-separate-file#respond
26 | Mon, 08 Feb 2021 10:05:48 +0000
27 |
28 |
29 |
30 | https://www.icez.net/blog/?p=167459
31 |
32 | enter these in /etc/rsyslog.d/udp.conf
33 |
$ModLoad imudp
34 | $UDPServerRun 514
35 |
36 |
37 | $FileOwner root
38 | $FileGroup adm
39 | $FileCreateMode 0640
40 | $DirCreateMode 0755
41 | $Umask 0022
42 |
43 | $template RemoteHost,"/var/log/remote/%fromhost-ip%/%$YEAR%/%$MONTH%-%$DAY%.log"
44 |
45 | if $fromhost-ip != '127.0.0.1' then ?RemoteHost
46 | & stop
47 |
48 | ]]>
49 | https://www.icez.net/blog/167459/rsyslog-log-remote-host-to-separate-file/feed
50 | 0
51 |
52 | -
53 | 0248 | vscode remote ssh บน windows
54 | https://www.icez.net/blog/167450/0248-vscode-remote-ssh-on-windows
55 | https://www.icez.net/blog/167450/0248-vscode-remote-ssh-on-windows#respond
56 | Mon, 01 Feb 2021 05:31:58 +0000
57 |
58 |
59 |
60 | https://www.icez.net/blog/?p=167450
61 |
62 | ไม่รู้จะทำให้ชีวิตยุ่งยากไปทำไม ใช้ code-server ง่ายกว่าเยอะเลย แต่ถ้าอยากทำก็…
63 |
สำหรับคนที่ใช้ putty / pageant ในการ ssh อยู่แล้ว และไม่อยากจัดการ key ให้วุ่นวาย
64 |
65 | - ติดตั้ง vscode (แหงล่ะ)
66 | - ติดตั้ง openssh บน windows (กด start > พิมพ์คำว่า optional feature > เลือก เมนู Add an optional features > เลือก OpenSSH Client กด install)
67 |
68 | เชื่อม ssh-agent กับ pageant
69 |
70 | - โหลด https://github.com/benpye/wsl-ssh-pageant มาลง (หน้า release > ไฟล์ wsl-ssh-pageant-amd64-gui.exe)
71 | - เปิด windows explorer คลิกขวาที่ไฟล์ wsl-ssh-pageant-amd64-gui.exe กด create shortcut คลิก
72 | - คลิกขวาไฟล์ shortcut ที่เพิ่งสร้าง กด properties ช่อง target เติม
--winssh ssh-pageant
ต่อท้ายไป (เว้นวรรคก่อนเติมด้วย)
73 | - double click ตัว shortcut ที่รันมาเมื่อกี้ + จับย้ายไปใน startup ได้เลย
74 | - กด start > พิมพ์คำว่า environment เลือก > Edit the system environment > แล้วกดตามนี้
75 | 
76 |
77 | ทดสอบการ ssh
78 |
79 | - เปิด powershell
80 | - สั่ง
ssh-add -l
ถ้าระบบทำงานถูกควรจะมี key ที่เรา add ไว้ใน pageant โผล่ขึ้นมา
81 |
82 | ใช้งาน
83 | ใน vscode ติดตั้ง remote ssh extension แล้วกด connect เข้า server ได้เลย
84 | ]]>
85 | https://www.icez.net/blog/167450/0248-vscode-remote-ssh-on-windows/feed
86 | 0
87 |
88 | -
89 | 0247 | internal linux traffic flow
90 | https://www.icez.net/blog/167435/0247-internal-linux-traffic-flow
91 | https://www.icez.net/blog/167435/0247-internal-linux-traffic-flow#respond
92 | Mon, 01 Jun 2020 10:39:11 +0000
93 |
94 |
95 |
96 | https://www.icez.net/blog/?p=167435
97 |
98 | diagram จาก wikipedia
99 |

100 | action log จาก linux box จริงๆ
101 | Jun 14 13:02:12 deb8 kernel: [ 4273.341087] simple: tc[eth1]ingress_1
102 | Jun 14 13:02:12 deb8 kernel: [ 4273.341114] simple: tc[ifb1]egress_1
103 | Jun 14 13:02:12 deb8 kernel: [ 4273.341229] ipt[PREROUTING]raw IN=eth1 OUT= MAC=08:00:27:ee:8f:15:08:00:27:89:16:5b:08:00 SRC=10.1.1.3 DST=10.1.1.2 LEN=84 TOS=0x00 PREC=0x00 TTL=64 ID=53979 DF PROTO=ICMP TYPE=8 CODE=0 ID=1382 SEQ=1
104 | Jun 14 13:02:12 deb8 kernel: [ 4273.341238] ipt[PREROUTING]mangle IN=eth1 OUT= MAC=08:00:27:ee:8f:15:08:00:27:89:16:5b:08:00 SRC=10.1.1.3 DST=10.1.1.2 LEN=84 TOS=0x00 PREC=0x00 TTL=64 ID=53979 DF PROTO=ICMP TYPE=8 CODE=0 ID=1382 SEQ=1
105 | Jun 14 13:02:12 deb8 kernel: [ 4273.341242] ipt[PREROUTING]nat IN=eth1 OUT= MAC=08:00:27:ee:8f:15:08:00:27:89:16:5b:08:00 SRC=10.1.1.3 DST=10.1.1.2 LEN=84 TOS=0x00 PREC=0x00 TTL=64 ID=53979 DF PROTO=ICMP TYPE=8 CODE=0 ID=1382 SEQ=1
106 | Jun 14 13:02:12 deb8 kernel: [ 4273.341249] ipt[INPUT]mangle IN=eth1 OUT= MAC=08:00:27:ee:8f:15:08:00:27:89:16:5b:08:00 SRC=10.1.1.3 DST=10.1.1.2 LEN=84 TOS=0x00 PREC=0x00 TTL=64 ID=53979 DF PROTO=ICMP TYPE=8 CODE=0 ID=1382 SEQ=1
107 | Jun 14 13:02:12 deb8 kernel: [ 4273.341252] ipt[INPUT]filter IN=eth1 OUT= MAC=08:00:27:ee:8f:15:08:00:27:89:16:5b:08:00 SRC=10.1.1.3 DST=10.1.1.2 LEN=84 TOS=0x00 PREC=0x00 TTL=64 ID=53979 DF PROTO=ICMP TYPE=8 CODE=0 ID=1382 SEQ=1
108 | Jun 14 13:02:12 deb8 kernel: [ 4273.341255] ipt[INPUT]nat IN=eth1 OUT= MAC=08:00:27:ee:8f:15:08:00:27:89:16:5b:08:00 SRC=10.1.1.3 DST=10.1.1.2 LEN=84 TOS=0x00 PREC=0x00 TTL=64 ID=53979 DF PROTO=ICMP TYPE=8 CODE=0 ID=1382 SEQ=1
109 | Jun 14 13:02:12 deb8 kernel: [ 4273.341267] ipt[OUTPUT]raw IN= OUT=eth1 SRC=10.1.1.2 DST=10.1.1.3 LEN=84 TOS=0x00 PREC=0x00 TTL=64 ID=37735 PROTO=ICMP TYPE=0 CODE=0 ID=1382 SEQ=1
110 | Jun 14 13:02:12 deb8 kernel: [ 4273.341270] ipt[OUTPUT]mangle IN= OUT=eth1 SRC=10.1.1.2 DST=10.1.1.3 LEN=84 TOS=0x00 PREC=0x00 TTL=64 ID=37735 PROTO=ICMP TYPE=0 CODE=0 ID=1382 SEQ=1
111 | Jun 14 13:02:12 deb8 kernel: [ 4273.341272] ipt[OUTPUT]filter IN= OUT=eth1 SRC=10.1.1.2 DST=10.1.1.3 LEN=84 TOS=0x00 PREC=0x00 TTL=64 ID=37735 PROTO=ICMP TYPE=0 CODE=0 ID=1382 SEQ=1
112 | Jun 14 13:02:12 deb8 kernel: [ 4273.341274] ipt[POSTROUTING]mangle IN= OUT=eth1 SRC=10.1.1.2 DST=10.1.1.3 LEN=84 TOS=0x00 PREC=0x00 TTL=64 ID=37735 PROTO=ICMP TYPE=0 CODE=0 ID=1382 SEQ=1
113 | Jun 14 13:02:12 deb8 kernel: [ 4273.341278] simple: tc[eth1]egress_1
114 | Jun 14 13:02:12 deb8 kernel: [ 4273.341280] simple: tc[ifb0]egress_1
115 | source: https://unix.stackexchange.com/questions/288959/how-is-the-ifb-device-positioned-in-the-packet-flow-of-the-linux-kernel
116 | ]]>
117 | https://www.icez.net/blog/167435/0247-internal-linux-traffic-flow/feed
118 | 0
119 |
120 | -
121 | 0246 | tcpdump filter multiple vlans
122 | https://www.icez.net/blog/167424/tcpdump-filter-multiple-vlans
123 | https://www.icez.net/blog/167424/tcpdump-filter-multiple-vlans#respond
124 | Sat, 09 May 2020 11:59:58 +0000
125 |
126 |
127 |
128 | https://www.icez.net/blog/?p=167424
129 |
130 |
tcpdump "vlan and (ether[14:2]&0x0fff=2001 or ether[14:2]&0x0fff=2002 or ... or ether[14:2]&0x0fff=2026)"
131 | ref: https://github.com/the-tcpdump-group/libpcap/issues/815
132 | ]]>
133 | https://www.icez.net/blog/167424/tcpdump-filter-multiple-vlans/feed
134 | 0
135 |
136 | -
137 | 0245 | เพจของโครงการก่อสร้างมอเตอร์เวย์ 81 (บางใหญ่ – กาญจนบุรี)
138 | https://www.icez.net/blog/167413/0245-motorway-81-construction-project-pages
139 | https://www.icez.net/blog/167413/0245-motorway-81-construction-project-pages#respond
140 | Sat, 02 May 2020 15:47:41 +0000
141 |
142 |
143 |
144 | https://www.icez.net/blog/?p=167413
145 |
146 | เท่าที่หามาได้ เป็นทางการมั่งไม่เป็นทางการมั่ง
147 |
ตอน 1
148 | https://www.facebook.com/pages/โครงการก่อสร้างมอเตอร์เวย์ทางหลวงระหว่างเมือง-บางใหญ่-กาญจนบุรี-ตอนที่-1/107302320116961
149 | ตอน 1-2
150 | https://www.facebook.com/Ekkabhob/
151 | ตอน 4
152 | https://www.facebook.com/pages/โครงการมอเตอร์เวย์-บางใหญ่-กาญจนบุรี-ตอน4/157102398388620
153 | ตอน 5
154 | https://www.facebook.com/โครงการก่อสร้างทางหลวงพิเศษระหว่างเมือง-สายบางใหญ่-กาญจนบุรี-ตอน5-224749621363734/
155 | https://www.facebook.com/pages/โครงการก่อสร้างทางหลวงพิเศษระหว่างเมือง-สายบางใหญ่-กาญจนบุรี-ตอน-5/168752320719738
156 | ตอน 4-5???
157 | https://www.facebook.com/pages/โครงการ-ทล-81-มอเตอร์เวย์บางใหญ่-กาญจนบุรี/350480208639859
158 | ตอน 6
159 | https://www.facebook.com/pages/สำนักงานควบคุมโครงการ-บางใหญ่-กาญจนบุรี-ตอน-6/1641126339529425
160 | ตอน 7
161 | https://www.facebook.com/pages/โครงการมอเตอร์เวย์บางใหญ่-เมืองกาญตอน7/287834341714815
162 | ตอน 10
163 | https://www.facebook.com/pages/โครงการก่อสร้างทางหลวงพิเศษระหว่างเมือง-บางใหญ่-กาญฯ-ตอน-10/174132526691697
164 | ตอน 12
165 | https://www.facebook.com/โครงการฯ-สายบางใหญ่-กาญจนบุรี-ตอน-12-ระหว่าง-กม-38-ถึง-44-638705109642274/
166 | https://www.facebook.com/pages/โครงการฯ-สายบางใหญ่-กาญจนบุรี-ตอน-12/437367396628493
167 | ตอน 14
168 | https://www.facebook.com/โครงการฯ-สาย-บางใหญ่-กาญจนบุรี-ตอน-14-1540622262634643/
169 | https://www.facebook.com/pages/โครงการ-บางใหญ่-กาญจนบุรี-ตอนที่14/378147885891944
170 | ตอน 15
171 | https://www.facebook.com/โครงการก่อสร้างทางหลวงพิเศษระหว่างเมือง-สาย-บางใหญ่-กาญจนบุรี-ตอน15-1834672776842710/
172 | ตอน 17
173 | https://www.facebook.com/มอเตอร์เวย์-บางใหญ่-กาญจนบุรี-ตอน-17-1872042599675138/
174 | ตอน 20
175 | https://www.facebook.com/pages/โครงการก่อสร้างทางหลวงพิเศษระหว่างเมือง-สายบางใหญ่-กาญจนบุรี-ตอน-20/843291149146834
176 | ตอน 21
177 | https://www.facebook.com/pages/สำนักงานโครงการทางหลวง-สายบางใหญ่-กาญจนบุรี-ตอนที่-21/538736306478037
178 | ตอน 22
179 | https://www.facebook.com/pages/โครงการทางด่วนมอเตอร์เวย์-สาย81-บางใหญ่-กาญจนบุรี-ตอนที่-22/257410621664594
180 | ตอน 23
181 | https://www.facebook.com/bangyaikanchanaburi23/
182 | ]]>
183 | https://www.icez.net/blog/167413/0245-motorway-81-construction-project-pages/feed
184 | 0
185 |
186 | -
187 | 0244 | ใช้ GPU intel transcode video ด้วย ffmpeg
188 | https://www.icez.net/blog/167400/ffmpeg-transcode-video-intel
189 | https://www.icez.net/blog/167400/ffmpeg-transcode-video-intel#respond
190 | Wed, 25 Mar 2020 10:53:26 +0000
191 |
192 |
193 |
194 | https://www.icez.net/blog/?p=167400
195 |
196 | API ใหม่ๆ ของ ffmpeg จะทำงานบน vaapi (video accelerator api) เพราะงั้นเวลา transcode เลยต้องใช้ interface ของ vaapi ประมาณนี้
197 |
ffmpeg \
198 | -hwaccel vaapi -hwaccel_device /dev/dri/renderD128 -hwaccel_output_format vaapi \
199 | -i inputfile.mp4 \
200 | -c:v h264_vaapi -b:v 1500k -profile:v main -g 100 \
201 | -c:a aac -b:a 128k \
202 | -f mp4 output.mp4
203 |
204 |
205 | - สั่ง ffmpeg
206 | - ตั้งค่าให้สัญญาณ video ขาเข้า ไป decode ด้วย vaapi (เพื่อให้เอาผลลัพท์ไปใช้ต่อใน vaapi ได้)
207 | - ระบุสัญญาณ input
208 | - ระบุการแปลง format ให้ใช้ vaapi h264 encoder ปรับ profile/bitrate ตามชอบ option อื่นๆ ไปดูใน docs
209 | - ตั้ง codec เสียง
210 | - เก็บ output เป็น format mp4 ลงไฟล์ output.mp4
211 |
212 | ref: https://trac.ffmpeg.org/wiki/Hardware/VAAPI
213 | แถม อันนี้ใช้ vlc clean สัญญาณกล้องวงจรปิด ก่อนส่งให้ ffmpeg transcode ต่อ (ไม่รู้ทำไม ffmpeg transcode ตรงๆ ไม่ได้)
214 | vlc \
215 | 'rtsp://192.168.1.102/user=admin&password=&channel=1&stream=0.sdp?' \
216 | --sout '#duplicate{dst=std{access=file,mux=ts,dst=-}}' | \
217 | ffmpeg \
218 | -hwaccel vaapi -hwaccel_device /dev/dri/renderD128 -hwaccel_output_format vaapi \
219 | -i - \
220 | -c:v h264_vaapi -b:v 1500k -profile:v main -g 100 \
221 | -y -f flv rtmp://127.0.0.1/live/cctv2
222 |
223 | ]]>
224 | https://www.icez.net/blog/167400/ffmpeg-transcode-video-intel/feed
225 | 0
226 |
227 | -
228 | 0243 | upgrade centos 7 เป็น centos 8
229 | https://www.icez.net/blog/167388/upgrade-centos-7-to-centos-8
230 | https://www.icez.net/blog/167388/upgrade-centos-7-to-centos-8#respond
231 | Wed, 12 Feb 2020 17:06:48 +0000
232 |
233 |
234 |
235 | https://www.icez.net/blog/?p=167388
236 |
237 |
238 | อาจไม่ได้ผลในกรณีที่ลง software จาก repository ภายนอก
239 | ตรวจสอบให้แน่ใจว่าทุก repository ที่ใช้งานอยู่ รองรับ centos 8 ก่อนอัพเกรด
240 | backup ก่อนเสมอ
241 |
242 |
reference: https://www.tecmint.com/upgrade-centos-7-to-centos-8/
243 | 1) ลง epel repository
244 | yum install epel-release -y
245 | 2) ลง package yum-utils กับ rpmconf
246 | yum install yum-utils rpmconf
247 | 3) check ไฟล์ config ว่ามีอะไรเปลี่ยนบ้าง ให้ revert กลับมาที่ default rpm ก่อน ** ระวัง
248 | rpmconf -a
249 | 4) cleanup package ที่ซ้ำซ้อน
250 | package-cleanup --leaves
251 | package-cleanup --orphans
252 | 5) ติดตั้ง dnf แล้วลบ yum ทิ้ง (package manager ตัวใหม่ที่มาแทน yum)
253 | yum install dnf
254 | dnf -y remove yum yum-metadata-parser
255 | rm -Rf /etc/yum
256 | 6) upgrade package ทั้งหมดของ centos 7 ให้เป็น version ล่าสุด
257 | dnf upgrade
258 | 7) ติดตั้ง centos release ของ centos 8 ถ้าวันไหน centos 8 ออก update ก็ต้องแก้ version package ตาม
259 | dnf install \
260 | http://mirror.centos.org/centos/8/BaseOS/x86_64/os/Packages/centos-repos-8.1-1.1911.0.8.el8.x86_64.rpm \
261 | http://mirror.centos.org/centos/8/BaseOS/x86_64/os/Packages/centos-gpg-keys-8.1-1.1911.0.8.el8.noarch.rpm \
262 | http://mirror.centos.org/centos/8/BaseOS/x86_64/os/Packages/centos-release-8.1-1.1911.0.8.el8.x86_64.rpm
263 |
264 | 8) upgrade epel repository และ repository อื่นๆ ให้เป็น package ของ centos 8
265 | dnf -y upgrade https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm
266 | ** ถ้ามีใช้งาน repository ภายนอก เช่น remi ก็ไล่ upgrade release package ของแต่ละ repository ให้หมด
267 | 9) ลบ kernel เก่าของ centos 7 ออก
268 | rpm -e `rpm -q kernel`
269 | 10) ลบ conflict package ระหว่าง centos 7 กับ 8 (อาจมีอีก ถ้า error ตอน upgrade ในขั้นถัดไปก็มาไล่ลบ)
270 | rpm -e --nodeps sysvinit-tools
271 | 11) upgrade เป็น centos 8
272 | dnf -y --releasever=8 --allowerasing --setopt=deltarpm=false distro-sync
273 | 12) ติดตั้ง kernel ใหม่
274 | dnf -y install kernel-core
275 | 13) ติดตั้ง package ใน group minimal ของ centos 8 เพิ่มให้ครบ
276 | dnf -y groupupdate "Core" "Minimal Install"
277 | 14) สำคัญมาก ตรวจสอบให้แน่ใจว่า SELINUX ปิดไปแล้ว (ในไฟล์ /etc/selinux/config)
278 | เสร็จแล้วก็ reboot แล้วก็สวดภาวนาได้เลยครับ
279 | ]]>
280 | https://www.icez.net/blog/167388/upgrade-centos-7-to-centos-8/feed
281 | 0
282 |
283 | -
284 | 0242 | bgp route-map processing flow
285 | https://www.icez.net/blog/167384/0242-bgp-route-map-processing-flow
286 | https://www.icez.net/blog/167384/0242-bgp-route-map-processing-flow#respond
287 | Mon, 10 Feb 2020 05:54:17 +0000
288 |
289 |
290 |
291 | https://www.icez.net/blog/?p=167384
292 |
293 | จดกันลืม
294 |
295 | - list รายการ prefix แล้วส่งไปตรวจสอบกับ route-map ว่ามี match รึเปล่า
296 |
297 | ตัวอย่าง
298 | 2020/02/10 12:45:34 BGP: Route-map: v4-test-out, sequence: 1000, prefix: 192.0.2.0/24, result: no match
299 | 2020/02/10 12:45:34 BGP: Route-map: v4-test-out, sequence: 1010, prefix: 192.0.2.0/24, result: no match
300 | 2020/02/10 12:45:34 BGP: Route-map: v4-test-out, sequence: 1020, prefix: 192.0.2.0/24, result: no match
301 | 2020/02/10 12:45:34 BGP: Route-map: v4-test-out, sequence: 1030, prefix: 192.0.2.0/24, result: no match
302 | 2020/02/10 12:45:34 BGP: Route-map: v4-test-out, prefix: 192.0.2.0/24, result: deny
303 | 2020/02/10 12:45:34 BGP: Route-map: v4-test-out, sequence: 1000, prefix: 192.0.2.2/32, result: no match
304 | 2020/02/10 12:45:34 BGP: Route-map: v4-test-out, sequence: 1010, prefix: 192.0.2.2/32, result: no match
305 | 2020/02/10 12:45:34 BGP: Route-map: v4-test-out, sequence: 1020, prefix: 192.0.2.2/32, result: no match
306 | 2020/02/10 12:45:34 BGP: Route-map: v4-test-out, sequence: 1030, prefix: 192.0.2.2/32, result: no match
307 | 2020/02/10 12:45:34 BGP: Route-map: v4-test-out, prefix: 192.0.2.2/32, result: deny
308 | 2020/02/10 12:45:34 BGP: Route-map: v4-test-out, sequence: 1000, prefix: 10.24.0.0/22, result: no match
309 | 2020/02/10 12:45:34 BGP: Route-map: v4-test-out, sequence: 1010, prefix: 10.24.0.0/22, result: no match
310 | 2020/02/10 12:45:34 BGP: Route-map: v4-test-out, sequence: 1020, prefix: 10.24.0.0/22, result: match
311 | 2020/02/10 12:45:34 BGP: Route-map: v4-test-out, prefix: 10.24.0.0/22, result: permit
312 | ref: https://github.com/FRRouting/frr/blob/master/lib/routemap.c
313 | ]]>
314 | https://www.icez.net/blog/167384/0242-bgp-route-map-processing-flow/feed
315 | 0
316 |
317 | -
318 | 0241 | This Video Is Sponsored By ███ VPN
319 | https://www.icez.net/blog/167376/0241-this-video-is-sponsored-by-%e2%96%88%e2%96%88%e2%96%88-vpn
320 | https://www.icez.net/blog/167376/0241-this-video-is-sponsored-by-%e2%96%88%e2%96%88%e2%96%88-vpn#respond
321 | Wed, 30 Oct 2019 16:17:15 +0000
322 |
323 |
324 |
325 | https://www.icez.net/blog/?p=167376
326 |
327 | คนไทย/คลิปไทยคงไม่ค่อยเจอ แต่นั่งดูคลิปต่างประเทศแล้วชอบเจอ This Video Is Sponsored By ***VPN แล้วก็จะมีพูดว่าทำไมต้องใช้ VPN อย่างโน้นอย่างนี้ ทั้งเรื่อง wifi ไม่ปลอดภัย โน่นนี่นั่นไม่ปลอดภัย อะไรก็แล้วแต่ ที่คนสาย security ดูแล้วก็จะหัวเราะหึหึ
328 |
วันนี้มี Youtuber คนนึงมาพูดเรื่องนี้แทนแล้วครับ แฮปปี้มาก 555 ครบถ้วนกระบวนความ ว่าแล้วก็ไปชมกันเลย
329 |
330 | สำหรับในไทย เหตุผลหลักของการใช้ VPN คงมีอยู่สองเรื่องหลักๆ
331 |
332 | - ป้องกันการติดตามตัว
333 | - เข้าเว็บที่ถูกบล็อก
334 |
335 | (ซึ่งอันนี้ปกติผู้ให้บริการ VPN ก็ไม่ค่อยอยากพูดถึง เพราะมันเทาๆ กึ่งๆ จะผิดกฎหมาย 555)
336 | ]]>
337 | https://www.icez.net/blog/167376/0241-this-video-is-sponsored-by-%e2%96%88%e2%96%88%e2%96%88-vpn/feed
338 | 0
339 |
340 | -
341 | 0240 | ping: socket: Operation not permitted
342 | https://www.icez.net/blog/167369/0240
343 | https://www.icez.net/blog/167369/0240#respond
344 | Tue, 29 Oct 2019 16:43:54 +0000
345 |
346 |
347 |
348 | https://www.icez.net/blog/?p=167369
349 |
350 | rhel 7.4 มีบั๊กนิดหน่อย ทำให้ user ที่ไม่ใช่ root สั่ง ping ไม่ได้ แล้วขึ้น error ประมาณนี้
351 |
ping: socket: Operation not permitted
352 | วิธีแก้ ให้สั่งคำสั่งนี้
353 | sudo setcap 'cap_net_admin,cap_net_raw+ep' /usr/bin/ping
354 |
355 | reference: https://bugzilla.redhat.com/show_bug.cgi?id=1475871
356 | ]]>
357 | https://www.icez.net/blog/167369/0240/feed
358 | 0
359 |
360 |
361 |
--------------------------------------------------------------------------------
/action/repository.ts:
--------------------------------------------------------------------------------
1 | import { spawnSync } from 'child_process'
2 | import fs from 'fs'
3 | import path from 'path'
4 |
5 | export function runCommand(commands: string[], cwd?: string) {
6 | return spawnSync(commands[0], commands.slice(1), {
7 | stdio: 'inherit',
8 | cwd,
9 | env: process.env
10 | })
11 | }
12 |
13 | export function getGithubActionPath() {
14 | const workSpace = process.env['GITHUB_WORKSPACE']
15 | if (!workSpace) {
16 | return ''
17 | }
18 |
19 | const actionPath = '/home/runner/work/_actions/llun/feeds'
20 | try {
21 | const files = fs.readdirSync(actionPath)
22 | const version = files.filter((file) => {
23 | const stat = fs.statSync(path.join(actionPath, file))
24 | return stat.isDirectory()
25 | })
26 | return path.join(actionPath, version.pop() || 'main')
27 | } catch (error) {
28 | return path.join(actionPath, 'main')
29 | }
30 | }
31 |
32 | export function getWorkspacePath() {
33 | const workSpace = process.env['GITHUB_WORKSPACE']
34 | if (!workSpace) {
35 | return ''
36 | }
37 | return workSpace
38 | }
39 |
40 | export async function buildSite() {
41 | const workSpace = getWorkspacePath()
42 | if (workSpace) {
43 | // Remove old static resources
44 | runCommand(['rm', '-rf', '_next'], workSpace)
45 | // Bypass Jekyll
46 | runCommand(['touch', '.nojekyll'], workSpace)
47 |
48 | const core = await import('@actions/core')
49 | const storageType = core.getInput('storageType')
50 | if (storageType === 'files') process.env.NEXT_PUBLIC_STORAGE = 'files'
51 |
52 | const result = runCommand(['yarn', 'build'], getGithubActionPath())
53 | runCommand(['cp', '-rT', 'out', workSpace], getGithubActionPath())
54 | if (result.error) {
55 | throw new Error('Fail to build site')
56 | }
57 | }
58 | }
59 |
60 | export async function setup() {
61 | console.log('Action: ', process.env['GITHUB_ACTION'])
62 | const workSpace = getWorkspacePath()
63 | if (workSpace) {
64 | const core = await import('@actions/core')
65 | const github = await import('@actions/github')
66 | const { Octokit } = await import('@octokit/rest')
67 | const user = process.env['GITHUB_ACTOR']
68 | const token = core.getInput('token', { required: true })
69 | const branch = core.getInput('branch', { required: true })
70 |
71 | const octokit = new Octokit({
72 | auth: token
73 | })
74 | const response = await octokit.repos.listBranches({
75 | owner: github.context.repo.owner,
76 | repo: github.context.repo.repo
77 | })
78 | const isBranchExist = response.data
79 | .map((item) => item.name)
80 | .includes(branch)
81 | const checkoutBranch = isBranchExist
82 | ? branch
83 | : github.context.ref.substring('refs/heads/'.length)
84 | const cloneUrl = `https://${user}:${token}@github.com/${github.context.repo.owner}/${github.context.repo.repo}`
85 | const cloneResult = runCommand([
86 | 'git',
87 | 'clone',
88 | '-b',
89 | checkoutBranch,
90 | '--depth',
91 | '1',
92 | cloneUrl,
93 | workSpace
94 | ])
95 | if (cloneResult.error) {
96 | throw new Error('Fail to clone repository')
97 | }
98 |
99 | if (!isBranchExist) {
100 | console.log(`Create content branch ${branch}`)
101 | const branchResult = runCommand(
102 | ['git', 'checkout', '-B', branch],
103 | workSpace
104 | )
105 | if (branchResult.error) {
106 | throw new Error('Fail to switch branch')
107 | }
108 | }
109 | }
110 | }
111 |
112 | export async function publish() {
113 | const workSpace = getWorkspacePath()
114 | if (workSpace) {
115 | const core = await import('@actions/core')
116 | const github = await import('@actions/github')
117 | const branch = core.getInput('branch', { required: true })
118 | const token = core.getInput('token', { required: true })
119 | const user = process.env['GITHUB_ACTOR']
120 | const pushUrl = `https://${user}:${token}@github.com/${github.context.repo.owner}/${github.context.repo.repo}`
121 |
122 | // Fix custom domain getting disable after run
123 | const customDomain = core.getInput('customDomain')
124 | if (customDomain) {
125 | fs.writeFileSync('CNAME', customDomain)
126 | }
127 |
128 | runCommand(
129 | [
130 | 'rm',
131 | '-rf',
132 | 'action.yml',
133 | 'index.js',
134 | 'package-lock.json',
135 | 'package.json',
136 | '.gitignore',
137 | '.prettierrc.yml',
138 | 'tsconfig.json',
139 | '.eleventy.js',
140 | 'tailwind.config.js',
141 | 'webpack.config.js',
142 | '.github',
143 | 'action',
144 | 'readme.md',
145 | 'app',
146 | 'pages',
147 | 'contents',
148 | 'browser',
149 | 'public',
150 | 'lib',
151 | '.gitlab-ci.yml',
152 | 'yarn.lock',
153 | 'action.js',
154 | 'index.ts',
155 | // NextJS files
156 | 'next-env.d.ts',
157 | 'next.config.js',
158 | 'postcss.config.js',
159 | // Old eleventy structure
160 | 'css',
161 | 'js'
162 | ],
163 | workSpace
164 | )
165 | runCommand(
166 | ['git', 'config', '--global', 'user.email', 'bot@llun.dev'],
167 | workSpace
168 | )
169 | runCommand(
170 | ['git', 'config', '--global', 'user.name', '"Feed bots"'],
171 | workSpace
172 | )
173 | runCommand(['git', 'add', '-f', '--all'], workSpace)
174 | runCommand(['git', 'commit', '-m', 'Update feeds contents'], workSpace)
175 | runCommand(['git', 'push', '-f', pushUrl, `HEAD:${branch}`], workSpace)
176 | }
177 | }
178 |
--------------------------------------------------------------------------------
/app/globals.css:
--------------------------------------------------------------------------------
1 | @import "tailwindcss";
2 | @import "tw-animate-css";
3 |
4 | @custom-variant dark (&:is(.dark *));
5 |
6 | @theme inline {
7 | --color-background: var(--background);
8 | --color-foreground: var(--foreground);
9 | --font-sans: var(--font-geist-sans);
10 | --font-mono: var(--font-geist-mono);
11 | --color-sidebar-ring: var(--sidebar-ring);
12 | --color-sidebar-border: var(--sidebar-border);
13 | --color-sidebar-accent-foreground: var(--sidebar-accent-foreground);
14 | --color-sidebar-accent: var(--sidebar-accent);
15 | --color-sidebar-primary-foreground: var(--sidebar-primary-foreground);
16 | --color-sidebar-primary: var(--sidebar-primary);
17 | --color-sidebar-foreground: var(--sidebar-foreground);
18 | --color-sidebar: var(--sidebar);
19 | --color-chart-5: var(--chart-5);
20 | --color-chart-4: var(--chart-4);
21 | --color-chart-3: var(--chart-3);
22 | --color-chart-2: var(--chart-2);
23 | --color-chart-1: var(--chart-1);
24 | --color-ring: var(--ring);
25 | --color-input: var(--input);
26 | --color-border: var(--border);
27 | --color-destructive: var(--destructive);
28 | --color-accent-foreground: var(--accent-foreground);
29 | --color-accent: var(--accent);
30 | --color-muted-foreground: var(--muted-foreground);
31 | --color-muted: var(--muted);
32 | --color-secondary-foreground: var(--secondary-foreground);
33 | --color-secondary: var(--secondary);
34 | --color-primary-foreground: var(--primary-foreground);
35 | --color-primary: var(--primary);
36 | --color-popover-foreground: var(--popover-foreground);
37 | --color-popover: var(--popover);
38 | --color-card-foreground: var(--card-foreground);
39 | --color-card: var(--card);
40 | --radius-sm: calc(var(--radius) - 4px);
41 | --radius-md: calc(var(--radius) - 2px);
42 | --radius-lg: var(--radius);
43 | --radius-xl: calc(var(--radius) + 4px);
44 | }
45 |
46 | :root {
47 | --radius: 0.625rem;
48 | --background: oklch(1 0 0);
49 | --foreground: oklch(0.145 0 0);
50 | --card: oklch(1 0 0);
51 | --card-foreground: oklch(0.145 0 0);
52 | --popover: oklch(1 0 0);
53 | --popover-foreground: oklch(0.145 0 0);
54 | --primary: oklch(0.205 0 0);
55 | --primary-foreground: oklch(0.985 0 0);
56 | --secondary: oklch(0.97 0 0);
57 | --secondary-foreground: oklch(0.205 0 0);
58 | --muted: oklch(0.97 0 0);
59 | --muted-foreground: oklch(0.556 0 0);
60 | --accent: oklch(0.97 0 0);
61 | --accent-foreground: oklch(0.205 0 0);
62 | --destructive: oklch(0.577 0.245 27.325);
63 | --border: oklch(0.922 0 0);
64 | --input: oklch(0.922 0 0);
65 | --ring: oklch(0.708 0 0);
66 | --chart-1: oklch(0.646 0.222 41.116);
67 | --chart-2: oklch(0.6 0.118 184.704);
68 | --chart-3: oklch(0.398 0.07 227.392);
69 | --chart-4: oklch(0.828 0.189 84.429);
70 | --chart-5: oklch(0.769 0.188 70.08);
71 | --sidebar: oklch(0.985 0 0);
72 | --sidebar-foreground: oklch(0.145 0 0);
73 | --sidebar-primary: oklch(0.205 0 0);
74 | --sidebar-primary-foreground: oklch(0.985 0 0);
75 | --sidebar-accent: oklch(0.97 0 0);
76 | --sidebar-accent-foreground: oklch(0.205 0 0);
77 | --sidebar-border: oklch(0.922 0 0);
78 | --sidebar-ring: oklch(0.708 0 0);
79 | }
80 |
81 | .dark {
82 | --background: oklch(0.145 0 0);
83 | --foreground: oklch(0.985 0 0);
84 | --card: oklch(0.205 0 0);
85 | --card-foreground: oklch(0.985 0 0);
86 | --popover: oklch(0.205 0 0);
87 | --popover-foreground: oklch(0.985 0 0);
88 | --primary: oklch(0.922 0 0);
89 | --primary-foreground: oklch(0.205 0 0);
90 | --secondary: oklch(0.269 0 0);
91 | --secondary-foreground: oklch(0.985 0 0);
92 | --muted: oklch(0.269 0 0);
93 | --muted-foreground: oklch(0.708 0 0);
94 | --accent: oklch(0.269 0 0);
95 | --accent-foreground: oklch(0.985 0 0);
96 | --destructive: oklch(0.704 0.191 22.216);
97 | --border: oklch(1 0 0 / 10%);
98 | --input: oklch(1 0 0 / 15%);
99 | --ring: oklch(0.556 0 0);
100 | --chart-1: oklch(0.488 0.243 264.376);
101 | --chart-2: oklch(0.696 0.17 162.48);
102 | --chart-3: oklch(0.769 0.188 70.08);
103 | --chart-4: oklch(0.627 0.265 303.9);
104 | --chart-5: oklch(0.645 0.246 16.439);
105 | --sidebar: oklch(0.205 0 0);
106 | --sidebar-foreground: oklch(0.985 0 0);
107 | --sidebar-primary: oklch(0.488 0.243 264.376);
108 | --sidebar-primary-foreground: oklch(0.985 0 0);
109 | --sidebar-accent: oklch(0.269 0 0);
110 | --sidebar-accent-foreground: oklch(0.985 0 0);
111 | --sidebar-border: oklch(1 0 0 / 10%);
112 | --sidebar-ring: oklch(0.556 0 0);
113 | }
114 |
115 | @layer base {
116 | * {
117 | @apply border-border outline-ring/50;
118 | }
119 |
120 | body {
121 | @apply bg-background text-foreground;
122 | }
123 |
124 | button {
125 | cursor: pointer;
126 | }
127 | }
128 |
--------------------------------------------------------------------------------
/app/layout.tsx:
--------------------------------------------------------------------------------
1 | import type { Metadata, Viewport } from 'next'
2 | import { Geist, Geist_Mono } from 'next/font/google'
3 | import { ThemeProvider } from 'next-themes'
4 |
5 | import './globals.css'
6 |
7 | const geistSans = Geist({
8 | variable: '--font-geist-sans',
9 | subsets: ['latin']
10 | })
11 |
12 | const geistMono = Geist_Mono({
13 | variable: '--font-geist-mono',
14 | subsets: ['latin']
15 | })
16 |
17 | export const metadata: Metadata = {
18 | title: 'Feeds',
19 | description: 'Static Feeds Aggregator',
20 | icons: {
21 | icon: '/favicon.ico'
22 | }
23 | }
24 |
25 | export const viewport: Viewport = {
26 | width: 'device-width',
27 | initialScale: 1,
28 | themeColor: [
29 | { media: '(prefers-color-scheme: light)', color: '#ffffff' },
30 | { media: '(prefers-color-scheme: dark)', color: '#000000' }
31 | ]
32 | }
33 |
34 | export const dynamicParams = false
35 | export const dynamic = 'force-static'
36 |
37 | export default function RootLayout({
38 | children
39 | }: {
40 | children: React.ReactNode
41 | }) {
42 | return (
43 |
44 |
47 |
48 | {children}
49 |
50 |
51 |
52 | )
53 | }
54 |
--------------------------------------------------------------------------------
/app/not-found.tsx:
--------------------------------------------------------------------------------
1 | page.tsx
--------------------------------------------------------------------------------
/app/page.tsx:
--------------------------------------------------------------------------------
1 | import { FC } from 'react'
2 | import { Page } from '../lib/page'
3 |
4 | const Index: FC = async () => {
5 | return
6 | }
7 |
8 | export default Index
9 |
--------------------------------------------------------------------------------
/components.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://ui.shadcn.com/schema.json",
3 | "style": "new-york",
4 | "rsc": true,
5 | "tsx": true,
6 | "tailwind": {
7 | "config": "",
8 | "css": "src/app/globals.css",
9 | "baseColor": "neutral",
10 | "cssVariables": true,
11 | "prefix": ""
12 | },
13 | "aliases": {
14 | "components": "@/components",
15 | "utils": "@/lib/utils",
16 | "ui": "@/components/ui",
17 | "lib": "@/lib",
18 | "hooks": "@/hooks"
19 | },
20 | "iconLibrary": "lucide"
21 | }
22 |
--------------------------------------------------------------------------------
/feeds.opml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Feeds
5 |
6 |
7 |
8 |
10 |
11 |
12 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/index.ts:
--------------------------------------------------------------------------------
1 | import { createFeedDatabase, createFeedFiles } from './action/feeds'
2 | import {
3 | buildSite,
4 | getGithubActionPath,
5 | publish,
6 | setup
7 | } from './action/repository'
8 |
9 | async function run() {
10 | await setup()
11 | await createFeedDatabase(getGithubActionPath())
12 | await createFeedFiles(getGithubActionPath())
13 | await buildSite()
14 | await publish()
15 | }
16 |
17 | run()
18 | .then(() => {
19 | console.log('Done')
20 | process.exit(0)
21 | })
22 | .catch((error) => {
23 | console.error(error.message)
24 | console.error(error.stack)
25 | })
26 |
--------------------------------------------------------------------------------
/lib/components/BackButton.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react'
2 | import { ChevronLeft } from 'lucide-react'
3 |
4 | interface Props {
5 | onClickBack: () => void
6 | }
7 |
8 | export const BackButton = ({ onClickBack }: Props) => {
9 | return (
10 |
17 | )
18 | }
19 |
--------------------------------------------------------------------------------
/lib/components/CategoryList.tsx:
--------------------------------------------------------------------------------
1 | import React, { useState } from 'react'
2 | import Image from 'next/image'
3 | import { Category } from '../storage/types'
4 | import { ThemeToggle } from './ThemeToggle'
5 |
6 | interface CategoryListProps {
7 | categories: Category[]
8 | totalEntries: number | null
9 | selectCategory?: (category: string) => void
10 | selectSite?: (siteKey: string, siteTitle: string) => void
11 | }
12 |
13 | export const CategoryList = ({
14 | categories,
15 | totalEntries,
16 | selectCategory,
17 | selectSite
18 | }: CategoryListProps) => {
19 | const [currentCategory, setCurrentCategory] = useState()
20 | return (
21 |
92 | )
93 | }
94 |
--------------------------------------------------------------------------------
/lib/components/ItemContent.tsx:
--------------------------------------------------------------------------------
1 | import React, { useEffect } from 'react'
2 | import { Content } from '../storage/types'
3 | import { format, formatDistance } from 'date-fns'
4 | import { BackButton } from './BackButton'
5 | import parse from 'html-react-parser'
6 |
7 | interface ReactParserNode {
8 | name: string
9 | attribs?: {
10 | [key in string]: string
11 | }
12 | }
13 |
14 | interface ItemContentProps {
15 | content?: Content
16 | selectBack?: () => void
17 | }
18 |
19 | export const ItemContent = ({ content, selectBack }: ItemContentProps) => {
20 | let element: HTMLElement | null = null
21 | useEffect(() => {
22 | if (!element) return
23 | element.scrollTo(0, 0)
24 | }, [content])
25 |
26 | if (!content) {
27 | return (
28 |
29 |
Select an item from the list to view its content.
30 |
31 | )
32 | }
33 |
34 | return (
35 |
36 |
37 |
38 |
39 |
40 |
41 |
{content.title}
42 |
43 |
44 | Published:{' '}
45 | {formatDistance(content.timestamp * 1000, new Date(), {
46 | addSuffix: true
47 | })}
48 |
49 |
|
50 |
56 | View Original
57 |
58 |
59 |
60 |
61 | {
64 | element = contentPane
65 | }}
66 | >
67 | {parse(content.content, {
68 | replace: (domNode) => {
69 | const node = domNode as ReactParserNode
70 | if (node.attribs && node.name === 'a') {
71 | node.attribs.target = '_blank'
72 | node.attribs.rel = 'noopener noreferrer'
73 | return node
74 | }
75 | return domNode
76 | }
77 | })}
78 |
79 |
80 | )
81 | }
82 |
--------------------------------------------------------------------------------
/lib/components/ItemList.tsx:
--------------------------------------------------------------------------------
1 | import React, { useEffect, useRef, useState } from 'react'
2 | import { SiteEntry } from '../storage/types'
3 | import { formatDistance } from 'date-fns'
4 | import { LocationState } from '../utils'
5 | import { getStorage } from '../storage'
6 | import { BackButton } from './BackButton'
7 |
8 | interface ItemListProps {
9 | basePath: string
10 | title: string
11 | locationState: LocationState
12 | selectEntry?: (
13 | parentType: string,
14 | parentKey: string,
15 | entryKey: string
16 | ) => void
17 | selectSite?: (siteKey: string) => void
18 | selectBack?: () => void
19 | }
20 |
21 | export const ItemList = ({
22 | basePath,
23 | title,
24 | locationState,
25 | selectSite,
26 | selectEntry,
27 | selectBack
28 | }: ItemListProps) => {
29 | const [pageState, setPageState] = useState<'loaded' | 'loading'>('loading')
30 | const [currentCategoryOrSite, setCurrentCategoryOrSite] = useState('')
31 | const [entries, setEntries] = useState([])
32 | const [totalEntry, setTotalEntry] = useState(0)
33 | const [selectedEntryHash, setSelectedEntryHash] = useState('')
34 | const [page, setPage] = useState(0)
35 |
36 | const itemsRef = useRef(null)
37 | const nextBatchEntry = useRef(null)
38 |
39 | let element: HTMLElement | null = null
40 |
41 | const loadEntries = async (
42 | basePath: string,
43 | locationState: LocationState,
44 | page: number = 0
45 | ) => {
46 | const storage = getStorage(basePath)
47 | switch (locationState.type) {
48 | case 'category': {
49 | const category = locationState.category
50 | const [entries, totalEntry] = await Promise.all([
51 | storage.getCategoryEntries(category, page),
52 | storage.countCategoryEntries(category)
53 | ])
54 | return { entries, totalEntry }
55 | }
56 | case 'site': {
57 | const { siteKey } = locationState
58 | const [entries, totalEntry] =
59 | siteKey === 'all'
60 | ? await Promise.all([
61 | storage.getAllEntries(page),
62 | storage.countAllEntries()
63 | ])
64 | : await Promise.all([
65 | storage.getSiteEntries(siteKey, page),
66 | storage.countSiteEntries(siteKey)
67 | ])
68 | return { entries, totalEntry }
69 | }
70 | case 'entry':
71 | const { parent } = locationState
72 | const { key } = parent
73 | if (parent.type === 'category') {
74 | const [entries, totalEntry] = await Promise.all([
75 | storage.getCategoryEntries(key, page),
76 | storage.countCategoryEntries(key)
77 | ])
78 | return { entries, totalEntry }
79 | }
80 |
81 | const [entries, totalEntry] =
82 | key === 'all'
83 | ? await Promise.all([
84 | storage.getAllEntries(page),
85 | storage.countAllEntries()
86 | ])
87 | : await Promise.all([
88 | storage.getSiteEntries(key, page),
89 | storage.countSiteEntries(key)
90 | ])
91 | return { entries, totalEntry }
92 | }
93 | }
94 |
95 | const loadNextPage = async (page: number): Promise => {
96 | if (pageState === 'loading') return
97 | if (entries.length === totalEntry) return
98 |
99 | const { entries: newEntries } = await loadEntries(
100 | basePath,
101 | locationState,
102 | page
103 | )
104 | setEntries(entries.concat(newEntries))
105 | }
106 |
107 | const selectEntryHash = (entryKey: string, scrollIntoView?: boolean) => {
108 | setSelectedEntryHash(entryKey)
109 | if (scrollIntoView) {
110 | const dom = globalThis.document.querySelector(`#entry-${entryKey}`)
111 | dom?.scrollIntoView({
112 | block: 'center',
113 | inline: 'start'
114 | })
115 | }
116 | if (!selectEntry) return
117 | selectEntry(parentType, parentKey, entryKey)
118 | }
119 |
120 | useEffect(() => {
121 | if (locationState.type === 'entry') return
122 |
123 | switch (locationState.type) {
124 | case 'category': {
125 | if (currentCategoryOrSite === locationState.category) return
126 | return setCurrentCategoryOrSite(locationState.category)
127 | }
128 | case 'site': {
129 | if (currentCategoryOrSite === locationState.siteKey) return
130 | return setCurrentCategoryOrSite(locationState.siteKey)
131 | }
132 | }
133 | }, [locationState])
134 |
135 | useEffect(() => {
136 | if (!element) return
137 | ;(async (element: HTMLElement) => {
138 | const { entries: newEntries, totalEntry } = await loadEntries(
139 | basePath,
140 | locationState
141 | )
142 | setPageState('loaded')
143 | setEntries(newEntries)
144 | setTotalEntry(totalEntry)
145 | setPage(0)
146 | element.scrollTo(0, 0)
147 | })(element)
148 | }, [currentCategoryOrSite, element])
149 |
150 | useEffect(() => {
151 | if (!nextBatchEntry?.current) return
152 |
153 | const observer = new IntersectionObserver((entries) => {
154 | const [entry] = entries
155 | if (pageState === 'loading') return
156 | if (entry.isIntersecting) {
157 | setPageState('loading')
158 | loadNextPage(page + 1).then(() => {
159 | setPage((current) => current + 1)
160 | setPageState('loaded')
161 | })
162 | }
163 | })
164 | observer.observe(nextBatchEntry.current)
165 | return () => {
166 | observer.disconnect()
167 | }
168 | }, [nextBatchEntry, totalEntry, entries])
169 |
170 | useEffect(() => {
171 | const handler: EventListener = (event: KeyboardEvent) => {
172 | switch (event.code) {
173 | case 'ArrowUp':
174 | case 'KeyW': {
175 | event.preventDefault()
176 | if (!selectedEntryHash) {
177 | selectEntryHash(entries[0].key)
178 | return
179 | }
180 |
181 | const index = entries.findIndex(
182 | (entry) => entry.key === selectedEntryHash
183 | )
184 | if (index <= 0) return
185 | selectEntryHash(entries[index - 1].key, true)
186 | return
187 | }
188 | case 'ArrowDown':
189 | case 'KeyS': {
190 | event.preventDefault()
191 | if (!selectedEntryHash) {
192 | selectEntryHash(entries[0].key)
193 | return
194 | }
195 |
196 | const index = entries.findIndex(
197 | (entry) => entry.key === selectedEntryHash
198 | )
199 | if (index >= entries.length - 1) return
200 | selectEntryHash(entries[index + 1].key, true)
201 | return
202 | }
203 | }
204 | }
205 | globalThis.document.addEventListener('keydown', handler)
206 | return () => {
207 | globalThis.document.removeEventListener('keydown', handler)
208 | }
209 | }, [entries, selectedEntryHash])
210 |
211 | const parentType =
212 | locationState.type === 'entry'
213 | ? locationState.parent.type
214 | : locationState.type
215 | const parentKey =
216 | locationState.type === 'entry'
217 | ? locationState.parent.key
218 | : locationState.type === 'category'
219 | ? locationState.category
220 | : locationState.siteKey
221 |
222 | return (
223 |
224 |
225 |
226 |
227 |
228 |
{
231 | element = section
232 | }}
233 | >
234 |
{title}
235 |
236 |
237 |
238 |
239 | {pageState === 'loading' ? (
240 |
241 |
242 |
243 | Loading items...
244 |
245 |
246 | ) : entries.length > 0 ? (
247 |
251 | {entries.map((entry, index) => (
252 | -
266 |
267 |
281 |
282 |
290 |
291 | •
292 |
293 |
294 | {formatDistance(entry.timestamp * 1000, new Date(), {
295 | addSuffix: true
296 | })}
297 |
298 |
299 |
300 |
301 | ))}
302 |
303 | ) : (
304 |
305 |
No items to display.
306 |
307 | Select a category or site from the left panel.
308 |
309 |
310 | )}
311 |
312 |
313 | )
314 | }
315 |
--------------------------------------------------------------------------------
/lib/components/ThemeToggle.tsx:
--------------------------------------------------------------------------------
1 | import { useTheme } from 'next-themes'
2 | import { Sun, Moon, Laptop } from 'lucide-react'
3 | import { useState } from 'react'
4 |
5 | export const ThemeToggle = () => {
6 | const { theme, setTheme } = useTheme()
7 | const [showModal, setShowModal] = useState(false)
8 |
9 | const getCurrentIcon = () => {
10 | switch (theme) {
11 | case 'light':
12 | return
13 | case 'dark':
14 | return
15 | default:
16 | return
17 | }
18 | }
19 |
20 | return (
21 |
22 |
29 |
30 | {showModal && (
31 | <>
32 |
setShowModal(false)}
35 | />
36 |
37 |
38 |
51 |
64 |
77 |
78 |
79 | >
80 | )}
81 |
82 | )
83 | }
84 |
--------------------------------------------------------------------------------
/lib/fixtures/contents/cat1/site1.json:
--------------------------------------------------------------------------------
1 | {
2 | "title": "Site1 Title",
3 | "description": "Sample description",
4 | "updatedAt": 1637542213292,
5 | "generator": "Feed Generator",
6 | "entries": [
7 | {
8 | "title": "Content1",
9 | "link": "https://www.llun.me/sample1",
10 | "date": 1636588800000,
11 | "content": "Content1",
12 | "author": "Author1"
13 | },
14 | {
15 | "title": "Content2",
16 | "link": "https://www.llun.me/sample2",
17 | "date": 1636588700000,
18 | "content": "Content2",
19 | "author": "Author1"
20 | }
21 | ]
22 | }
23 |
--------------------------------------------------------------------------------
/lib/fixtures/contents/cat1/site2.json:
--------------------------------------------------------------------------------
1 | {
2 | "title": "Site2 Title",
3 | "description": "Sample description",
4 | "updatedAt": 1637542213292,
5 | "generator": "Feed Generator",
6 | "entries": [
7 | {
8 | "title": "Content1",
9 | "link": "https://www.llun.me/sample3",
10 | "date": 1636588850000,
11 | "content": "Content1",
12 | "author": "Author2"
13 | },
14 | {
15 | "title": "Content2",
16 | "link": "https://www.llun.me/sample4",
17 | "date": 1636588750000,
18 | "content": "Content2",
19 | "author": "Author2"
20 | }
21 | ]
22 | }
23 |
--------------------------------------------------------------------------------
/lib/fixtures/contents/cat2/site3.json:
--------------------------------------------------------------------------------
1 | {
2 | "title": "Site3 Title",
3 | "description": "Sample description",
4 | "updatedAt": 1637542213292,
5 | "generator": "Feed Generator",
6 | "entries": [
7 | {
8 | "title": "Content1",
9 | "link": "https://www.llun.me/sample5",
10 | "date": 1636588820000,
11 | "content": "Content1",
12 | "author": "Author3"
13 | },
14 | {
15 | "title": "Content2",
16 | "link": "https://www.llun.me/sample6",
17 | "date": 1636588720000,
18 | "content": "Content2",
19 | "author": "Author4"
20 | }
21 | ]
22 | }
23 |
--------------------------------------------------------------------------------
/lib/page.tsx:
--------------------------------------------------------------------------------
1 | 'use client'
2 |
3 | import { FC, useState, useEffect, useReducer } from 'react'
4 | import { usePathname, useRouter } from 'next/navigation'
5 |
6 | import { ItemList } from './components/ItemList'
7 | import { ItemContent } from './components/ItemContent'
8 | import { CategoryList } from '../lib/components/CategoryList'
9 | import { getStorage } from '../lib/storage'
10 | import { Category, Content } from '../lib/storage/types'
11 | import {
12 | PageState,
13 | articleClassName,
14 | categoriesClassName,
15 | entriesClassName,
16 | locationController,
17 | parseLocation
18 | } from '../lib/utils'
19 | import { PathReducer, updatePath } from './reducers/path'
20 |
21 | export const Page: FC = () => {
22 | const [status, setStatus] = useState<'loading' | 'loaded'>('loading')
23 | const [pageState, setPageState] = useState
('categories')
24 | const [categories, setCategories] = useState([])
25 | const [listTitle, setListTitle] = useState('')
26 | const [content, setContent] = useState(null)
27 | const [totalEntries, setTotalEntries] = useState(null)
28 | const router = useRouter()
29 | const originalPath = usePathname()
30 | const [state, dispatch] = useReducer(PathReducer, {
31 | pathname: originalPath,
32 | location: parseLocation(originalPath)
33 | })
34 |
35 | useEffect(() => {
36 | ;(async () => {
37 | if (!state.location) {
38 | const targetPath = '/sites/all'
39 | dispatch(updatePath(targetPath))
40 | return
41 | }
42 |
43 | if (status === 'loading') {
44 | const storage = getStorage(process.env.NEXT_PUBLIC_BASE_PATH ?? '')
45 | const [categories, totalEntries] = await Promise.all([
46 | storage.getCategories(),
47 | storage.countAllEntries()
48 | ])
49 | setTotalEntries(totalEntries)
50 | setCategories(categories)
51 | setStatus('loaded')
52 | }
53 |
54 | await locationController(
55 | state.location,
56 | state.pathname,
57 | setContent,
58 | setPageState
59 | )
60 | })()
61 |
62 | const historyPopHandler = (event: PopStateEvent) => {
63 | dispatch(updatePath(originalPath))
64 | }
65 | window.addEventListener('popstate', historyPopHandler)
66 | return () => {
67 | window.removeEventListener('popstate', historyPopHandler)
68 | }
69 | }, [status, state, router])
70 |
71 | useEffect(() => {
72 | const storage = getStorage(process.env.NEXT_PUBLIC_BASE_PATH ?? '')
73 | switch (state.location?.type) {
74 | case 'category':
75 | setListTitle(state.location.category)
76 | break
77 | case 'site': {
78 | if (state.location.siteKey === 'all') {
79 | setListTitle('All Items')
80 | break
81 | }
82 | storage.getSiteEntries(state.location.siteKey).then((entries) => {
83 | if (entries.length === 0) return
84 | setListTitle(entries[0].site.title)
85 | })
86 | break
87 | }
88 | case 'entry': {
89 | const parentType = state.location.parent.type
90 | if (parentType === 'category') {
91 | setListTitle(state.location.parent.key)
92 | break
93 | }
94 |
95 | if (state.location.parent.key === 'all') {
96 | setListTitle('All Items')
97 | break
98 | }
99 |
100 | storage.getSiteEntries(state.location.parent.key).then((entries) => {
101 | if (entries.length === 0) return
102 | setListTitle(entries[0].site.title)
103 | })
104 | break
105 | }
106 | default:
107 | setListTitle('All Items')
108 | break
109 | }
110 | }, [state])
111 |
112 | if (status === 'loading') {
113 | return (
114 |
115 |
116 |
117 |
Loading content...
118 |
119 | This will take a few seconds
120 |
121 |
122 |
123 | )
124 | }
125 |
126 | return (
127 |
128 |
133 | {
137 | setListTitle(category)
138 | dispatch(updatePath(`/categories/${category}`))
139 | }}
140 | selectSite={(siteKey: string, siteTitle: string) => {
141 | setListTitle(siteTitle)
142 | dispatch(updatePath(`/sites/${siteKey}`))
143 | }}
144 | />
145 |
146 |
147 |
152 | {listTitle ? (
153 |
setPageState('categories')}
158 | selectSite={(site: string) => {
159 | dispatch(updatePath(`/sites/${site}`))
160 | }}
161 | selectEntry={(
162 | parentType: string,
163 | parentKey: string,
164 | entryKey: string
165 | ) => {
166 | const targetPath = `/${
167 | parentType === 'category' ? 'categories' : 'sites'
168 | }/${parentKey}/entries/${entryKey}`
169 | dispatch(updatePath(targetPath))
170 | }}
171 | />
172 | ) : (
173 |
174 |
175 | Select a category or site from the left panel to see feed items.
176 |
177 |
178 | )}
179 |
180 |
181 |
186 | {
189 | const location = state.location
190 | if (location.type !== 'entry') return
191 | const { parent } = location
192 | const { type, key } = parent
193 | dispatch(
194 | updatePath(
195 | `/${type === 'category' ? 'categories' : 'sites'}/${key}`
196 | )
197 | )
198 | }}
199 | />
200 |
201 |
202 | )
203 | }
204 |
--------------------------------------------------------------------------------
/lib/reducers/path.ts:
--------------------------------------------------------------------------------
1 | import { Reducer } from 'react'
2 | import { LocationState, parseLocation } from '../utils'
3 |
4 | export const updatePath = (path: string) => ({
5 | type: 'UPDATE_PATH',
6 | value: path
7 | })
8 | type ActionUpdatePath = ReturnType
9 |
10 | type Actions = ActionUpdatePath
11 |
12 | interface PathState {
13 | pathname: string
14 | location: LocationState
15 | }
16 |
17 | export const PathReducer: Reducer = (
18 | state: PathState,
19 | action
20 | ) => {
21 | switch (action.type) {
22 | case 'UPDATE_PATH':
23 | const pathname = action.value
24 | if (pathname === state.pathname) {
25 | return state
26 | }
27 |
28 | const location = parseLocation(pathname)
29 | window.history.pushState({ location }, '', pathname)
30 | return {
31 | ...state,
32 | pathname,
33 | location
34 | }
35 | default:
36 | return state
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/lib/storage/file.ts:
--------------------------------------------------------------------------------
1 | import { Storage } from './types'
2 |
3 | export class FileStorage implements Storage {
4 | private basePath: string
5 |
6 | constructor(basePath: string) {
7 | this.basePath = `${basePath}/data`
8 | }
9 |
10 | async getCategories() {
11 | const response = await fetch(`${this.basePath}/categories.json`)
12 | if (response.status !== 200) throw new Error('Fail to load categories')
13 |
14 | const categories = await response.json()
15 | return categories.map((category) => ({
16 | title: category.name,
17 | totalEntries: category.totalEntries,
18 | sites: category.sites.map((site) => ({
19 | key: site.siteHash,
20 | title: site.title,
21 | totalEntries: site.totalEntries
22 | }))
23 | }))
24 | }
25 |
26 | async getCategoryEntries(category: string, page = 0) {
27 | const response = await fetch(`${this.basePath}/categories/${category}.json`)
28 | if (response.status !== 200)
29 | throw new Error('Fail to load category entries')
30 |
31 | const json = await response.json()
32 | return json.map((entry) => ({
33 | key: entry.entryHash,
34 | title: entry.title,
35 | site: {
36 | key: entry.siteHash,
37 | title: entry.siteTitle
38 | },
39 | timestamp: Math.floor(entry.date / 1000)
40 | }))
41 | }
42 |
43 | async getSiteEntries(siteKey: string, page = 0) {
44 | const response = await fetch(`${this.basePath}/sites/${siteKey}.json`)
45 | if (response.status !== 200) throw new Error('Fail to load site entries')
46 |
47 | const json = await response.json()
48 | const entries = json.entries
49 | return entries.map((entry) => ({
50 | key: entry.entryHash,
51 | title: entry.title,
52 | site: {
53 | key: entry.siteHash,
54 | title: entry.siteTitle
55 | },
56 | timestamp: Math.floor(entry.date / 1000)
57 | }))
58 | }
59 |
60 | async countAllEntries() {
61 | const response = await fetch(`${this.basePath}/categories.json`)
62 | if (response.status !== 200)
63 | throw new Error('Fail to load count all entries')
64 |
65 | const categories = await response.json()
66 | return categories.reduce(
67 | (sum: number, category) => sum + category.totalEntries,
68 | 0
69 | )
70 | }
71 |
72 | async countSiteEntries(siteKey: string) {
73 | const response = await fetch(`${this.basePath}/sites/${siteKey}.json`)
74 | if (response.status !== 200) throw new Error('Fail to load site entries')
75 | const json = await response.json()
76 | const entries = json.entries
77 | return entries.length
78 | }
79 |
80 | async countCategoryEntries(category: string) {
81 | const response = await fetch(`${this.basePath}/categories/${category}.json`)
82 | if (response.status !== 200)
83 | throw new Error('Fail to load category entries')
84 |
85 | const json = await response.json()
86 | return json.length
87 | }
88 |
89 | async getAllEntries(page = 0) {
90 | const response = await fetch(`${this.basePath}/all.json`)
91 | if (response.status !== 200) throw new Error('Fail to load all entries')
92 |
93 | const json = await response.json()
94 | return json.map((entry) => ({
95 | key: entry.entryHash,
96 | title: entry.title,
97 | site: {
98 | key: entry.siteHash,
99 | title: entry.siteTitle
100 | },
101 | timestamp: Math.floor(entry.date / 1000)
102 | }))
103 | }
104 |
105 | async getContent(key: string) {
106 | const response = await fetch(`${this.basePath}/entries/${key}.json`)
107 | if (response.status !== 200) throw new Error('Fail to load content')
108 |
109 | const json = await response.json()
110 | return {
111 | title: json.title,
112 | content: json.content,
113 | url: json.link,
114 | siteKey: json.siteHash,
115 | siteTitle: json.siteTitle,
116 | timestamp: Math.floor(json.date / 1000)
117 | }
118 | }
119 | }
120 |
--------------------------------------------------------------------------------
/lib/storage/index.ts:
--------------------------------------------------------------------------------
1 | import { FileStorage } from './file'
2 | import { SqliteStorage } from './sqlite'
3 | import { Storage } from './types'
4 |
5 | let storage: Storage | null = null
6 |
7 | export const getStorage = (basePath: string) => {
8 | if (!storage) {
9 | switch (process.env.NEXT_PUBLIC_STORAGE) {
10 | case 'sqlite': {
11 | storage = new SqliteStorage(basePath)
12 | break
13 | }
14 | case 'files':
15 | default: {
16 | storage = new FileStorage(basePath)
17 | break
18 | }
19 | }
20 | }
21 | return storage
22 | }
23 |
--------------------------------------------------------------------------------
/lib/storage/sqlite.ts:
--------------------------------------------------------------------------------
1 | import { createDbWorker, WorkerHttpvfs } from 'sql.js-httpvfs'
2 | import { SplitFileConfig } from 'sql.js-httpvfs/dist/sqlite.worker'
3 | import { Category, Content, SiteEntry, Storage } from './types'
4 |
5 | const CONTENT_PER_PAGE = 30
6 |
7 | function getDatabaseConfig(basePath: string): SplitFileConfig {
8 | return {
9 | from: 'inline',
10 | config: {
11 | serverMode: 'full',
12 | requestChunkSize: 4096,
13 | url: `${basePath}/data.sqlite3`
14 | }
15 | }
16 | }
17 |
18 | export class SqliteStorage implements Storage {
19 | private worker: WorkerHttpvfs | null = null
20 | private config: SplitFileConfig
21 | private basePath: string
22 |
23 | constructor(basePath: string) {
24 | this.config = getDatabaseConfig(basePath)
25 | this.basePath = basePath
26 | }
27 |
28 | private async getWorker(
29 | config: SplitFileConfig,
30 | basePath: string
31 | ): Promise {
32 | if (!this.worker) {
33 | this.worker = await createDbWorker(
34 | [config],
35 | `${basePath}/sqlite.worker.js`,
36 | `${basePath}/sql-wasm.wasm`
37 | )
38 | }
39 | return this.worker
40 | }
41 |
42 | async getCategories(): Promise {
43 | const worker = await this.getWorker(this.config, this.basePath)
44 | const categories = (await worker.db.query(
45 | `select category, siteKey, siteTitle from SiteCategories`
46 | )) as {
47 | category: string
48 | siteKey: string
49 | siteTitle: string
50 | }[]
51 | const categoryEntryCounts = (
52 | (await worker.db.query(
53 | `select category, count(*) as totalEntries from EntryCategories group by category`
54 | )) as { category: string; totalEntries: number }[]
55 | ).reduce((out, row) => {
56 | out[row.category] = row.totalEntries
57 | return out
58 | }, {} as { [key in string]: number })
59 | const siteEntryCounts = (
60 | (await worker.db.query(
61 | `select siteKey, count(*) as totalEntries from EntryCategories group by siteKey;`
62 | )) as { siteKey: string; totalEntries: number }[]
63 | ).reduce((out, row) => {
64 | out[row.siteKey] = row.totalEntries
65 | return out
66 | }, {} as { [key in string]: number })
67 |
68 | const map = categories.reduce((map, item) => {
69 | if (!map[item.category])
70 | map[item.category] = {
71 | totalEntries: categoryEntryCounts[item.category],
72 | sites: []
73 | }
74 | map[item.category].sites.push({
75 | key: item.siteKey,
76 | title: item.siteTitle,
77 | totalEntries: siteEntryCounts[item.siteKey]
78 | })
79 | return map
80 | }, {} as { [key in string]: { sites: { key: string; title: string; totalEntries: number }[]; totalEntries: number } })
81 | return Object.keys(map).map((title) => ({
82 | title,
83 | sites: map[title].sites,
84 | totalEntries: map[title].totalEntries
85 | }))
86 | }
87 |
88 | async getCategoryEntries(category: string, page = 0): Promise {
89 | const worker = await this.getWorker(this.config, this.basePath)
90 | const offset = page * CONTENT_PER_PAGE
91 | const list = (await worker.db.query(
92 | `select * from EntryCategories where category = ? and entryContentTime is not null order by entryContentTime desc limit ? offset ?`,
93 | [category, CONTENT_PER_PAGE, offset]
94 | )) as {
95 | category: string
96 | entryContentTime: number
97 | entryKey: string
98 | entryTitle: string
99 | siteKey: string
100 | siteTitle: string
101 | }[]
102 | return list.map((item) => ({
103 | key: item.entryKey,
104 | title: item.entryTitle,
105 | site: {
106 | key: item.siteKey,
107 | title: item.siteTitle
108 | },
109 | timestamp: item.entryContentTime
110 | }))
111 | }
112 |
113 | async getSiteEntries(siteKey: string, page = 0) {
114 | const worker = await this.getWorker(this.config, this.basePath)
115 | const offset = page * CONTENT_PER_PAGE
116 | const list = (await worker.db.query(
117 | `select entryKey, siteKey, siteTitle, entryTitle, entryContentTime from EntryCategories where siteKey = ? order by entryContentTime desc limit ? offset ?`,
118 | [siteKey, CONTENT_PER_PAGE, offset]
119 | )) as {
120 | entryKey: string
121 | siteKey: string
122 | siteTitle: string
123 | entryTitle: string
124 | entryContentTime?: number
125 | }[]
126 | return list.map((item) => ({
127 | key: item.entryKey,
128 | title: item.entryTitle,
129 | site: {
130 | key: item.siteKey,
131 | title: item.siteTitle
132 | },
133 | timestamp: item.entryContentTime
134 | }))
135 | }
136 |
137 | async countAllEntries() {
138 | const worker = await this.getWorker(this.config, this.basePath)
139 | const count = (await worker.db.query(
140 | `select count(*) as total from EntryCategories`
141 | )) as { total: number }[]
142 | return count[0].total
143 | }
144 |
145 | async countSiteEntries(siteKey: string) {
146 | const worker = await this.getWorker(this.config, this.basePath)
147 | const count = (await worker.db.query(
148 | `select count(*) as total from EntryCategories where siteKey = ?`,
149 | [siteKey]
150 | )) as { total: number }[]
151 | return count[0].total
152 | }
153 |
154 | async countCategoryEntries(category: string) {
155 | const worker = await this.getWorker(this.config, this.basePath)
156 | const count = (await worker.db.query(
157 | `select count(*) as total from EntryCategories where category = ?`,
158 | [category]
159 | )) as { total: number }[]
160 | return count[0].total
161 | }
162 |
163 | async getAllEntries(page = 0) {
164 | const worker = await this.getWorker(this.config, this.basePath)
165 | const offset = page * CONTENT_PER_PAGE
166 | const list = (await worker.db.query(
167 | `select entryKey, siteKey, siteTitle, entryTitle, entryContentTime from EntryCategories where entryContentTime is not null order by entryContentTime desc limit ? offset ?`,
168 | [CONTENT_PER_PAGE, offset]
169 | )) as {
170 | entryKey: string
171 | siteKey: string
172 | siteTitle: string
173 | entryTitle: string
174 | entryContentTime?: number
175 | }[]
176 | return list.map((item) => ({
177 | key: item.entryKey,
178 | title: item.entryTitle,
179 | site: {
180 | key: item.siteKey,
181 | title: item.siteTitle
182 | },
183 | timestamp: item.entryContentTime
184 | }))
185 | }
186 |
187 | async getContent(key: string): Promise {
188 | const worker = await this.getWorker(this.config, this.basePath)
189 | const entry = await worker.db.query(
190 | `select title, content, url, siteKey, siteTitle, contentTime as timestamp from Entries where key = ?`,
191 | [key]
192 | )
193 | if (entry.length === 0) return null
194 | return entry[0] as Content
195 | }
196 | }
197 |
--------------------------------------------------------------------------------
/lib/storage/types.ts:
--------------------------------------------------------------------------------
1 | export interface Category {
2 | title: string
3 | sites: {
4 | key: string
5 | title: string
6 | totalEntries: number
7 | }[]
8 | totalEntries: number
9 | }
10 |
11 | export interface SiteEntry {
12 | key: string
13 | title: string
14 | site: {
15 | key: string
16 | title: string
17 | }
18 | timestamp?: number
19 | }
20 |
21 | export interface Content {
22 | title: string
23 | content: string
24 | url: string
25 | siteKey: string
26 | siteTitle: string
27 | timestamp: number
28 | }
29 |
30 | export interface Storage {
31 | getCategories(): Promise
32 | getCategoryEntries(category: string, page?: number): Promise
33 | getSiteEntries(siteKey: string, page?: number): Promise
34 | countAllEntries(): Promise
35 | countSiteEntries(siteKey: string): Promise
36 | countCategoryEntries(category: string): Promise
37 | getAllEntries(page?: number): Promise
38 | getContent(key: string): Promise
39 | }
40 |
--------------------------------------------------------------------------------
/lib/utils.test.ts:
--------------------------------------------------------------------------------
1 | import test from 'ava'
2 | import { parseLocation } from './utils'
3 |
4 | test('#parseLocation returns category type', (t) => {
5 | t.deepEqual(parseLocation('/categories/Apple'), {
6 | type: 'category',
7 | category: 'Apple'
8 | })
9 | t.deepEqual(parseLocation('/categories/categoryKey'), {
10 | type: 'category',
11 | category: 'categoryKey'
12 | })
13 | })
14 |
15 | test('#parseLocation returns site type', (t) => {
16 | t.deepEqual(parseLocation('/sites/all'), {
17 | type: 'site',
18 | siteKey: 'all'
19 | })
20 | t.deepEqual(parseLocation('/sites/siteKey'), {
21 | type: 'site',
22 | siteKey: 'siteKey'
23 | })
24 | })
25 |
26 | test('#parseLocation returns enry type', (t) => {
27 | t.deepEqual(parseLocation('/sites/all/entries/entryKey'), {
28 | type: 'entry',
29 | entryKey: 'entryKey',
30 | parent: {
31 | type: 'site',
32 | key: 'all'
33 | }
34 | })
35 | t.deepEqual(parseLocation('/sites/siteKey/entries/entryKey'), {
36 | type: 'entry',
37 | entryKey: 'entryKey',
38 | parent: {
39 | type: 'site',
40 | key: 'siteKey'
41 | }
42 | })
43 | t.deepEqual(parseLocation('/categories/categoryKey/entries/entryKey'), {
44 | type: 'entry',
45 | entryKey: 'entryKey',
46 | parent: {
47 | type: 'category',
48 | key: 'categoryKey'
49 | }
50 | })
51 | })
52 |
53 | test('#parseLocation returns null as invalid path', (t) => {
54 | t.is(parseLocation('/sites/all/entries'), null)
55 | t.is(parseLocation('/sites/siteKey/entries/'), null)
56 | t.is(parseLocation('/sites/siteKey/somethingwrong/entryKey'), null)
57 | t.is(parseLocation('/somethingelse/siteKey/entries/entryKey'), null)
58 | t.is(parseLocation('/sites/'), null)
59 | t.is(parseLocation('/categories'), null)
60 | t.is(parseLocation('/somethingelse'), null)
61 | })
62 |
--------------------------------------------------------------------------------
/lib/utils.ts:
--------------------------------------------------------------------------------
1 | import React from 'react'
2 |
3 | import { getStorage } from './storage'
4 | import { Content } from './storage/types'
5 |
6 | export type PageState = 'categories' | 'entries' | 'article'
7 |
8 | export const articleClassName = (pageState: PageState): string => {
9 | switch (pageState) {
10 | case 'article':
11 | return 'block'
12 | default:
13 | return 'hidden md:block'
14 | }
15 | }
16 |
17 | export const entriesClassName = (pageState: PageState): string => {
18 | switch (pageState) {
19 | case 'entries':
20 | return 'md:block'
21 | case 'article':
22 | default:
23 | return 'hidden md:block'
24 | }
25 | }
26 |
27 | export const categoriesClassName = (pageState: PageState): string => {
28 | switch (pageState) {
29 | case 'article':
30 | case 'entries':
31 | return 'hidden md:block'
32 | default:
33 | return 'md:block'
34 | }
35 | }
36 |
37 | export type LocationState =
38 | | {
39 | type: 'category'
40 | category: string
41 | }
42 | | {
43 | type: 'site'
44 | siteKey: string
45 | }
46 | | {
47 | type: 'entry'
48 | entryKey: string
49 | parent: {
50 | type: 'category' | 'site'
51 | key: string
52 | }
53 | }
54 | | null
55 |
56 | export const parseLocation = (url: string): LocationState => {
57 | const parts = url.split('/')
58 | parts.shift()
59 |
60 | /**
61 | * Path structure
62 | *
63 | * - /categories/[name], showing entries in category (categories)
64 | * - /sites/all, showing all entries (sites)
65 | * - /sites/[name], showing specific site entries (sites)
66 | * - /categories/[name]/entries/[entry], showing specific entry (entry)
67 | * - /sites/all/entries/[entry], showing specific entry (entry)
68 | * - /sites/[name]/entries/[entry], showing specific entry (entry)
69 | */
70 | if (![2, 4].includes(parts.length)) return null
71 | if (parts.length === 2) {
72 | if (!parts[1].trim()) return null
73 | switch (parts[0]) {
74 | case 'categories':
75 | return {
76 | type: 'category',
77 | category: parts[1]
78 | }
79 | case 'sites':
80 | return {
81 | type: 'site',
82 | siteKey: parts[1]
83 | }
84 | default:
85 | return null
86 | }
87 | }
88 |
89 | if (!parts[3].trim()) return null
90 | if (!['categories', 'sites'].includes(parts[0])) return null
91 | if (parts[2] !== 'entries') return null
92 | return {
93 | type: 'entry',
94 | entryKey: parts[3],
95 | parent: {
96 | type: parts[0] === 'categories' ? 'category' : 'site',
97 | key: parts[1]
98 | }
99 | }
100 | }
101 |
102 | export const locationController = async (
103 | locationState: LocationState,
104 | basePath: string,
105 | setContent: React.Dispatch>,
106 | setPageState: React.Dispatch>
107 | ) => {
108 | if (!locationState) return null
109 |
110 | const storage = getStorage(basePath)
111 | switch (locationState.type) {
112 | case 'category': {
113 | setContent(null)
114 | setPageState('entries')
115 | return
116 | }
117 | case 'site': {
118 | setContent(null)
119 | setPageState('entries')
120 | return
121 | }
122 | case 'entry': {
123 | const { entryKey } = locationState
124 | const content = await storage.getContent(entryKey)
125 | if (!content) return
126 | setContent(content)
127 | setPageState('article')
128 | return
129 | }
130 | }
131 | }
132 |
--------------------------------------------------------------------------------
/next-env.d.ts:
--------------------------------------------------------------------------------
1 | ///
2 | ///
3 |
4 | // NOTE: This file should not be edited
5 | // see https://nextjs.org/docs/app/api-reference/config/typescript for more information.
6 |
--------------------------------------------------------------------------------
/next.config.ts:
--------------------------------------------------------------------------------
1 | import { NextConfig } from 'next'
2 |
3 | export default async () => {
4 | const core = await import('@actions/core')
5 | const customDomainName = core.getInput('customDomain')
6 | const githubRootName = process.env['GITHUB_REPOSITORY'] || ''
7 | const basePath = customDomainName
8 | ? ''
9 | : (githubRootName.split('/').length > 1 &&
10 | `/${githubRootName.split('/')[1]}`) ||
11 | ''
12 | process.env.NEXT_PUBLIC_BASE_PATH = basePath ?? '/'
13 |
14 | const nextConfig: NextConfig = {
15 | basePath,
16 | output: 'export'
17 | }
18 | return nextConfig
19 | }
20 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "feeds-fetcher",
3 | "version": "3.0.4",
4 | "description": "Websites feed fetcher and static feeds aggregator",
5 | "main": "index.js",
6 | "author": "Maythee Anegboonlap ",
7 | "repository": "github:llun/feeds",
8 | "license": "ISC",
9 | "scripts": {
10 | "dev": "next dev --turbopack",
11 | "build": "next build",
12 | "start": "next start",
13 | "load": "INPUT_OPMLFILE='feeds.opml' node -r @swc-node/register index.ts",
14 | "loadFile": "INPUT_OPMLFILE='feeds.opml' INPUT_STORAGETYPE='files' node -r @swc-node/register index.ts",
15 | "test": "ava"
16 | },
17 | "ava": {
18 | "extensions": [
19 | "ts",
20 | "tsx",
21 | "js"
22 | ],
23 | "failWithoutAssertions": false,
24 | "require": [
25 | "@swc-node/register"
26 | ]
27 | },
28 | "dependencies": {
29 | "@actions/core": "^1.11.1",
30 | "@actions/github": "^6.0.1",
31 | "@octokit/rest": "^22",
32 | "@swc-node/core": "^1.13.3",
33 | "@swc-node/register": "^1.10.10",
34 | "@swc/core": "^1.11.31",
35 | "@tailwindcss/postcss": "^4.1.8",
36 | "@tailwindcss/typography": "^0.5.16",
37 | "@vscode/sqlite3": "^5.1.2",
38 | "autoprefixer": "^10.4.21",
39 | "date-fns": "^4.1.0",
40 | "history": "^5.3.0",
41 | "html-minifier": "^4.0.0",
42 | "html-react-parser": "^5.2.5",
43 | "jsdom": "^26.1.0",
44 | "knex": "^3.1.0",
45 | "lodash": "^4.17.21",
46 | "lucide-react": "^0.511.0",
47 | "next": "^15.3.3",
48 | "next-themes": "^0.4.6",
49 | "node-fetch": "3",
50 | "postcss": "^8.5.3",
51 | "react": "^19.1.0",
52 | "react-dom": "^19.1.0",
53 | "sanitize-html": "^2.17.0",
54 | "shadcn": "2.5.0",
55 | "sql.js-httpvfs": "^0.8.12",
56 | "sqlite3": "^5.1.7",
57 | "tailwindcss": "^4.1.8",
58 | "tailwindcss-cli": "^0.1.2",
59 | "tw-animate-css": "^1.3.3",
60 | "xml2js": "^0.6.2"
61 | },
62 | "devDependencies": {
63 | "@types/html-minifier": "^4.0.5",
64 | "@types/jsdom": "^21.1.7",
65 | "@types/node": "^22.15.29",
66 | "@types/react": "^19.1.6",
67 | "@types/react-dom": "^19.1.5",
68 | "@types/sanitize-html": "^2.16.0",
69 | "@types/sinon": "^17.0.4",
70 | "@types/xml2js": "^0.4.14",
71 | "ava": "^6.3.0",
72 | "sinon": "^20.0.0",
73 | "typescript": "^5.8.3"
74 | },
75 | "resolutions": {
76 | "@octokit/rest": "22.0.0"
77 | },
78 | "packageManager": "yarn@4.8.1"
79 | }
80 |
--------------------------------------------------------------------------------
/postcss.config.mjs:
--------------------------------------------------------------------------------
1 | const config = {
2 | plugins: ['@tailwindcss/postcss']
3 | }
4 |
5 | export default config
6 |
--------------------------------------------------------------------------------
/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/llun/feeds/b7a105393a218b7b22fe7693f8c3a7a7e64960d3/public/favicon.ico
--------------------------------------------------------------------------------
/public/logo.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
13 |
--------------------------------------------------------------------------------
/public/site.webmanifest:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Feeds Reader",
3 | "short_name": "Feeds",
4 | "icons": [
5 | {
6 | "src": "/favicon.ico",
7 | "sizes": "32x32",
8 | "type": "image/x-icon"
9 | }
10 | ],
11 | "theme_color": "#000000",
12 | "background_color": "#ffffff",
13 | "display": "standalone"
14 | }
15 |
--------------------------------------------------------------------------------
/public/sql-wasm.wasm:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/llun/feeds/b7a105393a218b7b22fe7693f8c3a7a7e64960d3/public/sql-wasm.wasm
--------------------------------------------------------------------------------
/public/vercel.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
1 | # Github Action for building static feed aggregator site
2 |
3 | A GitHub Action that fetches RSS/Atom feeds from an OPML file and builds a static site aggregating all the feed content. This project creates a modern, responsive feed reader as a static site.
4 |
5 | ## Usage
6 |
7 | To use it, create a new repository and add an OPML file named `feeds.opml` with your list of website RSS/Atom feeds. Create a GitHub workflow like the one below to fetch data and store it in a contents branch:
8 |
9 | ```
10 | name: Schedule
11 |
12 | on:
13 | schedule:
14 | - cron: '0 * * * *'
15 |
16 | jobs:
17 | playground:
18 | runs-on: ubuntu-latest
19 | name: Test
20 | steps:
21 | - name: Run Action
22 | uses: llun/feeds@3.0.0
23 | ```
24 |
25 | After this, enable GitHub Pages on the `contents` branch and the content will be available on that page.
26 |
27 | ## Configurations
28 |
29 | This action can be configured to use a custom domain and different types of storage. Here are the available configuration options:
30 |
31 | - `customDomain`: Specifies the custom domain for the feeds site. Required when generating a static site as it's needed to generate the `CNAME` file.
32 | - `branch`: Branch where the static site will be generated. The default value is `contents`. This is the branch you'll need to point the repository's GitHub Pages to.
33 | - `storageType`: **(Default is `files`)** Content storage type, currently supports `files` and `sqlite`.
34 | - `files`: Stores all feed contents in a JSON tree structure
35 | - `sqlite`: Stores content in a SQLite database that the client will download using HTTP chunks
36 | - `opmlFile`: Name of the OPML file containing the list of sites you want to include in your feed site.
37 |
38 | ### Sample Configuration
39 |
40 | ```
41 | name: Schedule
42 |
43 | on:
44 | schedule:
45 | - cron: '0 * * * *'
46 |
47 | jobs:
48 | playground:
49 | runs-on: ubuntu-latest
50 | name: Generate Feeds
51 | steps:
52 | - name: Run Action
53 | uses: llun/feeds@3.0.0
54 | with:
55 | storageType: files
56 | opmlFile: site.opml
57 | branch: public
58 | ```
59 |
60 | ## Sample Sites
61 |
62 | - https://feeds.llun.dev
63 | - https://llun.github.io/feeds/
64 |
65 | ## Sample Repository
66 |
67 | - https://github.com/llunbot/personal-feeds
--------------------------------------------------------------------------------
/tailwind.config.ts:
--------------------------------------------------------------------------------
1 | import type { Config } from 'tailwindcss'
2 | import typography from '@tailwindcss/typography'
3 |
4 | const config: Config = {
5 | content: [
6 | './app/**/*.{js,ts,jsx,tsx}',
7 | './lib/components/**/*.{js,ts,jsx,tsx}',
8 | './lib/components2/**/*.{js,ts,jsx,tsx}',
9 | './lib/page.{js,jsx,ts,tsx}',
10 | './lib/utils.ts'
11 | ],
12 | theme: {
13 | extend: {}
14 | },
15 | plugins: [typography]
16 | }
17 |
18 | export default config
19 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "esnext",
4 | "lib": [
5 | "dom",
6 | "dom.iterable",
7 | "esnext"
8 | ],
9 | "allowJs": true,
10 | "skipLibCheck": true,
11 | "strict": false,
12 | "forceConsistentCasingInFileNames": true,
13 | "noEmit": true,
14 | "incremental": true,
15 | "esModuleInterop": true,
16 | "module": "nodenext",
17 | "moduleResolution": "NodeNext",
18 | "resolveJsonModule": true,
19 | "isolatedModules": true,
20 | "jsx": "preserve",
21 | "plugins": [
22 | {
23 | "name": "next"
24 | }
25 | ]
26 | },
27 | "include": [
28 | "**/*.ts",
29 | "**/*.tsx",
30 | "next-env.d.ts",
31 | ".next/types/**/*.ts"
32 | ],
33 | "exclude": [
34 | "node_modules"
35 | ]
36 | }
37 |
--------------------------------------------------------------------------------