├── .editorconfig ├── .github ├── FUNDING.yml └── workflows │ └── cron.yml ├── .gitignore ├── .vscode └── settings.json ├── LICENSE ├── Makefile ├── README.md ├── adapters ├── api.ts └── github.ts ├── book.toml ├── build-by-source.ts ├── build-by-time.ts ├── build-html.ts ├── build-search.ts ├── build.ts ├── config.yml ├── constant.ts ├── db-meta-init.json ├── db.ts ├── deno.json ├── deps.ts ├── error.ts ├── example ├── books.md ├── data │ ├── 1-raw │ │ ├── EbookFoundation │ │ │ └── free-programming-books │ │ │ │ └── books │ │ │ │ ├── markdownlist_free-programming-books-langs.md │ │ │ │ └── markdownlist_free-programming-books-zh.md │ │ └── heading.md │ ├── 2-formated │ │ └── EbookFoundation │ │ │ └── free-programming-books │ │ │ ├── books │ │ │ └── free-programming-books-zh.json │ │ │ └── readme.json │ ├── data.json │ └── github_awesome_nodejs.json ├── free-for-dev.md ├── mac.md ├── public-apis-simple.md ├── public │ └── EbookFoundation │ │ └── free-programming-books │ │ ├── books │ │ ├── free-programming-books-zh.md │ │ └── free-programming-books-zh │ │ │ ├── feed.json │ │ │ └── index.html │ │ ├── index.html │ │ └── readme.md ├── repo-meta.json └── simple.md ├── fetch-sources.ts ├── format-category.ts ├── format-markdown-item.ts ├── get-git-blame.ts ├── init-db.ts ├── init-items.ts ├── interface.ts ├── lib └── gemoji.js ├── local.kak ├── log.ts ├── main.ts ├── migrations ├── old-meta.json └── to-new-config.ts ├── morsels_config.json ├── package-lock.json ├── parser ├── markdown │ ├── heading.ts │ ├── list.ts │ ├── list_test.ts │ ├── table.ts │ ├── table_test.ts │ └── util.ts └── mod.ts ├── render-markdown.ts ├── render-markdown_test.ts ├── scripts ├── check-404.ts ├── install-mdbook.sh └── install-morsels.sh ├── serve-markdown.ts ├── serve-public.ts ├── static ├── 404.html ├── CNAME ├── android-chrome-192x192.png ├── android-chrome-512x512.png ├── apple-touch-icon.png ├── badge-flat.svg ├── badge.svg ├── browserconfig.xml ├── favicon-16x16.png ├── favicon-32x32.png ├── favicon.ico ├── icon.png ├── icon.svg ├── mstile-150x150.png ├── robots.txt ├── safari-pinned-tab.svg └── site.webmanifest ├── tal.ts ├── templates ├── day.md.mu ├── file-by-day.md.mu ├── index.html.mu ├── root-readme.md.mu ├── search.html.mu └── week.md.mu ├── test-deps.ts ├── util.ts └── util_test.ts /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [config.yml] 4 | indent_style = space 5 | indent_size = 2 6 | trim_trailing_whitespace = true 7 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: theowenyoung 2 | -------------------------------------------------------------------------------- /.github/workflows/cron.yml: -------------------------------------------------------------------------------- 1 | name: cron tasks 2 | on: 3 | repository_dispatch: 4 | types: [schedule] 5 | workflow_dispatch: 6 | inputs: 7 | args: 8 | default: "" 9 | description: "args to build" 10 | type: string 11 | required: false 12 | push: 13 | branches: 14 | - main 15 | paths-ignore: 16 | - "**.md" 17 | schedule: 18 | - cron: "9 */12 * * *" 19 | jobs: 20 | build: 21 | runs-on: ubuntu-latest 22 | concurrency: build 23 | steps: 24 | - name: Check out repository code 25 | uses: actions/checkout@v3 26 | 27 | - id: install-aws-cli 28 | uses: unfor19/install-aws-cli-action@v1 29 | with: 30 | version: 2.22.35 # default 31 | verbose: false # 
default 32 | arch: amd64 # allowed values: amd64, arm64 33 | - uses: actions/setup-node@v3 34 | with: 35 | node-version: 22 36 | cache: "npm" 37 | - name: install wrangler 38 | run: npm install -g wrangler 39 | - uses: denoland/setup-deno@v1 40 | with: 41 | deno-version: v1.29.4 42 | - uses: actions/cache@v3 43 | with: 44 | path: | 45 | ~/.deno 46 | ~/.cache/deno 47 | key: ${{ runner.os }}-deno-${{ hashFiles('**/*deps.ts') }} 48 | - run: make prod-load 49 | env: 50 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 51 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 52 | AWS_DEFAULT_REGION: ${{secrets.AWS_DEFAULT_REGION}} 53 | AWS_ENDPOINT: ${{secrets.AWS_ENDPOINT}} 54 | - run: make install 55 | - run: "make prod-build args='${{ github.event.inputs.args }}'" 56 | id: source 57 | continue-on-error: true 58 | env: 59 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 60 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 61 | AWS_DEFAULT_REGION: ${{secrets.AWS_DEFAULT_REGION}} 62 | AWS_ENDPOINT: ${{secrets.AWS_ENDPOINT}} 63 | PUSH: 1 64 | DIST_REPO: ${{ secrets.DIST_REPO }} 65 | PERSONAL_GITHUB_TOKEN: ${{ secrets.PERSONAL_GITHUB_TOKEN }} 66 | - name: upload files 67 | run: make prod-upload 68 | env: 69 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 70 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 71 | AWS_DEFAULT_REGION: ${{secrets.AWS_DEFAULT_REGION}} 72 | AWS_ENDPOINT: ${{secrets.AWS_ENDPOINT}} 73 | - name: upload temp folder to github action for debug 74 | uses: actions/upload-artifact@v4 75 | with: 76 | name: temp 77 | path: temp 78 | if-no-files-found: ignore 79 | - name: throw if build failed 80 | if: steps.source.outcome == 'failure' 81 | run: | 82 | echo "::error::prod-build failed" 83 | exit 1 84 | - name: Publish pages 85 | if: true 86 | run: make prod-publish 87 | env: 88 | CLOUDFLARE_ACCOUNT_ID: ${{secrets.CLOUDFLARE_ACCOUNT_ID}} 89 | CLOUDFLARE_API_TOKEN: ${{secrets.CLOUDFLARE_API_TOKEN}} 90 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /prod-current/ 2 | deno.lock 3 | *.zip 4 | /bin/ 5 | /book/ 6 | /current/ 7 | /temp/ 8 | /cache/ 9 | /prod-temp/ 10 | /prod-*/ 11 | /public/ 12 | /prod-public/ 13 | /db/ 14 | temp-* 15 | # Logs 16 | logs 17 | *.log 18 | npm-debug.log* 19 | yarn-debug.log* 20 | yarn-error.log* 21 | lerna-debug.log* 22 | .pnpm-debug.log* 23 | 24 | # Diagnostic reports (https://nodejs.org/api/report.html) 25 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 26 | 27 | # Runtime data 28 | pids 29 | *.pid 30 | *.seed 31 | *.pid.lock 32 | 33 | # Directory for instrumented libs generated by jscoverage/JSCover 34 | lib-cov 35 | 36 | # Coverage directory used by tools like istanbul 37 | coverage 38 | *.lcov 39 | 40 | # nyc test coverage 41 | .nyc_output 42 | 43 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 44 | .grunt 45 | 46 | # Bower dependency directory (https://bower.io/) 47 | bower_components 48 | 49 | # node-waf configuration 50 | .lock-wscript 51 | 52 | # Compiled binary addons (https://nodejs.org/api/addons.html) 53 | build/Release 54 | 55 | # Dependency directories 56 | node_modules/ 57 | jspm_packages/ 58 | 59 | # Snowpack dependency directory (https://snowpack.dev/) 60 | web_modules/ 61 | 62 | # TypeScript cache 63 | *.tsbuildinfo 64 | 65 | # Optional npm cache directory 66 | .npm 67 | 68 | # Optional eslint cache 69 | .eslintcache 70 | 71 | # 
Optional stylelint cache 72 | .stylelintcache 73 | 74 | # Microbundle cache 75 | .rpt2_cache/ 76 | .rts2_cache_cjs/ 77 | .rts2_cache_es/ 78 | .rts2_cache_umd/ 79 | 80 | # Optional REPL history 81 | .node_repl_history 82 | 83 | # Output of 'npm pack' 84 | *.tgz 85 | 86 | # Yarn Integrity file 87 | .yarn-integrity 88 | 89 | # dotenv environment variable files 90 | .env 91 | .env.development.local 92 | .env.test.local 93 | .env.production.local 94 | .env.local 95 | 96 | # parcel-bundler cache (https://parceljs.org/) 97 | .cache 98 | .parcel-cache 99 | 100 | # Next.js build output 101 | .next 102 | out 103 | 104 | # Nuxt.js build / generate output 105 | .nuxt 106 | dist 107 | 108 | # Gatsby files 109 | .cache/ 110 | # Comment in the public line in if your project uses Gatsby and not Next.js 111 | # https://nextjs.org/blog/next-9-1#public-directory-support 112 | # public 113 | 114 | # vuepress build output 115 | .vuepress/dist 116 | 117 | # vuepress v2.x temp and cache directory 118 | .temp 119 | .cache 120 | 121 | # Docusaurus cache and generated files 122 | .docusaurus 123 | 124 | # Serverless directories 125 | .serverless/ 126 | 127 | # FuseBox cache 128 | .fusebox/ 129 | 130 | # DynamoDB Local files 131 | .dynamodb/ 132 | 133 | # TernJS port file 134 | .tern-port 135 | 136 | # Stores VSCode versions used for testing VSCode extensions 137 | .vscode-test 138 | 139 | # yarn v2 140 | .yarn/cache 141 | .yarn/unplugged 142 | .yarn/build-state.yml 143 | .yarn/install-state.gz 144 | .pnp.* 145 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "deno.enable": true, 3 | "deno.unstable": true 4 | } 5 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | ifneq (,$(wildcard ./.env)) 2 | include .env 3 | export 4 | endif 5 | 6 | .Phony: start 7 | start: 8 | deno run -A tal.ts "ripienaar/free-for-dev" 9 | 10 | .Phony: startall 11 | startall: 12 | deno run -A tal.ts 13 | 14 | .Phony: build 15 | build: 16 | deno run -A tal.ts --html --no-serve 17 | 18 | .Phony: prod-start 19 | prod-start: 20 | FORCE=1 PROD=1 deno run -A tal.ts ${args} 21 | .Phony: prod-builddemo 22 | prod-builddemo: 23 | FORCE=1 PROD=1 deno run -A tal.ts --no-fetch --html ripienaar/free-for-dev ${args} 24 | .Phony: prod-buildindex 25 | prod-buildindex: 26 | PROD=1 deno run -A tal.ts --no-fetch --html ripienaar/free-for-dev 27 | 28 | .Phony: prod-buildsource 29 | prod-buildsource: 30 | FORCE=1 PROD=1 deno run -A tal.ts --no-fetch --html ${source} 31 | 32 | .Phony: prod-build 33 | prod-build: 34 | PROD=1 deno run -A tal.ts --html --no-serve ${args} && make prod-buildsearch 35 | 36 | .Phony: prod-run 37 | prod-run: 38 | FORCE=1 PROD=1 deno run -A tal.ts --html ${args} 39 | 40 | 41 | .Phony: startsource 42 | startsource: 43 | deno run -A tal.ts --html ${source} 44 | 45 | .Phony: prod-startsource 46 | prod-startsource: 47 | PROD=1 deno run -A tal.ts ${source} 48 | 49 | .Phony: all 50 | all: 51 | FORCE=1 deno run -A tal.ts --html "ripienaar/free-for-dev" 52 | 53 | .Phony: allall 54 | allall: 55 | FORCE=1 deno run -A tal.ts --html 56 | .Phony: startallforce 57 | startallforce: 58 | deno run -A tal.ts --force 59 | .Phony: fetch 60 | fetch: 61 | deno run -A tal.ts --no-markdown --no-serve "ripienaar/free-for-dev" --force 62 | .Phony: fetchall 63 | fetchall: 64 | deno run -A tal.ts 
--no-markdown --no-serve 65 | .Phony: fetchsource 66 | fetchsource: 67 | deno run -A tal.ts --no-markdown --no-serve ${source} 68 | 69 | .Phony: buildmarkdown 70 | buildmarkdown: 71 | FORCE=1 deno run -A tal.ts --no-fetch --no-serve "ripienaar/free-for-dev" 72 | .Phony: buildsource 73 | buildsource: 74 | FORCE=1 deno run -A tal.ts --no-serve --no-fetch ${source} 75 | .Phony: buildmarkdownall 76 | buildmarkdownall: 77 | FORCE=1 deno run -A tal.ts --no-fetch --no-serve 78 | 79 | .Phony: serve 80 | serve: 81 | deno run -A --watch=tal.ts,templates/ tal.ts --no-fetch --no-markdown 82 | 83 | .Phony: run 84 | run: 85 | LIMIT=3 FORCE=1 deno run -A tal.ts --no-fetch --html 86 | 87 | .Phony: siteall 88 | siteall: 89 | FORCE=1 deno run -A tal.ts --no-fetch --html 90 | .Phony: initdb 91 | initdb: 92 | [[ ! -d /db/meta.json ]] && mkdir -p ./db && cat db-meta-init.json > ./db/meta.json && deno run -A init-db.ts 93 | 94 | .Phony: prod-initdb 95 | prod-initdb: 96 | [[ ! -d /prod-db/meta.json ]] && mkdir -p ./prod-db && cat db-meta-init.json > ./prod-db/meta.json && PROD=1 deno run -A init-db.ts 97 | 98 | .Phony: clean 99 | clean: 100 | rm -rf ./db rm -rf ./public && rm -rf ./dist && make initdb 101 | 102 | 103 | .Phony: cleanall 104 | cleanall: 105 | rm -rf ./db rm -rf ./public && rm -rf ./dist && rm -rf ./prod-db && rm -rf ./prod-dist && rm -rf ./prod-public && make initdb && make prod-initdb 106 | .Phony: push 107 | push: 108 | cd -- ./dist/repo && git add . && git commit -m "update" && git push 109 | 110 | .Phony: testbooks 111 | testbooks: 112 | deno test -A parsers/markdownlist_test.ts --filter="#2" 113 | .Phony: buildsite 114 | buildsite: 115 | FORCE=1 deno run -A tal.ts --no-fetch --html "ripienaar/free-for-dev" 116 | .Phony: buildsitesource 117 | buildsitesource: 118 | FORCE=1 deno run -A tal.ts --no-fetch --html ${source} 119 | .Phony: buildsiteall 120 | buildsiteall: 121 | FORCE=1 deno run -A tal.ts --no-fetch --html 122 | 123 | .Phony: prod-buildsiteall 124 | prod-buildsiteall: 125 | PROD=1 deno run -A tal.ts --no-fetch --html --no-serve 126 | .Phony: buildhtmlall 127 | buildhtmlall: 128 | deno run -A tal.ts --no-fetch --no-markdown --html --no-serve 129 | 130 | .Phony: servepublic 131 | servepublic: 132 | deno run -A https://deno.land/std@0.159.0/http/file_server.ts ./public -p 8000 133 | 134 | 135 | .Phony: install 136 | install: 137 | ./scripts/install-morsels.sh 138 | 139 | .Phony: servebook 140 | servebook: 141 | ./bin/mdbook serve --port 8000 142 | 143 | .Phony: buildbook 144 | buildbook: 145 | ./bin/mdbook build 146 | .Phony: publish 147 | publish: 148 | wrangler pages publish db/public --project-name trackawesomelist 149 | 150 | .Phony: prod-publish 151 | prod-publish: 152 | wrangler pages publish prod-db/public --project-name trackawesomelist 153 | 154 | .Phony: prod-upload 155 | prod-upload: 156 | make prod-zipdb && aws s3 cp ./prod-db.zip s3://trackawesomelist/prod-db.zip --endpoint-url $(AWS_ENDPOINT) 157 | 158 | .Phony: prod-load 159 | prod-load: 160 | aws s3 cp s3://trackawesomelist/prod-db.zip ./prod-db.zip --endpoint-url $(AWS_ENDPOINT) && make prod-unzipdb 161 | 162 | .Phony: prod-zipdb 163 | prod-zipdb: 164 | zip -r -q -FS prod-db.zip ./prod-db -x "*/.*" 165 | 166 | .Phony: prod-unzipdb 167 | prod-unzipdb: 168 | unzip -q -o prod-db.zip 169 | 170 | .Phony: prod-dbclean 171 | prod-dbclean: 172 | rm -rf ./prod-db/public && rm -rf ./prod-db/repos && rm ./prod-db/index.json && rm ./prod-db/meta.json && make prod-initdb 173 | 174 | .Phony: buildsearch 175 | buildsearch: 176 | 
./bin/morsels ./db/public ./temp-morsels -c morsels_config.json && deno run -A ./build-search.ts 177 | .Phony: prod-buildsearch 178 | prod-buildsearch: 179 | ./bin/morsels ./prod-db/public ./temp-morsels -c morsels_config.json && PROD=1 deno run -A ./build-search.ts 180 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Track Awesome List Source 2 | 3 | This repo is for generating [trackawesomelist](https://github.com/trackawesomelist/trackawesomelist), runing with Deno, `json` files as database. 4 | 5 | > Documentation is being improved. 6 | 7 | ## Dev 8 | 9 | 1. Install [Deno](https://deno.land/manual@v1.26.2/getting_started/installation) 10 | 2. `git clone git@github.com:trackawesomelist/trackawesomelist-source.git` 11 | 3. Add github awesome repo to `config.yml` -> `sources` 12 | 4. `make startsource source=owner/repo` 13 | 14 | Open 15 | 16 | ### Rebuild Single Repo 17 | 18 | `make prod-build args="--rebuild xxx/repo"` 19 | -------------------------------------------------------------------------------- /adapters/api.ts: -------------------------------------------------------------------------------- 1 | import { RepoMeta, Source } from "../interface.ts"; 2 | export default class API { 3 | source: Source; 4 | constructor(source: Source) { 5 | this.source = source; 6 | } 7 | getConent(_filePath: string): Promise { 8 | return Promise.reject("not implemented"); 9 | } 10 | getRepoMeta(): Promise { 11 | return Promise.reject("not implemented"); 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /adapters/github.ts: -------------------------------------------------------------------------------- 1 | import API from "./api.ts"; 2 | import { RepoMeta, RepoMetaOverride, Source } from "../interface.ts"; 3 | import { base64 } from "../deps.ts"; 4 | import { 5 | got, 6 | gotWithCache, 7 | gotWithDbCache, 8 | isUseCache, 9 | readTextFile, 10 | } from "../util.ts"; 11 | 12 | export default class github extends API { 13 | repo: string; 14 | headers: Headers; 15 | apiPrefix = `https://api.github.com`; 16 | constructor(source: Source) { 17 | super(source); 18 | const githubToken = Deno.env.get("PERSONAL_GITHUB_TOKEN"); 19 | if (!githubToken) { 20 | throw new Error("PERSONAL_GITHUB_TOKEN is not set"); 21 | } 22 | const headerAuthorization = `token ${githubToken}`; 23 | this.headers = new Headers({ 24 | Authorization: headerAuthorization, 25 | }); 26 | const urlObj = new URL(source.url); 27 | this.repo = urlObj.pathname.slice(1); 28 | if (this.repo.endsWith(".git")) { 29 | this.repo = this.repo.slice(0, -4); 30 | } 31 | if (this.repo.endsWith("/")) { 32 | this.repo = this.repo.slice(0, -1); 33 | } 34 | } 35 | getCloneUrl(): string { 36 | return `https://github.com/${this.repo}.git`; 37 | } 38 | async getConent(filePath: string, branch?: string): Promise { 39 | const baseurl = `${this.apiPrefix}/repos/${this.repo}/contents/${filePath}`; 40 | const baseUrlObj = new URL(baseurl); 41 | if (branch) { 42 | baseUrlObj.searchParams.set("ref", branch); 43 | } 44 | const url = baseUrlObj.toString(); 45 | 46 | let result; 47 | if (isUseCache()) { 48 | result = await gotWithCache( 49 | url, 50 | { 51 | headers: this.headers, 52 | }, 53 | { 54 | expires: 4 * 60 * 60 * 1000, 55 | }, 56 | ); 57 | } else { 58 | result = await got( 59 | url, 60 | { 61 | headers: this.headers, 62 | }, 63 | ); 64 | } 65 | 66 | const data = JSON.parse(result); 67 
| const content = base64.decode(data.content); 68 | const finalContent = new TextDecoder().decode(content); 69 | return finalContent; 70 | } 71 | async getRepoMeta(overrieds?: RepoMetaOverride): Promise { 72 | const url = `${this.apiPrefix}/repos/${this.repo}`; 73 | const json = await gotWithDbCache( 74 | url, 75 | { 76 | headers: this.headers, 77 | }, 78 | ); 79 | const data = JSON.parse(json); 80 | 81 | let repoMeta: RepoMeta = { 82 | default_branch: data.default_branch, 83 | name: data.name, 84 | description: data.description, 85 | url: data.html_url, 86 | language: data.language, 87 | stargazers_count: data.stargazers_count, 88 | subscribers_count: data.subscribers_count, 89 | forks_count: data.forks_count, 90 | tags: data.topics, 91 | updated_at: data.pushed_at, 92 | created_at: data.created_at, 93 | checked_at: new Date().toISOString(), 94 | }; 95 | // add overrides 96 | if (overrieds) { 97 | repoMeta = Object.assign(repoMeta, overrieds); 98 | } 99 | return repoMeta; 100 | } 101 | } 102 | -------------------------------------------------------------------------------- /book.toml: -------------------------------------------------------------------------------- 1 | [book] 2 | title = "Track Awesome List" 3 | description = "Track Awesome List" 4 | src = "dist/content" 5 | language = "zh" 6 | authors = ["Owen Young"] 7 | 8 | [build] 9 | create-missing = true 10 | 11 | [preprocessor.morsels] 12 | command = "mdbook-morsels" 13 | 14 | [preprocessor.toc] 15 | command = "mdbook-toc" 16 | renderer = ["html"] 17 | 18 | [output.html] 19 | git-repository-url = "https://github.com/theowenyoung/blog" 20 | edit-url-template = "https://github.com/theowenyoung/blog/edit/main/{path}" 21 | 22 | [output.html.fold] 23 | enable = true # whether or not to enable section folding 24 | level = 1 25 | [output.html.search] 26 | enable = false 27 | [output.html.print] 28 | enable = false 29 | 30 | # Plugin configuration options (optional) 31 | [output.morsels] 32 | # Relative path to a Morsels indexer configuration file from the project directory. 33 | # The config file will also automatically be created here if it dosen't exist. 
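# (For reference, the Makefile's `buildsearch` target runs this indexer as:
#   ./bin/morsels ./db/public ./temp-morsels -c morsels_config.json)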
34 | config = "morsels_config.json" 35 | -------------------------------------------------------------------------------- /build-by-source.ts: -------------------------------------------------------------------------------- 1 | import { 2 | CSS, 3 | groupBy, 4 | jsonfeedToAtom, 5 | mustache, 6 | path, 7 | render, 8 | } from "./deps.ts"; 9 | import { 10 | BuildOptions, 11 | BuiltMarkdownInfo, 12 | DayInfo, 13 | Feed, 14 | FeedInfo, 15 | FeedItem, 16 | FileInfo, 17 | Item, 18 | Nav, 19 | RunOptions, 20 | WeekOfYear, 21 | } from "./interface.ts"; 22 | import { 23 | CONTENT_DIR, 24 | FEED_NAV, 25 | HOME_NAV, 26 | INDEX_HTML_PATH, 27 | INDEX_MARKDOWN_PATH, 28 | SEARCH_NAV, 29 | SPONSOR_NAV, 30 | SPONSOR_URL, 31 | SUBSCRIBE_NAV, 32 | SUBSCRIPTION_URL, 33 | } from "./constant.ts"; 34 | import { 35 | formatHumanTime, 36 | formatNumber, 37 | getBaseFeed, 38 | getDistRepoContentPath, 39 | getDomain, 40 | getPublicPath, 41 | getRepoHTMLURL, 42 | nav1ToHtml, 43 | nav1ToMarkdown, 44 | nav2ToHtml, 45 | nav2ToMarkdown, 46 | parseDayInfo, 47 | parseWeekInfo, 48 | pathnameToFeedUrl, 49 | pathnameToFilePath, 50 | pathnameToOverviewFilePath, 51 | pathnameToUrl, 52 | pathnameToWeekFilePath, 53 | readTextFile, 54 | relativedFilesToHtml, 55 | relativedFilesToMarkdown, 56 | slugy, 57 | startDateOfWeek, 58 | writeJSONFile, 59 | writeTextFile, 60 | } from "./util.ts"; 61 | import log from "./log.ts"; 62 | import { getFile, getHtmlFile, getItems } from "./db.ts"; 63 | import renderMarkdown from "./render-markdown.ts"; 64 | let htmlIndexTemplateContent = ""; 65 | export default async function main( 66 | fileInfo: FileInfo, 67 | runOptions: RunOptions, 68 | buildOptions: BuildOptions, 69 | ): Promise { 70 | const config = runOptions.config; 71 | const siteConfig = config.site; 72 | const dbMeta = buildOptions.dbMeta; 73 | const dbSources = dbMeta.sources; 74 | const sourceConfig = fileInfo.sourceConfig; 75 | const sourceCategory = sourceConfig.category; 76 | const sourceMeta = fileInfo.sourceMeta; 77 | const filepath = fileInfo.filepath; 78 | const fileConfig = sourceConfig.files[filepath]; 79 | const repoMeta = sourceMeta.meta; 80 | const sourceIdentifier = sourceConfig.identifier; 81 | const dbSource = dbSources[sourceIdentifier]; 82 | const originalFilepath = fileConfig.filepath; 83 | let commitMessage = ``; 84 | const sourceFileConfig = fileConfig; 85 | // get items 86 | 87 | const items = await getItems(sourceIdentifier, originalFilepath); 88 | // const getDbFinishTime = Date.now(); 89 | // log.debug(`get db items cost ${getDbFinishTime - startTime}ms`); 90 | const dbFileMeta = dbSource.files[originalFilepath]; 91 | const domain = getDomain(); 92 | const isBuildMarkdown = runOptions.markdown; 93 | const isBuildHtml = runOptions.html; 94 | if (!isBuildMarkdown && !isBuildHtml) { 95 | return { 96 | commitMessage: "", 97 | }; 98 | } 99 | if (!htmlIndexTemplateContent) { 100 | htmlIndexTemplateContent = await readTextFile("./templates/index.html.mu"); 101 | } 102 | let relativeFolder = sourceIdentifier; 103 | if (!sourceFileConfig.index) { 104 | // to README.md path 105 | const filepathExtname = path.extname(originalFilepath); 106 | const originalFilepathWithoutExt = originalFilepath.slice( 107 | 0, 108 | -filepathExtname.length, 109 | ); 110 | relativeFolder = path.join(relativeFolder, originalFilepathWithoutExt); 111 | } 112 | const baseFeed = getBaseFeed(); 113 | for (let i = 0; i < 2; i++) { 114 | const buildMarkdownStartTime = Date.now(); 115 | const isDay = i === 0; 116 | const nav1: Nav[] = [ 117 | { 118 | 
name: HOME_NAV, 119 | markdown_url: "/" + INDEX_MARKDOWN_PATH, 120 | url: "/", 121 | }, 122 | { 123 | name: SEARCH_NAV, 124 | url: pathnameToUrl("/search/"), 125 | }, 126 | { 127 | name: FEED_NAV, 128 | url: pathnameToFeedUrl(fileConfig.pathname, isDay), 129 | }, 130 | { 131 | name: SUBSCRIBE_NAV, 132 | url: SUBSCRIPTION_URL, 133 | }, 134 | { 135 | name: SPONSOR_NAV, 136 | url: SPONSOR_URL, 137 | }, 138 | 139 | { 140 | name: `😺 ${sourceIdentifier}`, 141 | url: sourceFileConfig.index ? repoMeta.url : getRepoHTMLURL( 142 | repoMeta.url, 143 | repoMeta.default_branch, 144 | originalFilepath, 145 | ), 146 | }, 147 | { 148 | name: `⭐ ${formatNumber(repoMeta.stargazers_count)}`, 149 | }, 150 | { 151 | name: `🏷️ ${sourceCategory}`, 152 | }, 153 | ]; 154 | 155 | const nav2: Nav[] = [ 156 | { 157 | name: "Daily", 158 | markdown_url: pathnameToFilePath(fileConfig.pathname), 159 | url: fileConfig.pathname, 160 | active: i === 0, 161 | }, 162 | { 163 | name: "Weekly", 164 | 165 | markdown_url: pathnameToWeekFilePath(fileConfig.pathname), 166 | url: fileConfig.pathname + "week/", 167 | active: i === 1, 168 | }, 169 | { 170 | name: "Overview", 171 | markdown_url: pathnameToOverviewFilePath(fileConfig.pathname), 172 | url: fileConfig.pathname + "readme/", 173 | active: i === 2, 174 | }, 175 | ]; 176 | 177 | let relatedFiles: Nav[] = []; 178 | if (sourceFileConfig.index && Object.keys(sourceConfig.files).length > 1) { 179 | const files = sourceConfig.files; 180 | const fileKeys = Object.keys(files).filter((key) => { 181 | return key !== originalFilepath; 182 | }); 183 | relatedFiles = fileKeys.map((fileKey) => { 184 | const file = files[fileKey]; 185 | return { 186 | name: file.name, 187 | markdown_url: isDay 188 | ? pathnameToFilePath(file.pathname) 189 | : pathnameToWeekFilePath(file.pathname), 190 | url: isDay ? file.pathname : file.pathname + "week/", 191 | }; 192 | }); 193 | } 194 | 195 | const feedTitle = `Track ${fileConfig.name} Updates ${ 196 | isDay ? "Daily" : "Weekly" 197 | }`; 198 | const feedDescription = repoMeta.description; 199 | const groups = groupBy( 200 | items, 201 | isDay ? "updated_day" : "updated_week", 202 | ) as Record< 203 | string, 204 | Item[] 205 | >; 206 | const groupKeys = Object.keys(groups); 207 | // sort 208 | groupKeys.sort((a: string, b: string) => { 209 | if (isDay) { 210 | return parseDayInfo(Number(b)).date.getTime() - 211 | parseDayInfo(Number(a)).date.getTime(); 212 | } else { 213 | return parseWeekInfo(Number(b)).date.getTime() - 214 | parseWeekInfo(Number(a)).date.getTime(); 215 | } 216 | }); 217 | 218 | const dailyRelativeFolder = isDay 219 | ? relativeFolder 220 | : path.join(relativeFolder, `week`); 221 | 222 | let feedItems: FeedItem[] = groupKeys.map((key) => { 223 | const groupItems = groups[key]; 224 | const categoryGroup = groupBy(groupItems, "category") as Record< 225 | string, 226 | Item[] 227 | >; 228 | let groupMarkdown = ""; 229 | let groupHtml = ""; 230 | let summary = ""; 231 | const categoryKeys: string[] = Object.keys(categoryGroup); 232 | const today = new Date(); 233 | const tomorrow = new Date(today); 234 | tomorrow.setDate(tomorrow.getDate() + 1); 235 | let datePublished: Date = tomorrow; 236 | let dateModified: Date = new Date(0); 237 | let total = 0; 238 | categoryKeys.forEach((key: string) => { 239 | const categoryItem = categoryGroup[key][0]; 240 | if (key) { 241 | groupMarkdown += `\n\n### ${key}\n`; 242 | groupHtml += `
<h3>${categoryItem.category_html}</h3>
`; 243 | } else { 244 | groupMarkdown += `\n`; 245 | } 246 | categoryGroup[key].forEach((item) => { 247 | total++; 248 | groupMarkdown += `\n${item.markdown}`; 249 | groupHtml += `\n${item.html}`; 250 | const itemUpdatedAt = new Date(item.updated_at); 251 | if (itemUpdatedAt.getTime() > dateModified.getTime()) { 252 | dateModified = itemUpdatedAt; 253 | } 254 | if (itemUpdatedAt.getTime() < datePublished.getTime()) { 255 | datePublished = itemUpdatedAt; 256 | } 257 | }); 258 | }); 259 | let dayInfo: DayInfo | WeekOfYear; 260 | if (isDay) { 261 | dayInfo = parseDayInfo(Number(key)); 262 | } else { 263 | dayInfo = parseWeekInfo(Number(key)); 264 | } 265 | summary = `${total} awesome projects updated on ${dayInfo.name}`; 266 | const slug = dayInfo.path + "/"; 267 | const itemUrl = `${domain}/${dayInfo.path}/`; 268 | const url = `${domain}/${slug}`; 269 | const feedItem: FeedItem = { 270 | id: itemUrl, 271 | title: `${fileConfig.name} Updates on ${dayInfo.name}`, 272 | _short_title: dayInfo.name, 273 | _slug: slug, 274 | summary, 275 | _filepath: pathnameToFilePath("/" + slug), 276 | url: itemUrl, 277 | date_published: datePublished.toISOString(), 278 | date_modified: dateModified.toISOString(), 279 | content_text: groupMarkdown, 280 | content_html: groupHtml, 281 | }; 282 | return feedItem; 283 | }); 284 | 285 | // sort feedItems by date published 286 | feedItems.sort((a, b) => { 287 | const aDate = new Date(a.date_published); 288 | const bDate = new Date(b.date_published); 289 | return bDate.getTime() - aDate.getTime(); 290 | }); 291 | 292 | const feedSeoTitle = 293 | `Track ${fileConfig.name} (${sourceIdentifier}) Updates ${ 294 | isDay ? "Daily" : "Weekly" 295 | }`; 296 | const feedInfo: FeedInfo = { 297 | ...baseFeed, 298 | title: feedTitle, 299 | _seo_title: `${feedSeoTitle} - ${siteConfig.title}`, 300 | _site_title: siteConfig.title, 301 | description: repoMeta.description || "", 302 | home_page_url: `${domain}/${dailyRelativeFolder}/`, 303 | feed_url: `${domain}/${dailyRelativeFolder}/feed.json`, 304 | }; 305 | const feed: Feed = { 306 | ...feedInfo, 307 | items: feedItems, 308 | }; 309 | const markdownDoc = `# ${feed.title}${ 310 | feed.description ? `\n\n${feed.description}` : "" 311 | } 312 | 313 | ${nav1ToMarkdown(nav1)} 314 | 315 | ${nav2ToMarkdown(nav2)}${relativedFilesToMarkdown(relatedFiles)}${ 316 | feedItems.map((item) => { 317 | return `\n\n## [${item._short_title}](/${CONTENT_DIR}/${item._slug}${INDEX_MARKDOWN_PATH})${item.content_text}`; 318 | }).join("") 319 | }`; 320 | if (isBuildMarkdown) { 321 | const markdownDistPath = path.join( 322 | getDistRepoContentPath(), 323 | dailyRelativeFolder, 324 | INDEX_MARKDOWN_PATH, 325 | ); 326 | await writeTextFile(markdownDistPath, markdownDoc); 327 | const writeMarkdownTime = Date.now(); 328 | log.debug( 329 | `build ${markdownDistPath} success, cost ${ 330 | writeMarkdownTime - buildMarkdownStartTime 331 | }ms`, 332 | ); 333 | } 334 | // build html 335 | if (isBuildHtml) { 336 | // add body, css to feed 337 | // const body = renderMarkdown(markdownDoc); 338 | 339 | const body = `
<h1>${feed.title}</h1>
340 | ${feed.description ? "<p>" + feed.description + "</p>" : ""}
341 | <p>${nav1ToHtml(nav1)}</p>
342 | <p>${nav2ToHtml(nav2)}</p>
343 | ${relativedFilesToHtml(relatedFiles)}
344 | ${
345 | feedItems.map((item) => {
346 | return `<h2><a href="/${item._slug}">${item._short_title}</a></h2>
${item.content_html}`; 347 | }).join("") 348 | }`; 349 | const htmlDoc = mustache.render(htmlIndexTemplateContent, { 350 | ...feedInfo, 351 | body, 352 | CSS, 353 | }); 354 | const htmlDistPath = path.join( 355 | getPublicPath(), 356 | dailyRelativeFolder, 357 | INDEX_HTML_PATH, 358 | ); 359 | await writeTextFile(htmlDistPath, htmlDoc); 360 | log.debug(`build ${htmlDistPath} success`); 361 | 362 | // build feed json 363 | const feedJsonDistPath = path.join( 364 | getPublicPath(), 365 | dailyRelativeFolder, 366 | "feed.json", 367 | ); 368 | // remote the current day feed, cause there is maybe some new items 369 | 370 | if (isDay) { 371 | // today start 372 | const today = new Date(); 373 | const todayStart = new Date( 374 | today.getUTCFullYear(), 375 | today.getUTCMonth(), 376 | today.getUTCDate(), 377 | ); 378 | const todayStartTimestamp = todayStart.getTime(); 379 | feedItems = feedItems.filter((item) => { 380 | const itemDate = new Date(item.date_published); 381 | return itemDate.getTime() < todayStartTimestamp; 382 | }); 383 | } else { 384 | // week 385 | // get week start date 386 | const startWeekDate = startDateOfWeek(new Date()); 387 | const startWeekDateTimestamp = startWeekDate.getTime(); 388 | feedItems = feedItems.filter((item) => { 389 | const itemDate = new Date(item.date_published); 390 | return itemDate.getTime() < startWeekDateTimestamp; 391 | }); 392 | } 393 | feed.items = feedItems; 394 | 395 | await writeJSONFile(feedJsonDistPath, feed); 396 | // build rss 397 | const rssFeed = { ...feed }; 398 | rssFeed.items = rssFeed.items.map(({ content_text: _, ...rest }) => rest); 399 | // @ts-ignore: node modules 400 | const feedOutput = jsonfeedToAtom(rssFeed, { 401 | language: "en", 402 | }); 403 | const rssDistPath = path.join( 404 | getPublicPath(), 405 | dailyRelativeFolder, 406 | "rss.xml", 407 | ); 408 | await writeTextFile(rssDistPath, feedOutput); 409 | } 410 | } 411 | 412 | // build overview markdown 413 | // first get readme content 414 | 415 | const buildOverviewMarkdownStartTime = Date.now(); 416 | const readmeContent = await getFile(sourceIdentifier, filepath); 417 | 418 | const overviewMarkdownPath = path.join( 419 | getDistRepoContentPath(), 420 | relativeFolder, 421 | "readme", 422 | INDEX_MARKDOWN_PATH, 423 | ); 424 | const overviewTitle = `${fileConfig.name} Overview`; 425 | const nav1: Nav[] = [ 426 | { 427 | name: HOME_NAV, 428 | markdown_url: "/" + INDEX_MARKDOWN_PATH, 429 | url: "/", 430 | }, 431 | { 432 | name: FEED_NAV, 433 | url: pathnameToFeedUrl(fileConfig.pathname, true), 434 | }, 435 | { 436 | name: SUBSCRIBE_NAV, 437 | url: SUBSCRIPTION_URL, 438 | }, 439 | { 440 | name: SPONSOR_NAV, 441 | url: SPONSOR_URL, 442 | }, 443 | { 444 | name: `😺 ${sourceIdentifier}`, 445 | url: sourceFileConfig.index ? 
repoMeta.url : getRepoHTMLURL( 446 | repoMeta.url, 447 | repoMeta.default_branch, 448 | originalFilepath, 449 | ), 450 | }, 451 | { 452 | name: `⭐ ${formatNumber(repoMeta.stargazers_count)}`, 453 | }, 454 | { 455 | name: `🏷️ ${sourceCategory}`, 456 | }, 457 | ]; 458 | 459 | const nav2: Nav[] = [ 460 | { 461 | name: "Daily", 462 | markdown_url: pathnameToFilePath(fileConfig.pathname), 463 | url: fileConfig.pathname, 464 | }, 465 | { 466 | name: "Weekly", 467 | 468 | markdown_url: pathnameToWeekFilePath(fileConfig.pathname), 469 | url: fileConfig.pathname + "week/", 470 | }, 471 | { 472 | name: "Overview", 473 | markdown_url: pathnameToOverviewFilePath(fileConfig.pathname), 474 | url: fileConfig.pathname + "readme/", 475 | active: true, 476 | }, 477 | ]; 478 | 479 | const readmeRendered = `# ${overviewTitle} 480 | 481 | ${repoMeta.description} 482 | 483 | ${nav1ToMarkdown(nav1)} 484 | 485 | ${nav2ToMarkdown(nav2)} 486 | 487 | --- 488 | 489 | ${readmeContent} 490 | `; 491 | await writeTextFile(overviewMarkdownPath, readmeRendered); 492 | const buildOverviewMarkdownEndTime = Date.now(); 493 | log.debug( 494 | `build ${overviewMarkdownPath} success, cost ${ 495 | buildOverviewMarkdownEndTime - buildOverviewMarkdownStartTime 496 | }ms`, 497 | ); 498 | if (isBuildHtml) { 499 | const readmeHtmlContent = await getHtmlFile(sourceIdentifier, filepath); 500 | // add body, css to feed 501 | // const body = renderMarkdown(readmeRendered); 502 | const body = `
<h1>${overviewTitle}</h1>
503 | <p>${repoMeta.description}</p>
504 | <p>${nav1ToHtml(nav1)}</p>
505 | <p>${nav2ToHtml(nav2)}</p>
506 | ${readmeHtmlContent} 507 | `; 508 | const overviewSeoTitle = 509 | `${fileConfig.name} (${sourceIdentifier}) Overview`; 510 | const overviewFeedInfo: FeedInfo = { 511 | ...baseFeed, 512 | title: overviewTitle, 513 | _site_title: siteConfig.title, 514 | _seo_title: `${overviewSeoTitle} - ${siteConfig.title}`, 515 | description: repoMeta.description, 516 | home_page_url: `${domain}/${relativeFolder}/readme/`, 517 | feed_url: `${domain}/${relativeFolder}/feed.json`, 518 | }; 519 | const htmlDoc = mustache.render(htmlIndexTemplateContent, { 520 | ...overviewFeedInfo, 521 | body: body, 522 | CSS, 523 | }); 524 | const htmlDistPath = path.join( 525 | getPublicPath(), 526 | relativeFolder, 527 | "readme", 528 | INDEX_HTML_PATH, 529 | ); 530 | await writeTextFile(htmlDistPath, htmlDoc); 531 | log.debug(`build ${htmlDistPath} success`); 532 | } 533 | 534 | return { 535 | commitMessage, 536 | }; 537 | } 538 | -------------------------------------------------------------------------------- /build-by-time.ts: -------------------------------------------------------------------------------- 1 | import { CSS, groupBy, mustache } from "./deps.ts"; 2 | import { path } from "./deps.ts"; 3 | import { 4 | BuildOptions, 5 | BuiltMarkdownInfo, 6 | Config, 7 | Feed, 8 | FeedItem, 9 | Item, 10 | ItemDetail, 11 | Nav, 12 | RunOptions, 13 | } from "./interface.ts"; 14 | import { 15 | FEED_NAV, 16 | HOME_NAV, 17 | INDEX_HTML_PATH, 18 | INDEX_MARKDOWN_PATH, 19 | SEARCH_NAV, 20 | SPONSOR_NAV, 21 | SPONSOR_URL, 22 | SUBSCRIBE_NAV, 23 | SUBSCRIPTION_URL, 24 | } from "./constant.ts"; 25 | import { 26 | formatHumanTime, 27 | getBaseFeed, 28 | getDistRepoContentPath, 29 | getDomain, 30 | getPaginationHtmlByNumber, 31 | getPublicPath, 32 | nav1ToHtml, 33 | nav1ToMarkdown, 34 | parseDayInfo, 35 | parseWeekInfo, 36 | pathnameToFeedUrl, 37 | pathnameToFilePath, 38 | pathnameToUrl, 39 | readTextFile, 40 | writeJSONFile, 41 | writeTextFile, 42 | } from "./util.ts"; 43 | import renderMarkdown from "./render-markdown.ts"; 44 | import log from "./log.ts"; 45 | import { getDayItems } from "./db.ts"; 46 | let htmlIndexTemplateContent = ""; 47 | function groupByFile(item: Item) { 48 | return item.source_identifier + "/" + item.file; 49 | } 50 | export default async function main( 51 | number: number, 52 | options: RunOptions, 53 | buildOptions: BuildOptions, 54 | ): Promise { 55 | // test is day or week 56 | const domain = getDomain(); 57 | const dbIndex = buildOptions.dbIndex; 58 | const isDay = number.toString().length === 8; 59 | const isBuildMarkdown = options.markdown || false; 60 | const isBuildSite = options.html || false; 61 | const { paginationText, paginationHtml } = buildOptions; 62 | 63 | if (!isBuildMarkdown && !isBuildSite) { 64 | log.info("skip build timeline markdown and html"); 65 | return { 66 | commitMessage: "", 67 | }; 68 | } 69 | 70 | const config = options.config; 71 | const siteConfig = config.site; 72 | let title = ""; 73 | let commitMessage = ""; 74 | let items: Record = {}; 75 | let distMarkdownRelativePath = ""; 76 | const baseFeed = getBaseFeed(); 77 | let feedTitle = ""; 78 | let feedDescription = ""; 79 | if (!htmlIndexTemplateContent) { 80 | htmlIndexTemplateContent = await readTextFile("./templates/index.html.mu"); 81 | } 82 | if (isDay) { 83 | const dayInfo = parseDayInfo(number); 84 | commitMessage = `Update day ${dayInfo.path}`; 85 | title = `Awesome List Updates on ${dayInfo.name}`; 86 | distMarkdownRelativePath = dayInfo.path; 87 | // get items 88 | items = await getDayItems(number, dbIndex, 
isDay); 89 | } else { 90 | const weekInfo = parseWeekInfo(number); 91 | commitMessage = `Update week ${weekInfo.path}`; 92 | title = `Awesome List Updates on ${weekInfo.name}`; 93 | distMarkdownRelativePath = weekInfo.path; 94 | // get items 95 | items = await getDayItems(number, dbIndex, isDay); 96 | } 97 | feedTitle = `${title}`; 98 | const feedItems = itemsToFeedItems(items, config, isDay); 99 | feedDescription = `${feedItems.length} awesome lists updated ${ 100 | isDay ? "today" : "this week" 101 | }.`; 102 | 103 | const nav1: Nav[] = [ 104 | { 105 | name: HOME_NAV, 106 | markdown_url: "/" + INDEX_MARKDOWN_PATH, 107 | url: "/", 108 | }, 109 | { 110 | name: SEARCH_NAV, 111 | url: pathnameToUrl("/search/"), 112 | }, 113 | { 114 | name: FEED_NAV, 115 | url: pathnameToFeedUrl("/", isDay), 116 | }, 117 | { 118 | name: SUBSCRIBE_NAV, 119 | url: SUBSCRIPTION_URL, 120 | }, 121 | { 122 | name: SPONSOR_NAV, 123 | url: SPONSOR_URL, 124 | }, 125 | ]; 126 | const feed: Feed = { 127 | ...baseFeed, 128 | title: feedTitle, 129 | _site_title: siteConfig.title, 130 | description: feedDescription, 131 | _seo_title: `${feedTitle} - ${siteConfig.title}`, 132 | feed_url: `${domain}/feed.json`, 133 | home_page_url: domain, 134 | items: feedItems, 135 | }; 136 | 137 | const markdownDoc = `# ${feed.title} 138 | 139 | ${feed.description} 140 | 141 | ${nav1ToMarkdown(nav1)} 142 | 143 | ${ 144 | feedItems.map((item, index) => { 145 | return `\n\n## [${index + 1}. ${item.title}](${ 146 | pathnameToFilePath("/" + item._slug) 147 | })${item.content_text}`; 148 | }).join("") 149 | }${paginationText}`; 150 | if (isBuildMarkdown) { 151 | // build daily markdown 152 | // sort 153 | const distRepoPath = getDistRepoContentPath(); 154 | const dailyMarkdownPath = path.join( 155 | distRepoPath, 156 | distMarkdownRelativePath, 157 | INDEX_MARKDOWN_PATH, 158 | ); 159 | 160 | await writeTextFile(dailyMarkdownPath, markdownDoc); 161 | // log.debug(`build ${dailyMarkdownPath} success`); 162 | } 163 | if (isBuildSite) { 164 | // add body, css to feed 165 | 166 | const body = `
<h1>${feed.title}</h1>
167 | <p>${feed.description}</p>
168 | <p>${nav1ToHtml(nav1)}</p>
169 | ${
170 | feedItems.map((item, index) => {
171 | return `<h2><a href="/${item._slug}">${
172 | index + 1
173 | }. ${item.title}</a></h2>
${item.content_html}`; 174 | }).join("") 175 | }${paginationHtml}`; 176 | const htmlDoc = mustache.render(htmlIndexTemplateContent, { 177 | ...feed, 178 | body, 179 | CSS, 180 | }); 181 | const htmlDistPath = path.join( 182 | getPublicPath(), 183 | distMarkdownRelativePath, 184 | INDEX_HTML_PATH, 185 | ); 186 | await writeTextFile(htmlDistPath, htmlDoc); 187 | log.debug(`build ${htmlDistPath} success`); 188 | 189 | // build feed json 190 | const feedJsonDistPath = path.join( 191 | getPublicPath(), 192 | distMarkdownRelativePath, 193 | "feed.json", 194 | ); 195 | await writeJSONFile(feedJsonDistPath, feed); 196 | } 197 | return { 198 | commitMessage, 199 | }; 200 | } 201 | 202 | export function itemsToFeedItems( 203 | items: Record, 204 | config: Config, 205 | isDay: boolean, 206 | ): FeedItem[] { 207 | const allItems: Item[] = []; 208 | for (const itemSha1 of Object.keys(items)) { 209 | const item = items[itemSha1]; 210 | allItems.push(item); 211 | } 212 | const domain = getDomain(); 213 | const sourcesConfig = config.sources; 214 | const groups = groupBy(allItems, groupByFile) as Record< 215 | string, 216 | Item[] 217 | >; 218 | const groupKeys = Object.keys(groups); 219 | let feedItems: FeedItem[] = groupKeys.map((key) => { 220 | const items = groups[key]; 221 | 222 | let groupMarkdown = ""; 223 | let groupHtml = ""; 224 | let summary = ""; 225 | 226 | const categoryGroup = groupBy(items, "category") as Record< 227 | string, 228 | Item[] 229 | >; 230 | 231 | const categoryKeys = Object.keys(categoryGroup); 232 | if (categoryKeys.length === 0) { 233 | throw new Error(`${key} has no categories`); 234 | } 235 | let firstItem: Item | undefined; 236 | 237 | const today = new Date(); 238 | const tomorrow = new Date(today); 239 | tomorrow.setDate(tomorrow.getDate() + 1); 240 | let datePublished: Date = tomorrow; 241 | let dateModified: Date = new Date(0); 242 | let total = 0; 243 | categoryKeys.forEach((key) => { 244 | const categoryItem = categoryGroup[key][0]; 245 | if (key) { 246 | groupMarkdown += `\n\n### ${key}\n`; 247 | groupHtml += `
<h3>${categoryItem.category_html}</h3>
`; 248 | } else { 249 | groupMarkdown += `\n`; 250 | } 251 | total++; 252 | categoryGroup[key].forEach((item) => { 253 | groupMarkdown += "\n" + item.markdown; 254 | groupHtml += item.html; 255 | firstItem = item; 256 | const itemUpdated = new Date(item.updated_at); 257 | if (itemUpdated.getTime() < datePublished.getTime()) { 258 | datePublished = itemUpdated; 259 | } 260 | if (itemUpdated.getTime() > dateModified.getTime()) { 261 | dateModified = itemUpdated; 262 | } 263 | }); 264 | }); 265 | summary = `${total} awesome projects updated`; 266 | if (!firstItem) { 267 | throw new Error(`${key} has no firstItem`); 268 | } 269 | console.log("firstItem.source_identifier", firstItem.source_identifier); 270 | // get file path 271 | const sourceFileConfig = 272 | sourcesConfig[firstItem.source_identifier].files[firstItem.file]; 273 | 274 | const slug = isDay 275 | ? sourceFileConfig.pathname.slice(1) 276 | : sourceFileConfig.pathname.slice(1) + "week/"; 277 | 278 | const itemUrl = `${domain}/${slug}`; 279 | const feedItem: FeedItem = { 280 | id: itemUrl, 281 | title: sourceFileConfig.name, 282 | _slug: slug, 283 | _filepath: pathnameToFilePath("/" + slug), 284 | url: itemUrl, 285 | summary: summary, 286 | content_text: groupMarkdown, 287 | content_html: groupHtml, 288 | date_published: datePublished.toISOString(), 289 | date_modified: dateModified.toISOString(), 290 | }; 291 | return feedItem; 292 | }); 293 | // sort feedItems by date published 294 | feedItems = feedItems.sort((a, b) => { 295 | const aDate = new Date(a.date_published); 296 | const bDate = new Date(b.date_published); 297 | return bDate.getTime() - aDate.getTime(); 298 | }); 299 | return feedItems; 300 | } 301 | export function itemsToFeedItemsByDate( 302 | items: Record, 303 | config: Config, 304 | isDay: boolean, 305 | ): FeedItem[] { 306 | // const allItems: ItemDetail[] = getItemsDetails(items); 307 | const domain = getDomain(); 308 | const sourcesConfig = config.sources; 309 | const groups = groupBy( 310 | items, 311 | isDay ? "updated_day" : "updated_week", 312 | ) as Record< 313 | string, 314 | ItemDetail[] 315 | >; 316 | let groupKeys = Object.keys(groups); 317 | // sort 318 | groupKeys = groupKeys.sort((a: string, b: string) => { 319 | if (isDay) { 320 | return parseDayInfo(Number(b)).date.getTime() - 321 | parseDayInfo(Number(a)).date.getTime(); 322 | } else { 323 | return parseWeekInfo(Number(b)).date.getTime() - 324 | parseWeekInfo(Number(a)).date.getTime(); 325 | } 326 | }); 327 | let feedItems: FeedItem[] = groupKeys.map((key) => { 328 | const items = groups[key]; 329 | 330 | let groupMarkdown = ""; 331 | let groupHtml = ""; 332 | const categoryGroup = groupBy(items, groupByFile) as Record< 333 | string, 334 | ItemDetail[] 335 | >; 336 | 337 | const categoryKeys = Object.keys(categoryGroup); 338 | if (categoryKeys.length === 0) { 339 | throw new Error(`${key} has no categories`); 340 | } 341 | let firstItem: Item | undefined; 342 | 343 | const today = new Date(); 344 | const tomorrow = new Date(today); 345 | tomorrow.setDate(tomorrow.getDate() + 1); 346 | let datePublished: Date = tomorrow; 347 | let dateModified: Date = new Date(0); 348 | let total = 0; 349 | let summary = "including "; 350 | categoryKeys.forEach((key, index) => { 351 | const firstSourceItem = categoryGroup[key][0]; 352 | const sourceFileConfig = sourcesConfig[firstSourceItem.source_identifier] 353 | .files[firstSourceItem.file]; 354 | total++; 355 | summary += `${sourceFileConfig.name}${ 356 | index < categoryKeys.length - 1 ? 
", " : "" 357 | }`; 358 | groupMarkdown += `\n\n#### [${index + 1}. ${sourceFileConfig.name}](${ 359 | pathnameToFilePath(sourceFileConfig.pathname) 360 | })`; 361 | groupHtml += `

${ 362 | index + 1 363 | }. ${sourceFileConfig.name}

`; 364 | // group by category 365 | const categoryItems = categoryGroup[key]; 366 | const categoryGroupByCategory = groupBy( 367 | categoryItems, 368 | "category", 369 | ); 370 | 371 | const categoryGroupByCategoryKeys = Object.keys( 372 | categoryGroupByCategory, 373 | ); 374 | categoryGroupByCategoryKeys.forEach((categoryKey) => { 375 | const categoryItems = categoryGroupByCategory[categoryKey]; 376 | const categoryItem = categoryItems[0]; 377 | if (categoryKey) { 378 | groupMarkdown += `\n\n##### ${categoryKey}\n`; 379 | groupHtml += `
<h5>${categoryItem.category_html}</h5>
`; 380 | } 381 | categoryItems.forEach((item: ItemDetail) => { 382 | groupMarkdown += "\n" + item.markdown; 383 | groupHtml += item.html; 384 | firstItem = item; 385 | const itemUpdated = new Date(item.updated_at); 386 | if (itemUpdated.getTime() < datePublished.getTime()) { 387 | datePublished = itemUpdated; 388 | } 389 | if (itemUpdated.getTime() > dateModified.getTime()) { 390 | dateModified = itemUpdated; 391 | } 392 | }); 393 | }); 394 | }); 395 | if (!firstItem) { 396 | throw new Error(`${key} has no firstItem`); 397 | } 398 | // get file path 399 | const sourceFileConfig = 400 | sourcesConfig[firstItem.source_identifier].files[firstItem.file]; 401 | 402 | const dayInfo = isDay 403 | ? parseDayInfo(Number(key)) 404 | : parseWeekInfo(Number(key)); 405 | const slug = dayInfo.path + "/"; 406 | const itemUrl = `${domain}/${slug}`; 407 | summary = `${total} awesome list updated on ${dayInfo.name}, ` + summary; 408 | const feedItem: FeedItem = { 409 | id: itemUrl, 410 | title: `Awesome List Updated on ${dayInfo.name}`, 411 | _short_title: dayInfo.name, 412 | _slug: slug, 413 | summary, 414 | _filepath: pathnameToFilePath("/" + slug), 415 | url: itemUrl, 416 | content_text: groupMarkdown, 417 | content_html: groupHtml, 418 | date_published: datePublished.toISOString(), 419 | date_modified: dateModified.toISOString(), 420 | }; 421 | return feedItem; 422 | }); 423 | // sort feedItems by date published 424 | feedItems = feedItems.sort((a, b) => { 425 | const aDate = new Date(a.date_published); 426 | const bDate = new Date(b.date_published); 427 | return bDate.getTime() - aDate.getTime(); 428 | }); 429 | return feedItems; 430 | } 431 | -------------------------------------------------------------------------------- /build-html.ts: -------------------------------------------------------------------------------- 1 | import { CSS, groupBy, jsonfeedToAtom, mustache } from "./deps.ts"; 2 | import { fs, path } from "./deps.ts"; 3 | import { 4 | DayInfo, 5 | FeedInfo, 6 | File, 7 | FileInfo, 8 | FileMetaWithSource, 9 | Item, 10 | List, 11 | ListItem, 12 | RunOptions, 13 | WeekOfYear, 14 | } from "./interface.ts"; 15 | import renderMarkdown from "./render-markdown.ts"; 16 | import { 17 | INDEX_MARKDOWN_PATH, 18 | SUBSCRIPTION_URL, 19 | TOP_REPOS_COUNT, 20 | } from "./constant.ts"; 21 | import { 22 | exists, 23 | formatHumanTime, 24 | getBaseFeed, 25 | getDayNumber, 26 | getDbIndex, 27 | getDbMeta, 28 | getDistRepoContentPath, 29 | getDistRepoGitUrl, 30 | getDistRepoPath, 31 | getIndexFileConfig, 32 | getnextPaginationTextByNumber, 33 | getPublicPath, 34 | getRepoHTMLURL, 35 | getStaticPath, 36 | getWeekNumber, 37 | pathnameToFeedUrl, 38 | pathnameToFilePath, 39 | readTextFile, 40 | slug, 41 | walkFile, 42 | writeDbMeta, 43 | writeJSONFile, 44 | writeTextFile, 45 | } from "./util.ts"; 46 | import log from "./log.ts"; 47 | import { getItemsByDays, getUpdatedDays, getUpdatedFiles } from "./db.ts"; 48 | import buildBySource from "./build-by-source.ts"; 49 | import buildByTime, { itemsToFeedItemsByDate } from "./build-by-time.ts"; 50 | 51 | export default async function buildHtml(options: RunOptions) { 52 | const config = options.config; 53 | const sourcesConfig = config.sources; 54 | const sourcesKeys = Object.keys(sourcesConfig); 55 | const isBuildSite = options.html; 56 | const specificSourceIdentifiers = options.sourceIdentifiers; 57 | 58 | if (isBuildSite) { 59 | const htmlIndexTemplateContent = await readTextFile( 60 | "./templates/index.html.mu", 61 | ); 62 | // build from markdown 63 | const 
markdownPath = getDistRepoContentPath(); 64 | 65 | for await (const entry of await walkFile(markdownPath)) { 66 | const { path: filePath, isFile } = entry; 67 | if (isFile && filePath.endsWith(".md")) { 68 | const relativePath = path.relative(path.join(markdownPath), filePath); 69 | const file = await readTextFile(filePath); 70 | const html = renderMarkdown(file); 71 | const htmlPath = path.join( 72 | getPublicPath(), 73 | relativePath.replace(/README\.md$/, "index.html"), 74 | ); 75 | // const htmlDoc = mustache.render(htmlIndexTemplateContent, { 76 | // ...indexFeed, 77 | // body: html, 78 | // CSS, 79 | // }); 80 | await writeTextFile(htmlPath, html); 81 | } 82 | } 83 | } else { 84 | log.info("skip build html..."); 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /build-search.ts: -------------------------------------------------------------------------------- 1 | import { fs, mustache, path } from "./deps.ts"; 2 | import { 3 | getConfig, 4 | getPublicPath, 5 | readTextFile, 6 | writeTextFile, 7 | } from "./util.ts"; 8 | import { RunOptions } from "./interface.ts"; 9 | export default async function buildSearch(runOptions: RunOptions) { 10 | const config = runOptions.config; 11 | const sourcesConfig = config.sources; 12 | const siteConfig = config.site; 13 | const htmlSearchTemplateContent = await readTextFile( 14 | "./templates/search.html.mu", 15 | ); 16 | // copy search index 17 | await fs.copy( 18 | "./temp-morsels/", 19 | path.join(getPublicPath(), "search-index/"), 20 | { 21 | overwrite: true, 22 | }, 23 | ); 24 | const searchPageData = { 25 | title: "Search Awesome Projects", 26 | _site_title: siteConfig.title, 27 | description: config.site.description, 28 | _seo_title: `Search Awesome Projects - ${config.site.title}`, 29 | home_page_url: config.site.url + "/search/", 30 | }; 31 | const searchHtmlDoc = mustache.render( 32 | htmlSearchTemplateContent, 33 | searchPageData, 34 | ); 35 | 36 | const htmlSearchPath = path.join( 37 | getPublicPath(), 38 | "search/index.html", 39 | ); 40 | await writeTextFile(htmlSearchPath, searchHtmlDoc); 41 | } 42 | 43 | if (import.meta.main) { 44 | const config = await getConfig(); 45 | 46 | await buildSearch({ 47 | config, 48 | sourceIdentifiers: [], 49 | fetchRepoUpdates: false, 50 | markdown: false, 51 | fetch: false, 52 | dayMarkdown: false, 53 | serve: false, 54 | port: 8000, 55 | }); 56 | } 57 | -------------------------------------------------------------------------------- /constant.ts: -------------------------------------------------------------------------------- 1 | export const INDEX_MARKDOWN_PATH = "README.md"; 2 | export const RECENTLY_UPDATED_COUNT = 10; 3 | export const TOP_REPOS_COUNT = 50; 4 | export const PROD_DOMAIN = "https://www.trackawesomelist.com"; 5 | export const DEV_DOMAIN = "http://localhost:8000"; 6 | export const INDEX_HTML_PATH = "index.html"; 7 | export const DEFAULT_CATEGORY = "Miscellaneous"; 8 | export const CONTENT_DIR = "content"; 9 | export const SUBSCRIPTION_URL = 10 | "https://trackawesomelist.us17.list-manage.com/subscribe?u=d2f0117aa829c83a63ec63c2f&id=36a103854c"; 11 | 12 | export const HOME_NAV = "🏠 Home"; 13 | export const SEARCH_NAV = "🔍 Search"; 14 | export const FEED_NAV = "🔥 Feed"; 15 | export const SUBSCRIBE_NAV = "📮 Subscribe"; 16 | export const SPONSOR_NAV = "❤️ Sponsor"; 17 | export const SPONSOR_URL = "https://github.com/sponsors/theowenyoung"; 18 | 19 | export const GITHUB_NAV = "😺 Github"; 20 | export const WEBSITE_NAV = "🌐 Website"; 21 | 
export const GITHUB_REPO = 22 | "https://github.com/trackawesomelist/trackawesomelist/"; 23 | -------------------------------------------------------------------------------- /db-meta-init.json: -------------------------------------------------------------------------------- 1 | { 2 | "sources": {}, 3 | "checked_at": "2019-01-01T00:00:00Z" 4 | } 5 | -------------------------------------------------------------------------------- /db.ts: -------------------------------------------------------------------------------- 1 | import { 2 | fromMarkdown, 3 | gfm, 4 | gfmFromMarkdown, 5 | gfmToMarkdown, 6 | render, 7 | toMarkdown, 8 | } from "./deps.ts"; 9 | import { 10 | DayInfo, 11 | DBIndex, 12 | DBMeta, 13 | ExpiredValue, 14 | File, 15 | FileInfo, 16 | Item, 17 | WeekOfYear, 18 | } from "./interface.ts"; 19 | import log from "./log.ts"; 20 | import formatMarkdownItem from "./format-markdown-item.ts"; 21 | import { 22 | getDayNumber, 23 | getDbContentHtmlPath, 24 | getDbContentPath, 25 | getDbItemsPath, 26 | getWeekNumber, 27 | parseDayInfo, 28 | parseWeekInfo, 29 | readJSONFile, 30 | readTextFile, 31 | writeJSONFile, 32 | writeTextFile, 33 | } from "./util.ts"; 34 | export type StringOrNumber = string | number; 35 | export function getFile( 36 | sourceIdentifier: string, 37 | filepath: string, 38 | ): Promise { 39 | const fileDbPath = getDbContentPath(sourceIdentifier, filepath); 40 | return readTextFile(fileDbPath); 41 | } 42 | export function getHtmlFile( 43 | sourceIdentifier: string, 44 | filepath: string, 45 | ): Promise { 46 | const fileDbPath = getDbContentHtmlPath(sourceIdentifier, filepath); 47 | return readTextFile(fileDbPath); 48 | } 49 | export async function updateFile( 50 | fileInfo: FileInfo, 51 | content: string, 52 | stars: Record, 53 | ) { 54 | const file = fileInfo.filepath; 55 | const sourceConfig = fileInfo.sourceConfig; 56 | const sourceIdentifier = sourceConfig.identifier; 57 | // check items length 58 | const tree = fromMarkdown(content, "utf8", { 59 | extensions: [gfm()], 60 | mdastExtensions: [gfmFromMarkdown()], 61 | }); 62 | 63 | // format link etc. 
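// (A note on the round trip below: the raw markdown was parsed into an
// mdast tree with GFM support above; formatMarkdownItem presumably uses
// fileInfo and the stars map to normalize each item's links and metadata,
// and the tree is then serialized back to markdown and rendered once more
// to HTML, so the db keeps both content formats in sync.)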
64 | const overviewMarkdownTree = await formatMarkdownItem(tree, fileInfo, stars); 65 | const overviewMarkdownContent = toMarkdown( 66 | overviewMarkdownTree, 67 | { 68 | extensions: [gfmToMarkdown()], 69 | }, 70 | ); 71 | const dbContentPath = getDbContentPath(sourceIdentifier, file); 72 | await writeTextFile(dbContentPath, overviewMarkdownContent); 73 | // also write html 74 | const dbContentHtmlPath = getDbContentHtmlPath(sourceIdentifier, file); 75 | const htmlContent = render(overviewMarkdownContent, { 76 | allowIframes: true, 77 | }); 78 | await writeTextFile(dbContentHtmlPath, htmlContent); 79 | } 80 | export async function updateItems( 81 | fileInfo: FileInfo, 82 | items: Record, 83 | dbIndex: DBIndex, 84 | ) { 85 | const file = fileInfo.filepath; 86 | const sourceConfig = fileInfo.sourceConfig; 87 | const sourceIdentifier = sourceConfig.identifier; 88 | const sourceCategory = sourceConfig.category; 89 | const itemKeys = Object.keys(items); 90 | if (itemKeys.length === 0) { 91 | return; 92 | } 93 | const dbItemsPath = getDbItemsPath( 94 | fileInfo.sourceConfig.identifier, 95 | fileInfo.filepath, 96 | ); 97 | await writeJSONFile(dbItemsPath, items); 98 | 99 | // write to index 100 | // delete old index 101 | const keys = Object.keys(dbIndex); 102 | for (const key of keys) { 103 | if (key.startsWith(sourceIdentifier + ":")) { 104 | delete dbIndex[key]; 105 | } 106 | } 107 | for (const itemKey of itemKeys) { 108 | const item = items[itemKey]; 109 | dbIndex[`${sourceIdentifier}:${fileInfo.filepath}:${item.sha1}`] = { 110 | t: new Date(item.updated_at).getTime(), 111 | d: item.updated_day, 112 | w: item.updated_week, 113 | }; 114 | } 115 | } 116 | 117 | export interface UpdatedItemsParam { 118 | since_date: Date; 119 | source_identifiers?: string[]; 120 | } 121 | export interface ItemsResult { 122 | items: Record; 123 | since_id: number; 124 | has_next: boolean; 125 | } 126 | export function getItems( 127 | sourceIdentifier: string, 128 | file: string, 129 | ): Promise> { 130 | const dbItemsPath = getDbItemsPath(sourceIdentifier, file); 131 | return readJSONFile(dbItemsPath) as Promise>; 132 | } 133 | export async function getItemsByDays( 134 | days: number[], 135 | dbIndex: DBIndex, 136 | isDay: boolean, 137 | ): Promise> { 138 | const keys = Object.keys(dbIndex); 139 | const items: Record = {}; 140 | const indexKey = isDay ? 
"d" : "w"; 141 | const todos: Record = {}; 142 | for (const key of keys) { 143 | const item = dbIndex[key]; 144 | 145 | if (days.includes(item[indexKey])) { 146 | const arr = key.split(":"); 147 | const sourceIdentifier = arr[0]; 148 | const file = arr[1]; 149 | const sha1 = arr[2]; 150 | const dbItemsPath = getDbItemsPath(sourceIdentifier, file); 151 | if (!todos[dbItemsPath]) { 152 | todos[dbItemsPath] = []; 153 | } 154 | todos[dbItemsPath].push(sha1); 155 | } 156 | } 157 | const todoKeys = Object.keys(todos); 158 | const promises: Promise[] = []; 159 | for (const todoKey of todoKeys) { 160 | promises.push(readJSONFile(todoKey)); 161 | } 162 | let rIndex = 0; 163 | const results = await Promise.all(promises) as unknown as Record< 164 | string, 165 | Item 166 | >[]; 167 | for (const result of results) { 168 | for (const sha1 of todos[todoKeys[rIndex]]) { 169 | if (!result[sha1]) { 170 | throw new Error(`sha1 ${sha1} not found in ${todoKeys[rIndex]}`); 171 | } 172 | items[sha1] = result[sha1]; 173 | } 174 | rIndex++; 175 | } 176 | return items; 177 | } 178 | export async function getDayItems( 179 | dayNumber: number, 180 | dbIndex: DBIndex, 181 | isDay: boolean, 182 | ): Promise> { 183 | const keys = Object.keys(dbIndex); 184 | const items: Record = {}; 185 | const indexKey = isDay ? "d" : "w"; 186 | 187 | const todos: Record = {}; 188 | for (const key of keys) { 189 | const item = dbIndex[key]; 190 | if (item[indexKey] === dayNumber) { 191 | const arr = key.split(":"); 192 | const sourceIdentifier = arr[0]; 193 | const file = arr[1]; 194 | const sha1 = arr[2]; 195 | const dbItemsPath = getDbItemsPath(sourceIdentifier, file); 196 | if (!todos[dbItemsPath]) { 197 | todos[dbItemsPath] = []; 198 | } 199 | todos[dbItemsPath].push(sha1); 200 | } 201 | } 202 | 203 | const todoKeys = Object.keys(todos); 204 | const promises: Promise[] = []; 205 | for (const todoKey of todoKeys) { 206 | promises.push(readJSONFile(todoKey)); 207 | } 208 | let rIndex = 0; 209 | const results = await Promise.all(promises) as unknown as Record< 210 | string, 211 | Item 212 | >[]; 213 | for (const result of results) { 214 | for (const sha1 of todos[todoKeys[rIndex]]) { 215 | if (!result[sha1]) { 216 | throw new Error(`sha1 ${sha1} not found in ${todoKeys[rIndex]}`); 217 | } 218 | items[sha1] = result[sha1]; 219 | } 220 | rIndex++; 221 | } 222 | return items; 223 | } 224 | export function getUpdatedFiles( 225 | options: UpdatedItemsParam, 226 | dbIndex: DBIndex, 227 | ): File[] { 228 | const filesSet: Set = new Set(); 229 | const keys = Object.keys(dbIndex); 230 | for (const key of keys) { 231 | const item = dbIndex[key]; 232 | const arr = key.split(":"); 233 | const sourceIdentifier = arr[0]; 234 | const file = arr[1]; 235 | if (options.since_date) { 236 | if (item.t < options.since_date?.getTime()) { 237 | continue; 238 | } 239 | } 240 | if (options.source_identifiers && options.source_identifiers.length > 0) { 241 | if (!options.source_identifiers.includes(sourceIdentifier)) { 242 | continue; 243 | } 244 | } 245 | filesSet.add(`${sourceIdentifier}:${file}`); 246 | } 247 | const files: File[] = []; 248 | 249 | for (const file of filesSet) { 250 | const arr = file.split(":"); 251 | const sourceIdentifier = arr[0]; 252 | const filepath = arr[1]; 253 | files.push({ 254 | source_identifier: sourceIdentifier, 255 | file: filepath, 256 | }); 257 | } 258 | 259 | return files; 260 | } 261 | export function getUpdatedDays( 262 | dbIndex: DBIndex, 263 | options: UpdatedItemsParam, 264 | isDay: boolean, 265 | ): (DayInfo | 
WeekOfYear)[] { 266 | const days: (DayInfo | WeekOfYear)[] = []; 267 | const daysSet: Set<number> = new Set(); 268 | const keys = Object.keys(dbIndex); 269 | const indexKey = isDay ? "d" : "w"; 270 | for (const key of keys) { 271 | const item = dbIndex[key]; 272 | const arr = key.split(":"); 273 | const sourceIdentifier = arr[0]; 274 | const file = arr[1]; 275 | const sha1 = arr[2]; 276 | if (options.since_date) { 277 | if (item.t < options.since_date?.getTime()) { 278 | continue; 279 | } 280 | } 281 | if (options.source_identifiers && options.source_identifiers.length > 0) { 282 | if (!options.source_identifiers.includes(sourceIdentifier)) { 283 | continue; 284 | } 285 | } 286 | daysSet.add(item[indexKey]); 287 | } 288 | return Array.from(daysSet).sort((a, b) => b - a).map((day) => { 289 | if (isDay) { 290 | return parseDayInfo(day); 291 | } else { 292 | return parseWeekInfo(day); 293 | } 294 | }); 295 | } 296 | -------------------------------------------------------------------------------- /deno.json: -------------------------------------------------------------------------------- 1 | { 2 | "lint": { 3 | "files": { 4 | "exclude": ["workers-site/"] 5 | } 6 | }, 7 | "fmt": { 8 | "files": { 9 | "exclude": [ 10 | "node_modules/", 11 | "public/", 12 | "example/", 13 | "example/**/*", 14 | "workers-site/", 15 | "current/", 16 | "dev-current/", 17 | "dev-archive/", 18 | "archive/", 19 | "s3/", 20 | "zip/" 21 | ] 22 | } 23 | }, 24 | "test": { 25 | "files": { 26 | "exclude": [ 27 | "s3", 28 | "node_modules/", 29 | "public/", 30 | "example/", 31 | "workers-site/", 32 | "current/", 33 | "dev-current/", 34 | "dev-archive/", 35 | "archive/" 36 | ] 37 | } 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /deps.ts: -------------------------------------------------------------------------------- 1 | // std 2 | export * as YAML from "https://deno.land/std@0.158.0/encoding/yaml.ts"; 3 | export * as TOML from "https://deno.land/std@0.158.0/encoding/toml.ts"; 4 | export * as path from "https://deno.land/std@0.158.0/path/mod.ts"; 5 | export * as fs from "https://deno.land/std@0.158.0/fs/mod.ts"; 6 | export * as dotenv from "https://deno.land/std@0.158.0/dotenv/mod.ts"; 7 | export * as datetime from "https://deno.land/std@0.158.0/datetime/mod.ts"; 8 | export * as async from "https://deno.land/std@0.158.0/async/mod.ts"; 9 | export * as flags from "https://deno.land/std@0.158.0/flags/mod.ts"; 10 | export * as colors from "https://deno.land/std@0.158.0/fmt/colors.ts"; 11 | export { delay } from "https://deno.land/std@0.158.0/async/delay.ts"; 12 | export { DateTimeFormatter } from "https://deno.land/std@0.158.0/datetime/formatter.ts"; 13 | export { Command } from "https://deno.land/x/cliffy@v0.25.2/command/mod.ts"; 14 | export { serve } from "https://deno.land/std@0.158.0/http/server.ts"; 15 | export { contentType } from "https://deno.land/std@0.158.0/media_types/mod.ts"; 16 | export { 17 | serveDir, 18 | serveFile, 19 | } from "https://deno.land/x/std@0.159.0/http/file_server.ts"; 20 | export * as posixPath from "https://deno.land/std@0.158.0/path/posix.ts"; 21 | export { config as dotenvConfig } from "https://deno.land/std@0.158.0/dotenv/mod.ts"; 22 | export { readLines } from "https://deno.land/std@0.153.0/io/buffer.ts"; 23 | export * as base64 from "https://deno.land/std@0.153.0/encoding/base64.ts"; 24 | // third party 25 | export { titleCase } from "https://esm.sh/title-case@3.0.3"; 26 | export { default as camelCase } from
"https://deno.land/x/lodash@4.17.15-es/camelCase.js"; 27 | export { default as groupBy } from "https://deno.land/x/lodash@4.17.15-es/groupBy.js"; 28 | export { CSS, render } from "https://deno.land/x/gfm@0.1.22/mod.ts"; 29 | // npm modules 30 | export { default as mustache } from "https://esm.sh/mustache@4.2.0"; 31 | export { default as pLimit } from "https://esm.sh/p-limit@4.0.0"; 32 | export { gfm } from "https://esm.sh/micromark-extension-gfm@2.0.1"; 33 | export { 34 | gfmFromMarkdown, 35 | gfmToMarkdown, 36 | } from "https://esm.sh/mdast-util-gfm@2.0.2"; 37 | // export { default as kebabCase } from "https://jspm.dev/lodash@4.17.21/kebabCase"; 38 | export { toMarkdown } from "https://esm.sh/mdast-util-to-markdown@1.5.0"; 39 | export { fromMarkdown } from "https://esm.sh/mdast-util-from-markdown@1.3.0"; 40 | export { EXIT, visit } from "https://esm.sh/unist-util-visit@4.1.2"; 41 | export { selectAll } from "https://esm.sh/unist-util-select@4.0.3"; 42 | export { remove } from "https://esm.sh/unist-util-remove@3.1.1"; 43 | export { u } from "https://esm.sh/unist-builder@3.0.1"; 44 | export { default as remarkInlineLinks } from "https://esm.sh/remark-inline-links@6.0.1"; 45 | export { default as remarkEmoji } from "https://esm.sh/remark-emoji@3.1.0"; 46 | export { default as remarkGemoji } from "./lib/gemoji.js"; 47 | export type { 48 | Content, 49 | Link, 50 | Root, 51 | TableCell, 52 | TableRow, 53 | } from "https://esm.sh/v92/@types/mdast@3.0.10/index.d.ts"; 54 | export { default as jsonfeedToAtom } from "https://jspm.dev/jsonfeed-to-atom@1.2.2"; 55 | import transliteration from "https://jspm.dev/transliteration@2.3.5"; 56 | // @ts-ignore: npm module 57 | const slug = transliteration.slugify; 58 | export { slug }; 59 | export { default as kebabCase } from "https://jspm.dev/lodash@4.17.21/kebabCase"; 60 | -------------------------------------------------------------------------------- /error.ts: -------------------------------------------------------------------------------- 1 | export class NotFound extends Error { 2 | constructor(message: string) { 3 | super(message); 4 | this.name = "NotFound"; 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /example/books.md: -------------------------------------------------------------------------------- 1 | # Introduction 2 | 3 | Introduction text. 4 | 5 | # Table of Contents 6 | 7 | - [Major Cloud Providers' Always-Free Limits](#major-cloud-providers) 8 | - [Cloud management solutions](#cloud-management-solutions) 9 | - [Analytics, Events and Statistics](#analytics-events-and-statistics) 10 | - [APIs, Data and ML](#apis-data-and-ml) 11 | - [Artifact Repos](#artifact-repos) 12 | - [BaaS](#baas) 13 | 14 | ## BY PROGRAMMING LANGUAGE 15 | 16 | Originally, this list included a section called "Language Agnostic" for books about programming subjects not restricted to a specific programming language. 17 | That section got so big, we decided to split it into its own file, the [BY SUBJECT file](free-programming-books-subjects.md). 
18 | 19 | ### ABAP 20 | 21 | - [SAP Code Style Guides - Clean ABAP](https://github.com/SAP/styleguides/blob/master/clean-abap/CleanABAP.md) 22 | 23 | ### Ada 24 | 25 | - [A Guide to Ada for C and C++ Programmers](http://www.cs.uni.edu/~mccormic/4740/guide-c2ada.pdf) (PDF) 26 | - [Ada Distilled](http://www.adapower.com/pdfs/AdaDistilled07-27-2003.pdf) (PDF) 27 | - [Ada for the C++ or Java Developer](https://www.adacore.com/uploads/books/pdf/Ada_for_the_C_or_Java_Developer-cc.pdf) - Quentin Ochem (PDF) 28 | - [Ada Programming](https://en.wikibooks.org/wiki/Ada_Programming) - Wikibooks 29 | - [Ada Reference Manual - ISO/IEC 8652:2012(E) Language and Standard Libraries](http://www.ada-auth.org/standards/12rm/RM-Final.pdf) (PDF) 30 | - [Introduction To Ada](https://learn.adacore.com/courses/intro-to-ada/index.html) 31 | - [Introduction To SPARK](https://learn.adacore.com/courses/SPARK_for_the_MISRA_C_Developer/index.html) 32 | - [The Big Online Book of Linux Ada Programming](http://www.pegasoft.ca/resources/boblap/book.html) 33 | 34 | ### Workflow 35 | 36 | - [Declare Peace on Virtual Machines. A guide to simplifying vm-based development on a Mac](https://leanpub.com/declarepeaceonvms/read) 37 | 38 | ### xBase (dBase / Clipper / Harbour) 39 | 40 | - [Application Development with Harbour](https://en.wikibooks.org/wiki/Application_Development_with_Harbour) - Wikibooks 41 | - [CA-Clipper 5.2 Norton Guide](https://web.archive.org/web/20190516192814/http://www.ousob.com/ng/clguide/) 42 | - [Clipper Tutorial: a Guide to Open Source Clipper(s)]() - Wikibooks 43 | 44 | ## BY PROGRAMMING LANGUAGE 45 | 46 | Originally, this list included a section called "Language Agnostic" for books about programming subjects not restricted to a specific programming language. 47 | That section got so big, we decided to split it into its own file, the [BY SUBJECT file](free-programming-books-subjects.md). 48 | 49 | ### ABAP 50 | 51 | - [SAP Code Style Guides - Clean ABAP](https://github.com/SAP/styleguides/blob/master/clean-abap/CleanABAP.md) 52 | 53 | ### Ada 54 | 55 | - [A Guide to Ada for C and C++ Programmers](http://www.cs.uni.edu/~mccormic/4740/guide-c2ada.pdf) (PDF) 56 | - [Ada Distilled](http://www.adapower.com/pdfs/AdaDistilled07-27-2003.pdf) (PDF) 57 | - [Ada for the C++ or Java Developer](https://www.adacore.com/uploads/books/pdf/Ada_for_the_C_or_Java_Developer-cc.pdf) - Quentin Ochem (PDF) 58 | - [Ada Programming](https://en.wikibooks.org/wiki/Ada_Programming) - Wikibooks 59 | - [Ada Reference Manual - ISO/IEC 8652:2012(E) Language and Standard Libraries](http://www.ada-auth.org/standards/12rm/RM-Final.pdf) (PDF) 60 | - [Introduction To Ada](https://learn.adacore.com/courses/intro-to-ada/index.html) 61 | - [Introduction To SPARK](https://learn.adacore.com/courses/SPARK_for_the_MISRA_C_Developer/index.html) 62 | - [The Big Online Book of Linux Ada Programming](http://www.pegasoft.ca/resources/boblap/book.html) 63 | 64 | ### Workflow 65 | 66 | - [Declare Peace on Virtual Machines. 
A guide to simplifying vm-based development on a Mac](https://leanpub.com/declarepeaceonvms/read) 67 | 68 | ### xBase (dBase / Clipper / Harbour) 69 | 70 | - [Application Development with Harbour](https://en.wikibooks.org/wiki/Application_Development_with_Harbour) - Wikibooks 71 | - [CA-Clipper 5.2 Norton Guide](https://web.archive.org/web/20190516192814/http://www.ousob.com/ng/clguide/) 72 | - [Clipper Tutorial: a Guide to Open Source Clipper(s)]() - Wikibooks 73 | -------------------------------------------------------------------------------- /example/data/1-raw/EbookFoundation/free-programming-books/books/markdownlist_free-programming-books-langs.md: -------------------------------------------------------------------------------- 1 | ## BY PROGRAMMING LANGUAGE 2 | 3 | Originally, this list included a section called "Language Agnostic" for books about programming subjects not restricted to a specific programming language. 4 | That section got so big, we decided to split it into its own file, the [BY SUBJECT file](free-programming-books-subjects.md). 5 | 6 | ### ABAP 7 | 8 | - [SAP Code Style Guides - Clean ABAP](https://github.com/SAP/styleguides/blob/master/clean-abap/CleanABAP.md) 9 | 10 | ### Ada 11 | 12 | - [A Guide to Ada for C and C++ Programmers](http://www.cs.uni.edu/~mccormic/4740/guide-c2ada.pdf) (PDF) 13 | - [Ada Distilled](http://www.adapower.com/pdfs/AdaDistilled07-27-2003.pdf) (PDF) 14 | - [Ada for the C++ or Java Developer](https://www.adacore.com/uploads/books/pdf/Ada_for_the_C_or_Java_Developer-cc.pdf) - Quentin Ochem (PDF) 15 | - [Ada Programming](https://en.wikibooks.org/wiki/Ada_Programming) - Wikibooks 16 | - [Ada Reference Manual - ISO/IEC 8652:2012(E) Language and Standard Libraries](http://www.ada-auth.org/standards/12rm/RM-Final.pdf) (PDF) 17 | - [Introduction To Ada](https://learn.adacore.com/courses/intro-to-ada/index.html) 18 | - [Introduction To SPARK](https://learn.adacore.com/courses/SPARK_for_the_MISRA_C_Developer/index.html) 19 | - [The Big Online Book of Linux Ada Programming](http://www.pegasoft.ca/resources/boblap/book.html) 20 | 21 | ### Workflow 22 | 23 | - [Declare Peace on Virtual Machines. 
A guide to simplifying vm-based development on a Mac](https://leanpub.com/declarepeaceonvms/read) 24 | 25 | ### xBase (dBase / Clipper / Harbour) 26 | 27 | - [Application Development with Harbour](https://en.wikibooks.org/wiki/Application_Development_with_Harbour) - Wikibooks 28 | - [CA-Clipper 5.2 Norton Guide](https://web.archive.org/web/20190516192814/http://www.ousob.com/ng/clguide/) 29 | - [Clipper Tutorial: a Guide to Open Source Clipper(s)]() - Wikibooks 30 | -------------------------------------------------------------------------------- /example/data/2-formated/EbookFoundation/free-programming-books/books/free-programming-books-zh.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "", 3 | "items": {} 4 | } 5 | -------------------------------------------------------------------------------- /example/data/2-formated/EbookFoundation/free-programming-books/readme.json: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trackawesomelist/trackawesomelist-source/cac86ac7e22af4f8c6d27dbe660ac04ad45a9332/example/data/2-formated/EbookFoundation/free-programming-books/readme.json -------------------------------------------------------------------------------- /example/data/data.json: -------------------------------------------------------------------------------- 1 | { 2 | "sources": { 3 | "xxxx/xxx": { 4 | "updated": "2019-01-01T00:00:00.000Z" 5 | }, 6 | "yxxxx/xxx": { 7 | "updated": "2019-01-01T00:00:00.000Z" 8 | } 9 | }, 10 | "items": { 11 | "xxxx/xxx": { 12 | "README.md": { 13 | "sha1": { 14 | "updated_at": 12341, 15 | "updated_day": 1234, 16 | "updated_week": 234234 17 | } 18 | } 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /example/data/github_awesome_nodejs.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "awesome", 3 | "url": "github.com/sindresorhus/awesome", 4 | "description": "", 5 | "description_html": "", 6 | "items": { 7 | "item": { 8 | "date_published": "2019-01-01" 9 | } 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /example/mac.md: -------------------------------------------------------------------------------- 1 | # Awesome Mac 2 | 3 | ## Text Editors 4 | 5 | - [Bootstrap Studio](https://bootstrapstudio.io/) - A powerful desktop app for creating responsive websites using the Bootstrap framework. 6 | - [Brackets](http://brackets.io) - A modern, open source text editor that understands web design. [![Open-Source Software][oss icon]](https://github.com/adobe/brackets/) ![Freeware][freeware icon] 7 | -------------------------------------------------------------------------------- /example/public-apis-simple.md: -------------------------------------------------------------------------------- 1 |
2 | Public APIs
3 | A collective list of free APIs for use in software and web development
9 | Status
12 | Number of Categories
15 | Number of APIs
19 | Tests of push and pull
22 | Validate links
25 | Tests of validate package
32 | The Project
34 | Contributing Guide • API for this project • Issues • Pull Requests • License
43 | Alternative sites for the project (unofficials)
45 | Free APIs • Dev Resources • Public APIs Site • Apihouse • Collective APIs
54 | ---
57 | <!-- centered markup, badge images, and an HTML block (original lines 57-87) were stripped during extraction; no further text is recoverable -->
91 | ---
94 | 95 | ## Index 96 | 97 | - [Animals](#animals) 98 | - [Anime](#anime) 99 | - [Anti-Malware](#anti-malware) 100 | - [Art & Design](#art--design) 101 | - [Authentication & Authorization](#authentication--authorization) 102 | - [Blockchain](#blockchain) 103 | - [Books](#books) 104 | - [Business](#business) 105 | - [Calendar](#calendar) 106 | - [Cloud Storage & File Sharing](#cloud-storage--file-sharing) 107 | - [Continuous Integration](#continuous-integration) 108 | - [Cryptocurrency](#cryptocurrency) 109 | - [Currency Exchange](#currency-exchange) 110 | - [Data Validation](#data-validation) 111 | - [Development](#development) 112 | - [Dictionaries](#dictionaries) 113 | - [Documents & Productivity](#documents--productivity) 114 | - [Email](#email) 115 | - [Entertainment](#entertainment) 116 | - [Environment](#environment) 117 | - [Events](#events) 118 | - [Finance](#finance) 119 | - [Food & Drink](#food--drink) 120 | - [Games & Comics](#games--comics) 121 | - [Geocoding](#geocoding) 122 | - [Government](#government) 123 | - [Health](#health) 124 | - [Jobs](#jobs) 125 | - [Machine Learning](#machine-learning) 126 | - [Music](#music) 127 | - [News](#news) 128 | - [Open Data](#open-data) 129 | - [Open Source Projects](#open-source-projects) 130 | - [Patent](#patent) 131 | - [Personality](#personality) 132 | - [Phone](#phone) 133 | - [Photography](#photography) 134 | - [Programming](#programming) 135 | - [Science & Math](#science--math) 136 | - [Security](#security) 137 | - [Shopping](#shopping) 138 | - [Social](#social) 139 | - [Sports & Fitness](#sports--fitness) 140 | - [Test Data](#test-data) 141 | - [Text Analysis](#text-analysis) 142 | - [Tracking](#tracking) 143 | - [Transportation](#transportation) 144 | - [URL Shorteners](#url-shorteners) 145 | - [Vehicle](#vehicle) 146 | - [Video](#video) 147 | - [Weather](#weather) 148 | 149 | ### Animals 150 | 151 | | API | Description | Auth | HTTPS | CORS | 152 | | ---------------------------------------------------------------- | ---------------------------------------- | -------- | ----- | ------- | 153 | | [AdoptAPet](https://www.adoptapet.com/public/apis/pet_list.html) | Resource to help get pets adopted | `apiKey` | Yes | Yes | 154 | | [Axolotl](https://theaxolotlapi.netlify.app/) | Collection of axolotl pictures and facts | No | Yes | No | 155 | | [xeno-canto](https://xeno-canto.org/explore/api) | Bird recordings | No | Yes | Unknown | 156 | | [Zoo Animals](https://zoo-animal-api.herokuapp.com/) | Facts and pictures of zoo animals | No | Yes | Yes | 157 | 158 | **[⬆ Back to Index](#index)** 159 | 160 | ### Anime 161 | 162 | | API | Description | Auth | HTTPS | CORS | 163 | | -------------------------------------------------------- | ------------------------------------------------------------------------ | -------- | ----- | ------- | 164 | | [AniAPI](https://aniapi.com/docs/) | Anime discovery, streaming & syncing with trackers | `OAuth` | Yes | Yes | 165 | | [AniDB](https://wiki.anidb.net/HTTP_API_Definition) | Anime Database | `apiKey` | No | Unknown | 166 | | [AniList](https://github.com/AniList/ApiV2-GraphQL-Docs) | Anime discovery & tracking | `OAuth` | Yes | Unknown | 167 | | [Waifu.im](https://waifu.im/docs) | Get waifu pictures from an archive of over 4000 images and multiple tags | No | Yes | Yes | 168 | | [Waifu.pics](https://waifu.pics/docs) | Image sharing platform for anime images | No | Yes | No | 169 | 170 | **[⬆ Back to Index](#index)** 171 | -------------------------------------------------------------------------------- 
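The fixture above is the kind of GFM table the repo's table parser (parser/markdown/table.ts) consumes. As a rough standalone sketch, assuming the mdast helpers re-exported by deps.ts and execution from the repo root, the rows of such a table can be walked like this (illustrative only, not the project's actual parser):

import { fromMarkdown, gfm, gfmFromMarkdown } from "./deps.ts";

const md = `| API | Description |
| --- | --- |
| [AdoptAPet](https://www.adoptapet.com/) | Resource to help get pets adopted |`;

// parse with the GitHub-flavored-markdown extensions, the same way db.ts does
const tree = fromMarkdown(md, "utf8", {
  extensions: [gfm()],
  mdastExtensions: [gfmFromMarkdown()],
});

for (const node of tree.children) {
  if (node.type === "table") {
    // the first row is the header; each later row is one API entry
    for (const row of node.children.slice(1)) {
      console.log(JSON.stringify(row.children[0]));
    }
  }
}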
/example/public/EbookFoundation/free-programming-books/books/free-programming-books-zh.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trackawesomelist/trackawesomelist-source/cac86ac7e22af4f8c6d27dbe660ac04ad45a9332/example/public/EbookFoundation/free-programming-books/books/free-programming-books-zh.md -------------------------------------------------------------------------------- /example/public/EbookFoundation/free-programming-books/books/free-programming-books-zh/feed.json: -------------------------------------------------------------------------------- 1 | { 2 | "items": [ 3 | { 4 | "id": "1", 5 | "title": "Item 1", 6 | "content_html": "This is a description for Item 1" 7 | } 8 | ] 9 | } 10 | -------------------------------------------------------------------------------- /example/public/EbookFoundation/free-programming-books/books/free-programming-books-zh/index.html: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /example/public/EbookFoundation/free-programming-books/index.html: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trackawesomelist/trackawesomelist-source/cac86ac7e22af4f8c6d27dbe660ac04ad45a9332/example/public/EbookFoundation/free-programming-books/index.html -------------------------------------------------------------------------------- /example/public/EbookFoundation/free-programming-books/readme.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trackawesomelist/trackawesomelist-source/cac86ac7e22af4f8c6d27dbe660ac04ad45a9332/example/public/EbookFoundation/free-programming-books/readme.md -------------------------------------------------------------------------------- /example/repo-meta.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": 54346799, 3 | "node_id": "MDEwOlJlcG9zaXRvcnk1NDM0Njc5OQ==", 4 | "name": "public-apis", 5 | "full_name": "public-apis/public-apis", 6 | "private": false, 7 | "owner": { 8 | "login": "public-apis", 9 | "id": 51121562, 10 | "node_id": "MDEyOk9yZ2FuaXphdGlvbjUxMTIxNTYy", 11 | "avatar_url": "https://avatars.githubusercontent.com/u/51121562?v=4", 12 | "gravatar_id": "", 13 | "url": "https://api.github.com/users/public-apis", 14 | "html_url": "https://github.com/public-apis", 15 | "followers_url": "https://api.github.com/users/public-apis/followers", 16 | "following_url": "https://api.github.com/users/public-apis/following{/other_user}", 17 | "gists_url": "https://api.github.com/users/public-apis/gists{/gist_id}", 18 | "starred_url": "https://api.github.com/users/public-apis/starred{/owner}{/repo}", 19 | "subscriptions_url": "https://api.github.com/users/public-apis/subscriptions", 20 | "organizations_url": "https://api.github.com/users/public-apis/orgs", 21 | "repos_url": "https://api.github.com/users/public-apis/repos", 22 | "events_url": "https://api.github.com/users/public-apis/events{/privacy}", 23 | "received_events_url": "https://api.github.com/users/public-apis/received_events", 24 | "type": "Organization", 25 | "site_admin": false 26 | }, 27 | "html_url": "https://github.com/public-apis/public-apis", 28 | "description": "A collective list of free APIs", 29 | "fork": false, 30 | "url": "https://api.github.com/repos/public-apis/public-apis", 31 | "forks_url": 
"https://api.github.com/repos/public-apis/public-apis/forks", 32 | "keys_url": "https://api.github.com/repos/public-apis/public-apis/keys{/key_id}", 33 | "collaborators_url": "https://api.github.com/repos/public-apis/public-apis/collaborators{/collaborator}", 34 | "teams_url": "https://api.github.com/repos/public-apis/public-apis/teams", 35 | "hooks_url": "https://api.github.com/repos/public-apis/public-apis/hooks", 36 | "issue_events_url": "https://api.github.com/repos/public-apis/public-apis/issues/events{/number}", 37 | "events_url": "https://api.github.com/repos/public-apis/public-apis/events", 38 | "assignees_url": "https://api.github.com/repos/public-apis/public-apis/assignees{/user}", 39 | "branches_url": "https://api.github.com/repos/public-apis/public-apis/branches{/branch}", 40 | "tags_url": "https://api.github.com/repos/public-apis/public-apis/tags", 41 | "blobs_url": "https://api.github.com/repos/public-apis/public-apis/git/blobs{/sha}", 42 | "git_tags_url": "https://api.github.com/repos/public-apis/public-apis/git/tags{/sha}", 43 | "git_refs_url": "https://api.github.com/repos/public-apis/public-apis/git/refs{/sha}", 44 | "trees_url": "https://api.github.com/repos/public-apis/public-apis/git/trees{/sha}", 45 | "statuses_url": "https://api.github.com/repos/public-apis/public-apis/statuses/{sha}", 46 | "languages_url": "https://api.github.com/repos/public-apis/public-apis/languages", 47 | "stargazers_url": "https://api.github.com/repos/public-apis/public-apis/stargazers", 48 | "contributors_url": "https://api.github.com/repos/public-apis/public-apis/contributors", 49 | "subscribers_url": "https://api.github.com/repos/public-apis/public-apis/subscribers", 50 | "subscription_url": "https://api.github.com/repos/public-apis/public-apis/subscription", 51 | "commits_url": "https://api.github.com/repos/public-apis/public-apis/commits{/sha}", 52 | "git_commits_url": "https://api.github.com/repos/public-apis/public-apis/git/commits{/sha}", 53 | "comments_url": "https://api.github.com/repos/public-apis/public-apis/comments{/number}", 54 | "issue_comment_url": "https://api.github.com/repos/public-apis/public-apis/issues/comments{/number}", 55 | "contents_url": "https://api.github.com/repos/public-apis/public-apis/contents/{+path}", 56 | "compare_url": "https://api.github.com/repos/public-apis/public-apis/compare/{base}...{head}", 57 | "merges_url": "https://api.github.com/repos/public-apis/public-apis/merges", 58 | "archive_url": "https://api.github.com/repos/public-apis/public-apis/{archive_format}{/ref}", 59 | "downloads_url": "https://api.github.com/repos/public-apis/public-apis/downloads", 60 | "issues_url": "https://api.github.com/repos/public-apis/public-apis/issues{/number}", 61 | "pulls_url": "https://api.github.com/repos/public-apis/public-apis/pulls{/number}", 62 | "milestones_url": "https://api.github.com/repos/public-apis/public-apis/milestones{/number}", 63 | "notifications_url": "https://api.github.com/repos/public-apis/public-apis/notifications{?since,all,participating}", 64 | "labels_url": "https://api.github.com/repos/public-apis/public-apis/labels{/name}", 65 | "releases_url": "https://api.github.com/repos/public-apis/public-apis/releases{/id}", 66 | "deployments_url": "https://api.github.com/repos/public-apis/public-apis/deployments", 67 | "created_at": "2016-03-20T23:49:42Z", 68 | "updated_at": "2022-10-01T01:27:15Z", 69 | "pushed_at": "2022-09-26T16:04:55Z", 70 | "git_url": "git://github.com/public-apis/public-apis.git", 71 | "ssh_url": 
"git@github.com:public-apis/public-apis.git", 72 | "clone_url": "https://github.com/public-apis/public-apis.git", 73 | "svn_url": "https://github.com/public-apis/public-apis", 74 | "homepage": "http://public-apis.org", 75 | "size": 5031, 76 | "stargazers_count": 210463, 77 | "watchers_count": 210463, 78 | "language": "Python", 79 | "has_issues": true, 80 | "has_projects": false, 81 | "has_downloads": true, 82 | "has_wiki": false, 83 | "has_pages": false, 84 | "forks_count": 24096, 85 | "mirror_url": null, 86 | "archived": false, 87 | "disabled": false, 88 | "open_issues_count": 61, 89 | "license": { 90 | "key": "mit", 91 | "name": "MIT License", 92 | "spdx_id": "MIT", 93 | "url": "https://api.github.com/licenses/mit", 94 | "node_id": "MDc6TGljZW5zZTEz" 95 | }, 96 | "allow_forking": true, 97 | "is_template": false, 98 | "web_commit_signoff_required": false, 99 | "topics": [ 100 | "api", 101 | "apis", 102 | "dataset", 103 | "development", 104 | "free", 105 | "list", 106 | "lists", 107 | "open-source", 108 | "public", 109 | "public-api", 110 | "public-apis", 111 | "resources", 112 | "software" 113 | ], 114 | "visibility": "public", 115 | "forks": 24096, 116 | "open_issues": 61, 117 | "watchers": 210463, 118 | "default_branch": "master", 119 | "permissions": { 120 | "admin": false, 121 | "maintain": false, 122 | "push": false, 123 | "triage": false, 124 | "pull": true 125 | }, 126 | "temp_clone_token": "", 127 | "organization": { 128 | "login": "public-apis", 129 | "id": 51121562, 130 | "node_id": "MDEyOk9yZ2FuaXphdGlvbjUxMTIxNTYy", 131 | "avatar_url": "https://avatars.githubusercontent.com/u/51121562?v=4", 132 | "gravatar_id": "", 133 | "url": "https://api.github.com/users/public-apis", 134 | "html_url": "https://github.com/public-apis", 135 | "followers_url": "https://api.github.com/users/public-apis/followers", 136 | "following_url": "https://api.github.com/users/public-apis/following{/other_user}", 137 | "gists_url": "https://api.github.com/users/public-apis/gists{/gist_id}", 138 | "starred_url": "https://api.github.com/users/public-apis/starred{/owner}{/repo}", 139 | "subscriptions_url": "https://api.github.com/users/public-apis/subscriptions", 140 | "organizations_url": "https://api.github.com/users/public-apis/orgs", 141 | "repos_url": "https://api.github.com/users/public-apis/repos", 142 | "events_url": "https://api.github.com/users/public-apis/events{/privacy}", 143 | "received_events_url": "https://api.github.com/users/public-apis/received_events", 144 | "type": "Organization", 145 | "site_admin": false 146 | }, 147 | "network_count": 24096, 148 | "subscribers_count": 3683 149 | } 150 | -------------------------------------------------------------------------------- /example/simple.md: -------------------------------------------------------------------------------- 1 | # Title 2 | 3 | Introduction 4 | 5 | ## Subtitle1 6 | 7 | Subintroduction1 8 | 9 | - Item1 10 | - Item2 11 | 12 | ## Subtitle2 13 | 14 | Subintroduction2 15 | 16 | - Item1 17 | - Item2 18 | -------------------------------------------------------------------------------- /fetch-sources.ts: -------------------------------------------------------------------------------- 1 | import { 2 | getDayNumber, 3 | getDbCachedStars, 4 | getDbIndex, 5 | getDbMeta, 6 | getWeekNumber, 7 | sha1, 8 | writeDbCachedStars, 9 | writeDbIndex, 10 | writeDbMeta, 11 | writeJSONFile, 12 | } from "./util.ts"; 13 | import parser from "./parser/mod.ts"; 14 | import log from "./log.ts"; 15 | import { 16 | FileInfo, 17 | Item, 18 | 
ParsedItemsFilePath, 19 | RepoMetaOverride, 20 | RunOptions, 21 | } from "./interface.ts"; 22 | import initItems from "./init-items.ts"; 23 | import Github from "./adapters/github.ts"; 24 | import { getItems, updateFile, updateItems } from "./db.ts"; 25 | import renderMarkdown from "./render-markdown.ts"; 26 | export default async function (options: RunOptions) { 27 | const force = options.forceFetch; 28 | const isRebuild = options.rebuild; 29 | const config = options.config; 30 | const file_min_updated_hours = config.file_min_updated_hours; 31 | const sourcesMap = config.sources; 32 | let sourceIdentifiers = options.sourceIdentifiers; 33 | let isSpecificSource = true; 34 | if (sourceIdentifiers.length === 0) { 35 | isSpecificSource = false; 36 | sourceIdentifiers = Object.keys(sourcesMap); 37 | } 38 | // limit 39 | const limit = options.limit; 40 | if (limit && limit > 0) { 41 | sourceIdentifiers = sourceIdentifiers.slice(0, limit); 42 | } 43 | const dbMeta = await getDbMeta(); 44 | const dbIndex = await getDbIndex(); 45 | const dbCachedStars = await getDbCachedStars(); 46 | const dbSources = dbMeta.sources; 47 | 48 | const invalidFiles: ParsedItemsFilePath[] = []; 49 | let sourceIndex = 0; 50 | 51 | try { 52 | for (const sourceIdentifier of sourceIdentifiers) { 53 | sourceIndex++; 54 | log.info( 55 | `[${sourceIndex}/${sourceIdentifiers.length}] Fetching source: ${sourceIdentifier}`, 56 | ); 57 | const source = sourcesMap[sourceIdentifier]; 58 | if (source && source.skip) { 59 | log.info(`source ${sourceIdentifier} is skipped`); 60 | continue; 61 | } 62 | const files = source.files; 63 | 64 | if (!dbSources[sourceIdentifier] || (isSpecificSource && isRebuild)) { 65 | // need to init source 66 | await initItems(source, options, dbMeta, dbIndex, dbCachedStars); 67 | continue; 68 | } else { 69 | // check whether all files are initialized 70 | const dbSource = dbSources[sourceIdentifier]; 71 | const dbFiles = dbSource.files; 72 | const dbFileKeys = Object.keys(dbFiles); 73 | const isAllFilesInit = Object.keys(files).every((file) => { 74 | return dbFileKeys.includes(file); 75 | }); 76 | if (!isAllFilesInit) { 77 | // need to init source 78 | await initItems(source, options, dbMeta, dbIndex, dbCachedStars); 79 | continue; 80 | } 81 | } 82 | 83 | const dbSource = dbSources[sourceIdentifier]; 84 | const dbFiles = dbSource.files; 85 | const api = new Github(source); 86 | const fileKeys = Object.keys(files); 87 | let fileIndex = 0; 88 | // get file content and save it to raw data path 89 | for (const file of fileKeys) { 90 | fileIndex++; 91 | const dbFileMeta = dbFiles[file]; 92 | let isRebuild = false; 93 | 94 | if (dbFileMeta) { 95 | const dbFileMetaUpdatedAt = new Date(dbFileMeta.updated_at); 96 | if (dbFileMetaUpdatedAt.getTime() === 0) { 97 | log.info( 98 | `[${fileIndex}/${fileKeys.length}] ${source.identifier}/${file} failed to parse, trying to rebuild it.`, 99 | ); 100 | isRebuild = true; 101 | } 102 | } 103 | 104 | if (!dbFileMeta) { 105 | // reinit items 106 | isRebuild = true; 107 | } 108 | 109 | if (isRebuild) { 110 | await initItems(source, options, dbMeta, dbIndex, dbCachedStars); 111 | break; 112 | } 113 | 114 | // check whether the file was updated recently 115 | 116 | const dbFileUpdated = new Date(dbFileMeta.checked_at); 117 | 118 | const now = new Date(); 119 | const diff = now.getTime() - dbFileUpdated.getTime(); 120 | 121 | if (!force && (diff / 1000 / 60 / 60) < file_min_updated_hours) { 122 | // TODO: add a max-number limit 123 | // not updated recently, skip 124 | log.info( 125 |
`[${fileIndex}/${fileKeys.length}] ${sourceIdentifier}/${file} updated less than ${file_min_updated_hours} hours ago, skip`, 126 | ); 127 | continue; 128 | } else if (force) { 129 | log.info( 130 | `${sourceIdentifier}/${file} updated less than ${file_min_updated_hours} hours ago, but force is set, updating anyway`, 131 | ); 132 | } 133 | log.info( 134 | `[${sourceIndex}/${sourceIdentifiers.length}] try updating ${sourceIdentifier}/${file}`, 135 | ); 136 | const content = await api.getConent(file, source.default_branch); 137 | const contentSha1 = await sha1(content); 138 | const dbFileSha1 = dbFileMeta.sha1; 139 | log.debug( 140 | "dbFileSha1", 141 | dbFileSha1, 142 | "latest file contentSha1", 143 | contentSha1, 144 | ); 145 | 146 | if (dbFileSha1 === contentSha1 && !force) { 147 | log.info(`${file} is up to date, because its sha1 is unchanged`); 148 | // update checked_at 149 | dbFileMeta.checked_at = new Date().toISOString(); 150 | continue; 151 | } else { 152 | let items: Record<string, Item> = {}; 153 | try { 154 | items = await getItems(sourceIdentifier, file); 155 | } catch (e) { 156 | log.warn(`get items error`, e); 157 | // try to reinit 158 | await initItems(source, options, dbMeta, dbIndex, dbCachedStars); 159 | continue; 160 | } 161 | const fileInfo: FileInfo = { 162 | sourceConfig: source, 163 | filepath: file, 164 | sourceMeta: dbSource, 165 | }; 166 | 167 | const docItems = await parser(content, fileInfo, dbCachedStars); 168 | // compare updated items 169 | const newItems: Record<string, Item> = {}; 170 | let newCount = 0; 171 | let totalCount = 0; 172 | let fileUpdatedAt = new Date(0); 173 | 174 | for (const docItem of docItems) { 175 | const itemSha1 = await sha1(docItem.rawMarkdown); 176 | totalCount++; 177 | // check whether this markdown item already exists 178 | if (items[itemSha1]) { 179 | // it's an old item, 180 | // keep its original timestamps 181 | newItems[itemSha1] = { 182 | source_identifier: sourceIdentifier, 183 | file, 184 | sha1: itemSha1, 185 | markdown: docItem.formatedMarkdown, 186 | html: renderMarkdown(docItem.formatedMarkdown), 187 | category: docItem.category, 188 | category_html: renderMarkdown(docItem.category), 189 | updated_at: items[itemSha1].updated_at, 190 | checked_at: now.toISOString(), 191 | updated_day: items[itemSha1].updated_day, 192 | updated_week: items[itemSha1].updated_week, 193 | }; 194 | if (new Date(items[itemSha1].updated_at) > fileUpdatedAt) { 195 | fileUpdatedAt = new Date(items[itemSha1].updated_at); 196 | } 197 | } else { 198 | newCount++; 199 | const now = new Date(); 200 | // this is a new item, 201 | // stamp it with the current time 202 | // and add it to items 203 | newItems[itemSha1] = { 204 | source_identifier: sourceIdentifier, 205 | file, 206 | sha1: itemSha1, 207 | markdown: docItem.formatedMarkdown, 208 | html: renderMarkdown(docItem.formatedMarkdown), 209 | category: docItem.category, 210 | category_html: renderMarkdown(docItem.category), 211 | updated_at: now.toISOString(), 212 | checked_at: now.toISOString(), 213 | updated_day: getDayNumber(now), 214 | updated_week: getWeekNumber(now), 215 | }; 216 | if (now > fileUpdatedAt) { 217 | fileUpdatedAt = now; 218 | } 219 | } 220 | } 221 | 222 | await updateFile(fileInfo, content, dbCachedStars); 223 | await updateItems(fileInfo, newItems, dbIndex); 224 | 225 | dbFiles[file] = { 226 | ...dbFiles[file], 227 | updated_at: fileUpdatedAt.toISOString(), 228 | checked_at: now.toISOString(), 229 | sha1: contentSha1, 230 | }; 231 | log.info( 232 | `[${sourceIndex}/${sourceIdentifiers.length}] ${sourceIdentifier}/${file} updated, ${newCount} new items, ${totalCount} total items`, 233 | ); 234 | if (totalCount < 10) { 235 |
invalidFiles.push({ 236 | sourceIdentifier, 237 | originalFilepath: file, 238 | }); 239 | } 240 | // if the total item count is suspiciously low (< 10), the file was recorded above for manual review 241 | // also update repoMeta 242 | 243 | const metaOverrides: RepoMetaOverride = {}; 244 | if (source.default_branch) { 245 | metaOverrides.default_branch = source.default_branch; 246 | } 247 | const meta = await api.getRepoMeta(metaOverrides); 248 | dbSource.meta = meta; 249 | dbMeta.sources[sourceIdentifier].meta = { 250 | ...dbSource.meta, 251 | ...meta, 252 | }; 253 | } 254 | } 255 | dbMeta.sources[sourceIdentifier].files = dbFiles; 256 | dbMeta.sources[sourceIdentifier].updated_at = new Date().toISOString(); 257 | } 258 | // write to dbMeta 259 | await writeDbMeta(dbMeta); 260 | await writeDbIndex(dbIndex); 261 | await writeDbCachedStars(dbCachedStars); 262 | } catch (e) { 263 | // write to dbMeta 264 | await writeDbMeta(dbMeta); 265 | await writeDbIndex(dbIndex); 266 | await writeDbCachedStars(dbCachedStars); 267 | throw e; 268 | } 269 | if (invalidFiles.length > 0) { 270 | log.error(`Some files are invalid, please check them manually`); 271 | log.error(invalidFiles); 272 | await writeJSONFile("temp-invalid-files.json", invalidFiles); 273 | } 274 | } 275 | -------------------------------------------------------------------------------- /format-category.ts: -------------------------------------------------------------------------------- 1 | import { Content, remove, Root, toMarkdown } from "./deps.ts"; 2 | 3 | export default function formatItemMarkdown( 4 | item: Content | Root, 5 | ): string { 6 | // visit and remove sup item 7 | remove(item, (node, _n) => { 8 | // remove hash link 9 | // remove html 10 | if (node.type === "html") { 11 | return true; 12 | } 13 | if (node.type === "link" && node.url.startsWith("#")) { 14 | return true; 15 | } else { 16 | return false; 17 | } 18 | }); 19 | return toMarkdown(item).trim(); 20 | } 21 | -------------------------------------------------------------------------------- /format-markdown-item.ts: -------------------------------------------------------------------------------- 1 | import { DocItem, ExpiredValue, FileInfo } from "./interface.ts"; 2 | import { Content, Link, pLimit, Root, toMarkdown, visit } from "./deps.ts"; 3 | import { 4 | childrenToMarkdown, 5 | childrenToRoot, 6 | getDomain, 7 | gotGithubStar, 8 | isMock, 9 | promiseLimit, 10 | } from "./util.ts"; 11 | import log from "./log.ts"; 12 | const GithubSpecialOwner = [ 13 | "marketplace", 14 | "help", 15 | "blog", 16 | "about", 17 | "explore", 18 | "topics", 19 | "issues", 20 | "pulls", 21 | "notifications", 22 | "settings", 23 | "new", 24 | "organizations", 25 | "repositories", 26 | "packages", 27 | "people", 28 | "dashboard", 29 | "projects", 30 | "stars", 31 | "gists", 32 | "security", 34 | "pricing", 35 | "customer-stories", 36 | "nonprofit", 37 | "education", 40 | "enterprise", 41 | "login", 42 | "join", 43 | "watching", 45 | "integrations", 165 | "features", 166 | ]; 167 | export interface MatchedNode { 168 | node: Link; 169 | meta: Record<string, string>; 170 | } 171 | export default async function formatItemMarkdown( 172 | item: Content | Root, 173 | fileInfo: FileInfo, 174 | dbCachedStars: Record<string, ExpiredValue>, 175 | ): Promise<Content | Root> { 176 | const sourceConfig = fileInfo.sourceConfig; 177 | const filepath = fileInfo.filepath; 178 | const fileConfig = sourceConfig.files[filepath]; 179 | const sourceMeta = fileInfo.sourceMeta; 180 | const repoMeta = sourceMeta.meta; 181 | const repoUrl = repoMeta.url; 182 | const defaultBranch = repoMeta.default_branch; 183 | const { options } = fileConfig; 184 | // get all github links, and add star badges 185 | const matchedNodes: MatchedNode[] = []; 186 | visit(item, (node) => { 187 | if (node.type === "html") { 188 | if (node.value.includes("<img")) { 189 | // rewrite img src attributes to absolute repo URLs 190 | node.value = node.value.replace( 191 | /src="([^"]+)"/g, (_match, p1) => { 192 | const url = p1; 193 | let formated = p1; 194 | if (url.startsWith("http")) { 195 | // do nothing 196 | } else if (url.startsWith("/")) { 197 | formated = `${repoUrl}/raw/${defaultBranch}${url}`; 198 | } else { 199 | formated = `${repoUrl}/raw/${defaultBranch}/${url}`; 200 | } 201 | const urlObj = new URL(formated); 202 | if (urlObj.hostname === "github.com") { 203 | formated = formated.replace("/blob/", "/raw/"); 204 | } 205 | return `src="${formated}"`; 206 | }); 207 | } 208 | } 209 | if ( 210 | node.type === "link" && 211 | (node.url.startsWith("http:") || node.url.startsWith("https:")) 212 | ) { 213 | const url = node.url; 214 | try { 215 | const urlObj = new URL(url); 216 | if ( 217 | urlObj.hostname === "github.com" && node.children && 218 | node.children.length > 0 && node.children[0].type === "text" && 219 | !node.children[0].value.startsWith("![") 220 | ) { 221 | // skip reserved GitHub pathnames that are not real owner/repo pairs 222 | const pathname = urlObj.pathname; 223 | const pathArr = pathname.split("/"); 224 | const owner = pathArr[1]; 225 | const repo = pathArr[2]; 226 | 227 | if (owner && repo &&
!GithubSpecialOwner.includes(owner)) { 228 | matchedNodes.push({ 229 | node, 230 | meta: { 231 | owner, 232 | repo, 233 | }, 234 | }); 235 | } 236 | } 237 | } catch (e) { 238 | log.debug("url parse error", url, e); 239 | } 240 | } else if ( 241 | node.type === "link" && !node.url.startsWith("#") && 242 | !node.url.includes("://") 243 | ) { 244 | // transform relative link to absolute link 245 | const url = node.url; 246 | if (url.startsWith("/")) { 247 | node.url = `${repoUrl}/blob/${defaultBranch}${url}`; 248 | } else { 249 | node.url = `${repoUrl}/blob/${defaultBranch}/${filepath}/${url}`; 250 | } 251 | } else if (node.type === "image" && !node.url.startsWith("http")) { 252 | const url = node.url; 253 | if (url.startsWith("/")) { 254 | node.url = `${repoUrl}/raw/${defaultBranch}${url}`; 255 | } else { 256 | node.url = `${repoUrl}/raw/${defaultBranch}/${url}`; 257 | } 258 | } 259 | // check is there is blob, replace to raw 260 | if (node.type === "image" && node.url.includes("blob")) { 261 | const urlObj = new URL(node.url); 262 | if (urlObj.hostname === "github.com") { 263 | node.url = node.url.replace("/blob/", "/raw/"); 264 | } 265 | } 266 | }); 267 | if (!isMock()) { 268 | const limit = pLimit(30); 269 | await Promise.all( 270 | matchedNodes.map((matched) => { 271 | const { owner, repo } = matched.meta; 272 | const node = matched.node; 273 | return limit(() => 274 | gotGithubStar(owner, repo, dbCachedStars).then((star: string) => { 275 | if (star) { 276 | const badge = ` (⭐${star})`; 277 | node.children = [ 278 | ...node.children, 279 | { 280 | type: "text", 281 | value: badge, 282 | }, 283 | ]; 284 | } 285 | }).catch((_e) => { 286 | // ignore error 287 | }) 288 | ); 289 | }), 290 | ); 291 | } 292 | return item; 293 | } 294 | -------------------------------------------------------------------------------- /get-git-blame.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-non-null-assertion */ 2 | /** 3 | * Spawns git blame and parses results into JSON, via stream (so, no problem on huge files) 4 | */ 5 | 6 | import { camelCase, readLines } from "./deps.ts"; 7 | 8 | interface BlameOptions { 9 | /** 10 | * Annotate only the given line range. May be specified multiple times. Overlapping ranges are allowed. 
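* Example: range "10,20" is passed to git as -L10,20, i.e. blame only lines 10 through 20 (illustrative value).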
11 | * @see {@link https://git-scm.com/docs/git-blame#_specifying_ranges} 12 | */ 13 | range: string; 14 | ignoreWhitespace: boolean; 15 | workTree: string; 16 | gitDir: string; 17 | rev: string; 18 | } 19 | 20 | interface LineInfo { 21 | sourceLine: number; 22 | resultLine: number; 23 | hash: string; 24 | numberOfLines: number; 25 | author: string; 26 | authorMail: string; 27 | authorTime: number; 28 | authorTz: string; 29 | committer: string; 30 | committerMail: string; 31 | committerTime: number; 32 | committerTz: string; 33 | summary: string; 34 | previous: string; 35 | filename: string; 36 | [k: string]: string | number; 37 | } 38 | 39 | export default async function getGitBlame( 40 | filename: string, 41 | options: Partial<BlameOptions> = {}, 42 | gitPath = "git", 43 | ): Promise<Map<number, LineInfo>> { 44 | /** 45 | * @see {@link https://git-scm.com/docs/git-blame#_options} 46 | */ 47 | const args = ["--no-pager", "blame", "--line-porcelain"]; 48 | if (typeof options.workTree === "string") { 49 | args.unshift(`--work-tree=${options.workTree}`); 50 | } 51 | if (typeof options.gitDir === "string") { 52 | args.unshift(`--git-dir=${options.gitDir}`); 53 | } 54 | if (typeof options.ignoreWhitespace === "boolean") { 55 | args.push("-w"); 56 | } 57 | if (typeof options.range === "string") { 58 | args.push(`-L${options.range}`); 59 | } 60 | if (typeof options.rev === "string") { 61 | args.push(options.rev); 62 | } 63 | const cmd = [gitPath, ...args, "--", filename]; 64 | const process = Deno.run({ 65 | cmd, 66 | cwd: options.workTree, 67 | stdin: "piped", 68 | stdout: "piped", 69 | }); 70 | let currentLine: Partial<LineInfo>; 71 | const linesMap: Map<number, Partial<LineInfo>> = new Map(); 72 | // return linesMap; 73 | 74 | for await (const line of readLines(process.stdout)) { 75 | // https://git-scm.com/docs/git-blame#_the_porcelain_format 76 | // Each blame entry always starts with a line of: 77 | // <40-byte hex sha1> <source line> <result line> <number of lines> 78 | // like: 49790775624c422f67057f7bb936f35df920e391 94 120 3 79 | 80 | const parsedLine = 81 | /^(?<hash>[a-f0-9]{40,40})\s(?<sourceline>\d+)\s(?<resultLine>\d+)\s(?<numLines>\d+)$/ 82 | .exec( 83 | line, 84 | ); 85 | if (parsedLine?.groups) { 86 | // this is a new line info 87 | const sourceLine = parseInt(parsedLine.groups.sourceline, 10); 88 | const resultLine = parseInt(parsedLine?.groups.resultLine, 10); 89 | const numberOfLines = parseInt(parsedLine?.groups.numLines, 10); 90 | currentLine = { 91 | hash: parsedLine.groups.hash, 92 | sourceLine, 93 | resultLine, 94 | numberOfLines, 95 | }; 96 | // set for all lines 97 | for (let i = resultLine; i < resultLine + numberOfLines; i++) { 98 | linesMap.set(i, currentLine); 99 | } 100 | } else { 101 | if (currentLine!)
{ 102 | const commitInfo = 103 | /^(?<token>[a-z]+(-(?<subtoken>[a-z]+))?)\s(?<data>.+)$/.exec( 104 | line, 105 | ); 106 | if (commitInfo?.groups) { 107 | const property = camelCase(commitInfo.groups.token); 108 | let value: string | number = commitInfo.groups.data; 109 | switch (commitInfo.groups.subtoken) { 110 | case "mail": 111 | // remove <> from email 112 | value = value.slice(1, -1); 113 | break; 114 | 115 | case "time": 116 | // parse datestamp into number 117 | value = parseInt(value, 10); 118 | break; 119 | } 120 | currentLine![property] = value; 121 | } 122 | } 123 | } 124 | } 125 | return linesMap as Map<number, LineInfo>; 126 | } 127 | -------------------------------------------------------------------------------- /init-db.ts: -------------------------------------------------------------------------------- 1 | import { 2 | getDbIndexFilePath, 3 | getDbMetaFilePath, 4 | writeJSONFile, 5 | } from "./util.ts"; 6 | import log from "./log.ts"; 7 | import dbInitMeta from "./db-meta-init.json" assert { type: "json" }; 8 | export default async function initDb() { 9 | const dbMetaFilePath = getDbMetaFilePath(); 10 | const dbIndexFilePath = getDbIndexFilePath(); 11 | if (!await Deno.stat(dbMetaFilePath).catch(() => false)) { 12 | log.info("db meta not found, auto init"); 13 | // copy db-meta-init.json 14 | await writeJSONFile(dbMetaFilePath, dbInitMeta); 15 | } 16 | if (!await Deno.stat(dbIndexFilePath).catch(() => false)) { 17 | log.info("db index not found, auto init"); 18 | // copy db-meta-init.json 19 | await writeJSONFile(dbIndexFilePath, {}); 20 | } 21 | } 22 | 23 | if (import.meta.main) { 24 | initDb(); 25 | } 26 | -------------------------------------------------------------------------------- /init-items.ts: -------------------------------------------------------------------------------- 1 | import { 2 | DBIndex, 3 | DBMeta, 4 | ExpiredValue, 5 | FileInfo, 6 | Item, 7 | RepoMetaOverride, 8 | RunOptions, 9 | Source, 10 | } from "./interface.ts"; 11 | import renderMarkdown from "./render-markdown.ts"; 12 | import Github from "./adapters/github.ts"; 13 | import { 14 | exists, 15 | getCachePath, 16 | getDayNumber, 17 | getWeekNumber, 18 | readTextFile, 19 | sha1, 20 | } from "./util.ts"; 21 | import log from "./log.ts"; 22 | import { fs, path } from "./deps.ts"; 23 | import parser from "./parser/mod.ts"; 24 | import getGitBlame from "./get-git-blame.ts"; 25 | import { updateFile, updateItems } from "./db.ts"; 26 | export default async function initItems( 27 | source: Source, 28 | options: RunOptions, 29 | dbMeta: DBMeta, 30 | dbIndex: DBIndex, 31 | dbCachedStars: Record<string, ExpiredValue>, 32 | ) { 33 | // first get repo meta info from api 34 | const api = new Github(source); 35 | const metaOverrides: RepoMetaOverride = {}; 36 | if (source.default_branch) { 37 | metaOverrides.default_branch = source.default_branch; 38 | } 39 | const meta = await api.getRepoMeta(metaOverrides); 40 | const sources = dbMeta.sources; 41 | // check whether the repo folder is empty 42 | const repoPath = path.join(getCachePath(false), "repos", source.identifier); 43 | 44 | const isExist = await exists(repoPath); 45 | log.debug(`repo ${repoPath} exists in cache: ${isExist}`); 46 | 47 | // then git clone the entire repo, and parse the files 48 | if (isExist) { 49 | // try to update 50 | if (options.fetchRepoUpdates) { 51 | const args: string[] = [ 52 | "--work-tree", 53 | repoPath, 54 | "--git-dir", 55 | path.join(repoPath, ".git"), 56 | ]; 57 | 58 | const p = Deno.run({ 59 | cmd: ["git"].concat(args).concat(["pull"]), 60 | }); 61 | await p.status(); 62 | } 63 | } else {
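// no cached clone yet: clone the source repo fresh at its default branch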
64 | // ensure parent folder exists 65 | await fs.ensureDir(path.dirname(repoPath)); 66 | log.info(`cloning ${api.getCloneUrl()} to ${repoPath}`); 67 | // try to clone 68 | const p = Deno.run({ 69 | cmd: [ 70 | "git", 71 | "clone", 72 | "-b", 73 | meta.default_branch, 74 | api.getCloneUrl(), 75 | repoPath, 76 | ], 77 | }); 78 | await p.status(); 79 | } 80 | const now = new Date(); 81 | sources[source.identifier] = sources[source.identifier] || { 82 | created_at: now.toISOString(), 83 | updated_at: now.toISOString(), 84 | meta, 85 | files: {}, 86 | }; 87 | 88 | for (const file of Object.keys(source.files)) { 89 | const fileConfig = source.files[file]; 90 | const blameInfoMap = await getGitBlame(file, { 91 | workTree: repoPath, 92 | gitDir: path.join(repoPath, ".git"), 93 | }); 94 | const items: Record<string, Item> = {}; 95 | const cachedFilePath = path.join(repoPath, file); 96 | const content = await readTextFile(cachedFilePath); 97 | const fileInfo: FileInfo = { 98 | sourceConfig: source, 99 | sourceMeta: sources[source.identifier], 100 | filepath: file, 101 | }; 102 | const docItems = await parser(content, fileInfo, dbCachedStars); 103 | // console.log("docItems", docItems); 104 | let latestUpdatedAt = new Date(0); 105 | for (const docItem of docItems) { 106 | const now = new Date(); 107 | const commitInfo = blameInfoMap.get(docItem.line); 108 | if (commitInfo) { 109 | const itemSha1 = await sha1(docItem.rawMarkdown); 110 | const commitTime = commitInfo.committerTime; 111 | const commitDate = new Date(Number(commitTime) * 1000); 112 | const updatedAt = commitDate.toISOString(); 113 | items[itemSha1] = { 114 | category: docItem.category, 115 | category_html: renderMarkdown(docItem.category), 116 | updated_at: updatedAt, 117 | source_identifier: source.identifier, 118 | file, 119 | markdown: docItem.formatedMarkdown, 120 | html: renderMarkdown(docItem.formatedMarkdown), 121 | sha1: itemSha1, 122 | checked_at: now.toISOString(), 123 | updated_day: getDayNumber(new Date(updatedAt)), 124 | updated_week: getWeekNumber(new Date(updatedAt)), 125 | }; 126 | if (commitDate.getTime() > latestUpdatedAt.getTime()) { 127 | latestUpdatedAt = commitDate; 128 | } 129 | } else { 130 | throw new Error( 131 | `no commit info for ${source.identifier} ${file} ${docItem.line}`, 132 | ); 133 | } 134 | } 135 | const contentSha1 = await sha1(content); 136 | // try to get items updated time 137 | // get created time and updated time from blameinfo 138 | let createdAt = now; 139 | for (const blame of blameInfoMap.values()) { 140 | const commitTime = blame.committerTime; 141 | const commitDate = new Date(Number(commitTime) * 1000); 142 | if (commitDate < createdAt) { 143 | createdAt = commitDate; 144 | } 145 | } 146 | 147 | sources[source.identifier].files[file] = { 148 | sha1: contentSha1, 149 | updated_at: latestUpdatedAt.toISOString(), 150 | meta_created_at: now.toISOString(), 151 | created_at: createdAt.toISOString(), 152 | checked_at: now.toISOString(), 153 | }; 154 | // write to file 155 | // await writeJSONFile(formatedPath, itemsJson); 156 | // write to db 157 | 158 | await updateFile(fileInfo, content, dbCachedStars); 159 | await updateItems(fileInfo, items, dbIndex); 160 | 161 | log.info( 162 | `init ${source.identifier}/${file} success, total ${ 163 | Object.keys(items).length 164 | } items`, 165 | ); 166 | } 167 | dbMeta.sources = sources; 168 | } 169 | -------------------------------------------------------------------------------- /interface.ts: -------------------------------------------------------------------------------- 1 |
import { Content } from "./deps.ts";
2 | export type ExpiredValue = [number, string];
3 | export interface WeekOfYear {
4 | year: number;
5 | week: number;
6 | number: number;
7 | path: string;
8 | date: Date;
9 | id: string;
10 | name: string;
11 | }
12 | export interface CustomRequestOptions {
13 | expires?: number;
14 | }
15 | export interface BuiltMarkdownInfo {
16 | commitMessage: string;
17 | }
18 | export interface RepoMetaOverride {
19 | default_branch?: string;
20 | }
21 | export interface ParseOptions {
22 | min_heading_level?: number;
23 | max_heading_level?: number;
24 | heading_level?: number; // only needed for the heading type
25 | type: "table" | "list" | "heading";
26 | is_parse_category?: boolean;
27 | }
28 | export interface DayInfo {
29 | year: number;
30 | month: number;
31 | day: number;
32 | number: number;
33 | path: string;
34 | name: string;
35 | id: string;
36 | date: Date;
37 | }
38 | export type LevelName = "debug" | "info" | "warn" | "error" | "fatal";
39 | export interface File {
40 | source_identifier: string;
41 | file: string;
42 | }
43 | export enum Level {
44 | Debug = 0,
45 | Info = 1,
46 | Warn = 2,
47 | Error = 3,
48 | Fatal = 4,
49 | }
50 | export interface ApiInfo {
51 | url: string;
52 | headers: Headers;
53 | }
54 | export interface RawSource {
55 | category?: string;
56 | default_branch?: string;
57 | url?: string;
58 | files?: Record<string, RawSourceFile> | string;
59 | skip?: boolean;
60 | }
61 | export interface ParsedItemsFilePath {
62 | originalFilepath: string;
63 | sourceIdentifier: string;
64 | }
65 | export interface Site {
66 | title: string;
67 | description: string;
68 | url: string;
69 | }
70 | export interface RawConfig {
71 | sources: Record<string, RawSource>;
72 | file_min_updated_hours: number;
73 | site: Site;
74 | }
75 | export interface RawSourceFile {
76 | index?: boolean;
77 | name?: string;
78 | options?: ParseOptions;
79 | }
80 | 
81 | export interface FileConfigInfo {
82 | sourceConfig: Source;
83 | filepath: string;
84 | }
85 | export interface FileInfo extends FileConfigInfo {
86 | sourceMeta: DbMetaSource;
87 | filepath: string;
88 | }
89 | export interface FormatMarkdownItemOptions {
90 | repoUrl: string;
91 | defaultBranch: string;
92 | filepath: string;
93 | }
94 | export interface RawSourceFileWithType extends RawSourceFile {
95 | options: ParseOptions;
96 | }
97 | export interface Nav {
98 | name: string;
99 | active?: boolean;
100 | markdown_url?: string;
101 | url?: string;
102 | }
103 | export interface FeedConfig {
104 | nav1: Nav[];
105 | nav2?: Nav[];
106 | }
107 | export interface FileConfig extends RawSourceFile {
108 | filepath: string;
109 | pathname: string;
110 | name: string;
111 | options: ParseOptions;
112 | skip?: boolean;
113 | }
114 | export interface Source {
115 | identifier: string;
116 | url: string;
117 | default_branch?: string;
118 | category: string;
119 | files: Record<string, FileConfig>;
120 | skip?: boolean;
121 | }
122 | export interface ListItem {
123 | name: string;
124 | updated: string;
125 | url: string;
126 | meta: RepoMeta;
127 | star: string;
128 | source_url: string;
129 | }
130 | export interface List {
131 | category: string;
132 | items: ListItem[];
133 | }
134 | export interface Config extends RawConfig {
135 | sources: Record<string, Source>;
136 | }
137 | export interface RunOptions extends CliOptions {
138 | config: Config;
139 | sourceIdentifiers: string[];
140 | }
141 | export interface CliOptions {
142 | debug?: boolean;
143 | force?: boolean;
144 | forceFetch?: boolean;
145 | push?: boolean;
146 | autoInit?: boolean;
147 | fetchRepoUpdates: 
boolean;
148 | markdown: boolean;
149 | fetch: boolean;
150 | cleanMarkdown?: boolean;
151 | cleanHtml?: boolean;
152 | dayMarkdown: boolean;
153 | rebuild?: boolean;
154 | html?: boolean;
155 | serve: boolean;
156 | port: number;
157 | limit?: number;
158 | }
159 | export interface Item {
160 | updated_at: string;
161 | updated_day: number;
162 | updated_week: number;
163 | category: string;
164 | category_html: string;
165 | markdown: string;
166 | html: string;
167 | sha1: string;
168 | source_identifier: string;
169 | file: string;
170 | checked_at: string;
171 | }
172 | export interface ItemDetail extends Item {
173 | updated_day: number;
174 | updated_week: number;
175 | updated_day_info: DayInfo;
176 | updated_week_info: WeekOfYear;
177 | }
178 | export interface DocItem {
179 | rawMarkdown: string;
180 | formatedMarkdown: string;
181 | category: string;
182 | line: number;
183 | }
184 | 
185 | export interface Pagination {
186 | title: string;
187 | pathname: string;
188 | }
189 | export interface PaginationInfo {
190 | prev: Pagination | undefined;
191 | next: Pagination | undefined;
192 | }
193 | export interface BuildOptions {
194 | paginationText: string;
195 | paginationHtml: string;
196 | dbMeta: DBMeta;
197 | dbIndex: DBIndex;
198 | }
199 | export interface RepoMeta {
200 | name: string;
201 | description: string;
202 | url: string;
203 | default_branch: string;
204 | language: string | undefined;
205 | stargazers_count: number;
206 | subscribers_count: number;
207 | forks_count: number;
208 | tags: string[];
209 | created_at: string;
210 | updated_at: string;
211 | checked_at: string;
212 | }
213 | 
214 | export interface ItemsJson {
215 | items: Record<string, Item>;
216 | }
217 | 
218 | export interface ParsedFilename {
219 | name: string;
220 | ext: string;
221 | type: string;
222 | }
223 | export interface FileMeta {
224 | sha1: string;
225 | checked_at: string;
226 | created_at: string;
227 | updated_at: string;
228 | meta_created_at: string;
229 | }
230 | 
231 | export interface FileMetaWithSource extends FileMeta {
232 | sourceIdentifier: string;
233 | filepath: string;
234 | }
235 | 
236 | export interface DbMetaSource {
237 | files: Record<string, FileMeta>;
238 | meta: RepoMeta;
239 | created_at: string;
240 | updated_at: string;
241 | }
242 | export interface DBMeta {
243 | sources: Record<string, DbMetaSource>;
244 | checked_at: string;
245 | }
246 | export interface IndexItem {
247 | t: number;
248 | d: number;
249 | w: number;
250 | }
251 | export type DBIndex = Record<string, IndexItem>;
252 | export interface Author {
253 | url: string;
254 | name: string;
255 | avatar?: string;
256 | }
257 | 
258 | export interface FeedItem {
259 | id: string;
260 | image?: string;
261 | url: string;
262 | _slug: string;
263 | _filepath: string;
264 | summary: string;
265 | date_published: string;
266 | date_modified: string;
267 | tags?: string[];
268 | authors?: Author[];
269 | title: string;
270 | _short_title?: string;
271 | author?: Author;
272 | content_text?: string;
273 | content_html: string;
274 | }
275 | 
276 | export interface BaseFeed {
277 | version: string;
278 | icon: string;
279 | favicon: string;
280 | language: string;
281 | }
282 | export interface FeedInfo extends BaseFeed {
283 | title: string;
284 | _site_title: string;
285 | _seo_title: string;
286 | description: string;
287 | home_page_url: string;
288 | feed_url: string;
289 | }
290 | export interface Feed extends FeedInfo {
291 | items: FeedItem[];
292 | }
293 | 
-------------------------------------------------------------------------------- /lib/gemoji.js: 
-------------------------------------------------------------------------------- 1 | import { visit } from "https://esm.sh/unist-util-visit@4.1.1"; 2 | import { gemoji, nameToEmoji } from "https://cdn.skypack.dev/gemoji@7?dts"; 3 | const find = /:(\+1|[-\w]+):/g; 4 | 5 | const own = {}.hasOwnProperty; 6 | 7 | export default function remarkGemoji() { 8 | return (tree) => { 9 | visit(tree, "text", (node) => { 10 | const value = node.value; 11 | /** @type {string[]} */ 12 | const slices = []; 13 | find.lastIndex = 0; 14 | let match = find.exec(value); 15 | let start = 0; 16 | 17 | while (match) { 18 | const emoji = /** @type {keyof nameToEmoji} */ (match[1]); 19 | const position = match.index; 20 | 21 | if (own.call(nameToEmoji, emoji) || emoji === "octocat") { 22 | if (start !== position) { 23 | slices.push(value.slice(start, position)); 24 | } 25 | let finalEmoji = nameToEmoji[emoji]; 26 | if (!finalEmoji && emoji === "octocat") { 27 | finalEmoji = "🐙"; 28 | } 29 | 30 | slices.push(finalEmoji); 31 | start = position + match[0].length; 32 | } else { 33 | find.lastIndex = position + 1; 34 | } 35 | 36 | match = find.exec(value); 37 | } 38 | 39 | if (slices.length > 0) { 40 | slices.push(value.slice(start)); 41 | node.value = slices.join(""); 42 | } 43 | }); 44 | }; 45 | } 46 | -------------------------------------------------------------------------------- /local.kak: -------------------------------------------------------------------------------- 1 | set-option global lsp_toml_path "~/.config/kak-lsp/kak-lsp-deno.toml" 2 | -------------------------------------------------------------------------------- /log.ts: -------------------------------------------------------------------------------- 1 | import { colors } from "./deps.ts"; 2 | import { Level, LevelName } from "./interface.ts"; 3 | 4 | export class Timing { 5 | #t = performance.now(); 6 | 7 | reset() { 8 | this.#t = performance.now(); 9 | } 10 | 11 | stop(message: string) { 12 | const now = performance.now(); 13 | const d = Math.round(now - this.#t); 14 | let cf = colors.green; 15 | if (d > 10000) { 16 | cf = colors.red; 17 | } else if (d > 1000) { 18 | cf = colors.yellow; 19 | } 20 | console.debug(colors.dim("TIMING"), message, "in", cf(d + "ms")); 21 | this.#t = now; 22 | } 23 | } 24 | 25 | export class Logger { 26 | #level: Level = Level.Info; 27 | 28 | get level(): Level { 29 | return this.#level; 30 | } 31 | 32 | setLevel(level: LevelName): void { 33 | switch (level) { 34 | case "debug": 35 | this.#level = Level.Debug; 36 | break; 37 | case "info": 38 | this.#level = Level.Info; 39 | break; 40 | case "warn": 41 | this.#level = Level.Warn; 42 | break; 43 | case "error": 44 | this.#level = Level.Error; 45 | break; 46 | case "fatal": 47 | this.#level = Level.Fatal; 48 | break; 49 | } 50 | } 51 | 52 | debug(...args: unknown[]): void { 53 | if (this.#level <= Level.Debug) { 54 | console.debug(colors.dim("DEBUG"), ...args); 55 | } 56 | } 57 | 58 | info(...args: unknown[]): void { 59 | if (this.#level <= Level.Info) { 60 | console.log(colors.green("INFO"), ...args); 61 | } 62 | } 63 | 64 | warn(...args: unknown[]): void { 65 | if (this.#level <= Level.Warn) { 66 | console.warn(colors.yellow("WARN"), ...args); 67 | } 68 | } 69 | 70 | error(...args: unknown[]): void { 71 | if (this.#level <= Level.Error) { 72 | console.error(colors.red("ERROR"), ...args); 73 | } 74 | } 75 | 76 | fatal(...args: unknown[]): void { 77 | if (this.#level <= Level.Fatal) { 78 | console.error(colors.red("FATAL"), ...args); 79 | Deno.exit(1); 80 | } 81 | } 82 | 83 
| timing(): { reset(): void; stop(message: string): void } {
84 | if (this.level === Level.Debug) {
85 | return new Timing();
86 | }
87 | return { reset: () => {}, stop: () => {} };
88 | }
89 | }
90 | 
91 | export default new Logger();
92 | 
-------------------------------------------------------------------------------- /main.ts: -------------------------------------------------------------------------------- 1 | import log from "./log.ts";
2 | import fetchSources from "./fetch-sources.ts";
3 | import build from "./build.ts";
4 | import serverMarkdown from "./serve-markdown.ts";
5 | import servePublic from "./serve-public.ts";
6 | import { getConfig, getFormatedSource, getSqlitePath, isDev } from "./util.ts";
7 | import { CliOptions, RunOptions } from "./interface.ts";
8 | import initDb from "./init-db.ts";
9 | import buildHtml from "./build-html.ts";
10 | // import db init meta json
11 | export default async function main(cliOptions: CliOptions, ...args: string[]) {
12 | if (cliOptions.debug) {
13 | log.setLevel("debug");
14 | }
15 | const config = await getConfig();
16 | let sourceIdentifiers: string[] = args.length > 0
17 | ? args
18 | : Object.keys(config.sources);
19 | if (
20 | cliOptions.limit && cliOptions.limit > 0
21 | ) {
22 | sourceIdentifiers = sourceIdentifiers.slice(0, cliOptions.limit);
23 | }
24 | // check if source exists
25 | for (const sourceIdentifier of sourceIdentifiers) {
26 | if (config.sources[sourceIdentifier] === undefined) {
27 | config.sources[sourceIdentifier] = getFormatedSource(
28 | sourceIdentifier,
29 | null,
30 | );
31 | }
32 | }
33 | const isBuildHtml = cliOptions.html || false;
34 | const autoInit = cliOptions.autoInit;
35 | if (autoInit || (isDev())) {
36 | await initDb();
37 | }
38 | // init sqlite db
39 | 
40 | // Open a database
41 | const runOptions: RunOptions = {
42 | config: config,
43 | sourceIdentifiers,
44 | ...cliOptions,
45 | };
46 | log.info(
47 | `run options: ${
48 | JSON.stringify({ sourceIdentifiers, ...cliOptions }, null, 2)
49 | }`,
50 | );
51 | if (cliOptions.fetch) {
52 | await fetchSources(runOptions);
53 | } else {
54 | log.info("skip fetch sources");
55 | }
56 | // 2. build markdowns, and htmls
57 | await build(runOptions);
58 | 
59 | // 3. build html
60 | //
61 | // if (isBuildHtml) {
62 | // await buildHtml(runOptions);
63 | // }
64 | 
65 | // 4. 
serve site
66 | if (runOptions.serve) {
67 | log.info("serve site");
68 | // check if there is built html
69 | if (isBuildHtml) {
70 | servePublic();
71 | } else {
72 | // serve the markdown preview files
73 | await serverMarkdown(runOptions);
74 | }
75 | } else {
76 | log.info("skip serve site");
77 | }
78 | }
79 | 
-------------------------------------------------------------------------------- /migrations/to-new-config.ts: -------------------------------------------------------------------------------- 1 | import oldMeta from "./old-meta.json" assert { type: "json" };
2 | import { YAML } from "../deps.ts";
3 | import { Config, RawSource } from "../interface.ts";
4 | import { DEFAULT_CATEGORY } from "../constant.ts";
5 | export function migrate() {
6 | const awesomelist = oldMeta.awesomeList;
7 | const sources: Record<string, RawSource> = {};
8 | const newConfig = YAML.parse(Deno.readTextFileSync("./config.yml")) as Config;
9 | const newSources = newConfig.sources;
10 | for (const repo of awesomelist) {
11 | const source: RawSource = {
12 | category: repo.category,
13 | default_branch: repo.defaultBranch,
14 | files: {
15 | [repo.readmePath]: {
16 | index: true,
17 | },
18 | },
19 | };
20 | sources[repo.repo] = source;
21 | }
22 | const mergedSources = {
23 | ...sources,
24 | ...newSources,
25 | };
26 | 
27 | // re-sort the source keys by category
28 | const sortedSources = Object.fromEntries(
29 | Object.entries(mergedSources).sort((a, b) => {
30 | const aCategory = a[1]?.category || DEFAULT_CATEGORY;
31 | const bCategory = b[1]?.category || DEFAULT_CATEGORY;
32 | if (aCategory > bCategory) {
33 | return 1;
34 | } else if (aCategory < bCategory) {
35 | return -1;
36 | } else {
37 | return 0;
38 | }
39 | }),
40 | );
41 | 
42 | const yamlSource = YAML.stringify({
43 | sources: sortedSources,
44 | });
45 | Deno.writeTextFileSync("./temp-config.yml", yamlSource);
46 | }
47 | 
48 | if (import.meta.main) {
49 | migrate();
50 | }
51 | 
-------------------------------------------------------------------------------- /morsels_config.json: -------------------------------------------------------------------------------- 1 | {
2 | "indexing_config": {
3 | "exclude": [
4 | "index.html",
5 | "print.html",
6 | "404.html",
7 | "morsels_config.json"
8 | ],
9 | "include": [
10 | "*/*/readme/index.html"
11 | ],
12 | "loaders": {
13 | "HtmlLoader": {
14 | "exclude_selectors": [
15 | "script,style,#sidebar,#menu-bar"
16 | ],
17 | "selectors": [
18 | {
19 | "attr_map": {},
20 | "field_name": "title",
21 | "selector": "title"
22 | },
23 | {
24 | "attr_map": {},
25 | "field_name": "h1",
26 | "selector": "h1"
27 | },
28 | {
29 | "attr_map": {},
30 | "field_name": "body",
31 | "selector": "body"
32 | },
33 | {
34 | "attr_map": {
35 | "id": "headingLink"
36 | },
37 | "field_name": "heading",
38 | "selector": "h1,h2,h3,h4,h5,h6"
39 | }
40 | ],
41 | "type": "HtmlLoader"
42 | }
43 | }
44 | },
45 | "preset": "small"
46 | }
47 | 
-------------------------------------------------------------------------------- /package-lock.json: -------------------------------------------------------------------------------- 1 | {}
2 | 
-------------------------------------------------------------------------------- /parser/markdown/heading.ts: -------------------------------------------------------------------------------- 1 | import {
2 | DocItem,
3 | ExpiredValue,
4 | FileInfo,
5 | ParseOptions,
6 | } from "../../interface.ts";
7 | import {
8 | Content,
9 | fromMarkdown,
10 | gfm,
11 | gfmFromMarkdown,
12 | gfmToMarkdown,
13 | Link,
14 | remarkInlineLinks,
15 | 
Root,
16 | toMarkdown,
17 | visit,
18 | } from "../../deps.ts";
19 | import { childrenToRoot, promiseLimit } from "../../util.ts";
20 | import log from "../../log.ts";
21 | import formatMarkdownItem from "../../format-markdown-item.ts";
22 | import { uglyFormatItemIdentifier } from "./util.ts";
23 | export default function (
24 | content: string,
25 | fileInfo: FileInfo,
26 | dbCachedStars: Record<string, ExpiredValue>,
27 | ): Promise<DocItem[]> {
28 | const sourceConfig = fileInfo.sourceConfig;
29 | const fileConfig = sourceConfig.files[fileInfo.filepath];
30 | const options = fileConfig.options;
31 | const isParseCategory = options.is_parse_category === undefined
32 | ? true
33 | : options.is_parse_category;
34 | const items: DocItem[] = [];
35 | const tree = fromMarkdown(content, "utf8", {
36 | extensions: [gfm()],
37 | mdastExtensions: [gfmFromMarkdown()],
38 | });
39 | // @ts-ignore: remarkInlineLinks is not typed
40 | remarkInlineLinks()(tree);
41 | 
42 | let index = 0;
43 | let currentLevel = 0;
44 | let currentSubCategory = "";
45 | let currentCategory = "";
46 | let lowestHeadingLevel = 3;
47 | // first collect the valid sections
48 | let isReachedValidSection = false;
49 | const validSections: Content[] = [];
50 | for (const rootNode of tree.children) {
51 | if (!isReachedValidSection) {
52 | // check is valid now
53 | if (
54 | rootNode.type === "heading" &&
55 | rootNode.depth === options.max_heading_level
56 | ) {
57 | isReachedValidSection = true;
58 | } else {
59 | continue;
60 | }
61 | }
62 | if (rootNode.type === "heading") {
63 | currentLevel = rootNode.depth;
64 | if (
65 | currentLevel > lowestHeadingLevel
66 | ) {
67 | lowestHeadingLevel = currentLevel;
68 | }
69 | validSections.push(rootNode);
70 | } else if (rootNode.type === "list") {
71 | // check if all links are anchor links
72 | // if so, it's a table of contents
73 | // and we ignore it
74 | let externalLinkCount = 0;
75 | let anchorLinkCount = 0;
76 | visit(childrenToRoot(rootNode.children), "link", (node) => {
77 | if (!node.url.startsWith("#")) {
78 | externalLinkCount++;
79 | } else {
80 | anchorLinkCount++;
81 | }
82 | });
83 | // some repos' toc includes a few external links
84 | // we still treat the list as content when external links clearly dominate
85 | // for example: https://github.com/EbookFoundation/free-programming-books/blob/main/books/free-programming-books-langs.md#bootstrap
86 | if (
87 | anchorLinkCount === 0 ||
88 | (externalLinkCount > 10 && anchorLinkCount < 2)
89 | ) {
90 | validSections.push(rootNode);
91 | }
92 | } else if (rootNode.type !== "thematicBreak") {
93 | validSections.push(rootNode);
94 | }
95 | }
96 | const min_heading_level = options.min_heading_level || lowestHeadingLevel;
97 | const max_heading_level = options.max_heading_level || 2;
98 | const heading_level = options.heading_level || 3;
99 | const funcs: (() => Promise<DocItem>)[] = [];
100 | let tempItemSections: Content[] = [];
101 | for (const rootNode of validSections) {
102 | if (rootNode.type === "heading" && rootNode.depth <= heading_level) {
103 | currentLevel = rootNode.depth;
104 | if (currentLevel === heading_level) {
105 | // yes this is item start
106 | if (tempItemSections.length > 0) {
107 | const item = childrenToRoot(tempItemSections);
108 | let category = "";
109 | if (currentCategory) {
110 | category = currentCategory.trim().replace(/\n/g, " ");
111 | }
112 | if (currentSubCategory) {
113 | if (category) {
114 | category += " / ";
115 | }
116 | category += currentSubCategory.trim().replace(/\n/g, " ");
117 | }
118 | const line =
119 | 
tempItemSections[tempItemSections.length - 1].position!.end 120 | .line; 121 | const itemIdentifier = uglyFormatItemIdentifier(fileInfo, item); 122 | const fn = () => { 123 | return formatMarkdownItem(item, fileInfo, dbCachedStars).then( 124 | (formatedItem) => { 125 | return { 126 | formatedMarkdown: toMarkdown(formatedItem, { 127 | extensions: [gfmToMarkdown()], 128 | }).trim(), 129 | rawMarkdown: itemIdentifier, 130 | category: isParseCategory ? category : "", 131 | line, 132 | }; 133 | }, 134 | ); 135 | }; 136 | funcs.push(fn); 137 | } 138 | 139 | tempItemSections = [rootNode]; 140 | } 141 | if ( 142 | currentLevel < min_heading_level && currentLevel >= max_heading_level 143 | ) { 144 | currentCategory = toMarkdown(childrenToRoot(rootNode.children), { 145 | extensions: [gfmToMarkdown()], 146 | }); 147 | } else if (currentLevel === min_heading_level) { 148 | currentSubCategory = toMarkdown(childrenToRoot(rootNode.children), { 149 | extensions: [gfmToMarkdown()], 150 | }); 151 | } 152 | } else { 153 | tempItemSections.push(rootNode); 154 | } 155 | } 156 | 157 | // add last item 158 | if (tempItemSections.length > 1) { 159 | const item = childrenToRoot(tempItemSections); 160 | let category = ""; 161 | // TODO category issue 162 | if (currentCategory) { 163 | category = currentCategory.trim().replace(/\n/g, " "); 164 | } 165 | if (currentSubCategory) { 166 | if (category) { 167 | category += " / "; 168 | } 169 | category += currentSubCategory.trim().replace(/\n/g, " "); 170 | } 171 | const line = tempItemSections[tempItemSections.length - 1].position!.end 172 | .line; 173 | const itemIdentifier = uglyFormatItemIdentifier(fileInfo, item); 174 | const fn = () => { 175 | return formatMarkdownItem(item, fileInfo, dbCachedStars).then( 176 | (formatedItem) => { 177 | return { 178 | formatedMarkdown: toMarkdown(formatedItem, { 179 | extensions: [gfmToMarkdown()], 180 | }).trim(), 181 | rawMarkdown: itemIdentifier, 182 | category: isParseCategory ? 
category : "", 183 | line: line, 184 | }; 185 | }, 186 | ); 187 | }; 188 | funcs.push(fn); 189 | } 190 | return promiseLimit(funcs); 191 | } 192 | 193 | function uglyRemoveAutoGeneratedMarkdown( 194 | fileInfo: FileInfo, 195 | item: Root, 196 | ): Root { 197 | const sourceConfig = fileInfo.sourceConfig; 198 | const fileConfig = sourceConfig.files[fileInfo.filepath]; 199 | const sourceIdentifier = sourceConfig.identifier; 200 | if (sourceIdentifier === "stefanbuck/awesome-browser-extensions-for-github") { 201 | // remove the last part 202 | const children = item.children; 203 | return { 204 | type: "root", 205 | children: children.slice(0, children.length - 1), 206 | }; 207 | } else { 208 | return item; 209 | } 210 | } 211 | -------------------------------------------------------------------------------- /parser/markdown/list.ts: -------------------------------------------------------------------------------- 1 | import { 2 | DocItem, 3 | ExpiredValue, 4 | FileInfo, 5 | ParseOptions, 6 | } from "../../interface.ts"; 7 | import { 8 | Content, 9 | fromMarkdown, 10 | gfm, 11 | gfmFromMarkdown, 12 | gfmToMarkdown, 13 | Link, 14 | remarkInlineLinks, 15 | toMarkdown, 16 | visit, 17 | } from "../../deps.ts"; 18 | import { childrenToRoot, getRepoHTMLURL, promiseLimit } from "../../util.ts"; 19 | import log from "../../log.ts"; 20 | import formatMarkdownItem from "../../format-markdown-item.ts"; 21 | import formatCategory from "../../format-category.ts"; 22 | import { uglyFormatItemIdentifier } from "./util.ts"; 23 | export default function ( 24 | content: string, 25 | fileInfo: FileInfo, 26 | dbCachedStars: Record, 27 | ): Promise { 28 | const sourceConfig = fileInfo.sourceConfig; 29 | const fileConfig = sourceConfig.files[fileInfo.filepath]; 30 | const parseOptions = fileConfig.options; 31 | const isParseCategory = parseOptions.is_parse_category === undefined 32 | ? 
true
33 | : parseOptions.is_parse_category;
34 | const items: DocItem[] = [];
35 | const tree = fromMarkdown(content, "utf8", {
36 | extensions: [gfm()],
37 | mdastExtensions: [gfmFromMarkdown()],
38 | });
39 | // transform inline links to link
40 | // @ts-ignore: remarkInlineLinks is not typed
41 | remarkInlineLinks()(tree);
42 | let index = 0;
43 | let currentLevel = 0;
44 | let currentSubCategory = "";
45 | let currentCategory = "";
46 | let lowestHeadingLevel = 3;
47 | // first collect the valid sections
48 | const validSections: Content[] = [];
49 | let isReachedValidSection = false;
50 | const max_heading_level = parseOptions.max_heading_level || 2;
51 | for (const rootNode of tree.children) {
52 | // start with the first valid max_heading_level
53 | 
54 | if (!isReachedValidSection) {
55 | // check is valid now
56 | if (
57 | rootNode.type === "heading" &&
58 | rootNode.depth === max_heading_level
59 | ) {
60 | isReachedValidSection = true;
61 | } else {
62 | continue;
63 | }
64 | }
65 | 
66 | if (rootNode.type === "heading") {
67 | currentLevel = rootNode.depth;
68 | 
69 | if (
70 | currentLevel > lowestHeadingLevel
71 | ) {
72 | lowestHeadingLevel = currentLevel;
73 | }
74 | validSections.push(rootNode);
75 | } else if (rootNode.type === "list") {
76 | // check if all links are anchor links
77 | // if so, it's a table of contents
78 | // and we ignore it
79 | let externalLinkCount = 0;
80 | let anchorLinkCount = 0;
81 | visit(childrenToRoot(rootNode.children), "link", (node) => {
82 | if (!node.url.startsWith("#")) {
83 | externalLinkCount++;
84 | } else {
85 | anchorLinkCount++;
86 | }
87 | });
88 | // some repos' toc includes a few external links
89 | // we still treat the list as content when external links clearly dominate
90 | // for example: https://github.com/EbookFoundation/free-programming-books/blob/main/books/free-programming-books-langs.md#bootstrap
91 | if (
92 | anchorLinkCount === 0 ||
93 | (externalLinkCount > 10 && anchorLinkCount < 2)
94 | ) {
95 | validSections.push(rootNode);
96 | }
97 | }
98 | }
99 | const min_heading_level = parseOptions.min_heading_level ||
100 | lowestHeadingLevel;
101 | const funcs: (() => Promise<DocItem>)[] = [];
102 | for (const rootNode of validSections) {
103 | if (rootNode.type === "heading") {
104 | currentLevel = rootNode.depth;
105 | 
106 | if (
107 | currentLevel < min_heading_level && currentLevel >= max_heading_level
108 | ) {
109 | currentCategory = formatCategory(
110 | childrenToRoot(rootNode.children),
111 | );
112 | } else if (currentLevel === min_heading_level) {
113 | currentSubCategory = formatCategory(
114 | childrenToRoot(rootNode.children),
115 | );
116 | }
117 | } else if (rootNode.type === "list") {
118 | for (const item of rootNode.children) {
119 | if (item.type === "listItem") {
120 | let category = "";
121 | if (currentCategory) {
122 | category = currentCategory.trim().replace(/\n/g, " ");
123 | }
124 | if (currentSubCategory) {
125 | if (category) {
126 | category += " / ";
127 | }
128 | category += currentSubCategory.trim().replace(/\n/g, " ");
129 | }
130 | const itemIdentifier = uglyFormatItemIdentifier(fileInfo, item);
131 | // console.log("itemIdentifier", itemIdentifier);
132 | if (uglyIsValidCategory(fileInfo, category)) {
133 | funcs.push(() => {
134 | return formatMarkdownItem(item, fileInfo, dbCachedStars).then(
135 | (formatedItem) => {
136 | return {
137 | formatedMarkdown: toMarkdown(formatedItem, {
138 | extensions: [gfmToMarkdown()],
139 | }).trim(),
140 | rawMarkdown: itemIdentifier,
141 | category: 
isParseCategory ? category : "",
142 | line: item.position!.end.line,
143 | };
144 | },
145 | );
146 | });
147 | }
148 | }
149 | }
150 | }
151 | }
152 | 
153 | return promiseLimit(funcs);
154 | }
155 | 
156 | function uglyIsValidCategory(
157 | fileInfo: FileInfo,
158 | category: string,
159 | ): boolean {
160 | const sourceConfig = fileInfo.sourceConfig;
161 | const fileConfig = sourceConfig.files[fileInfo.filepath];
162 | const sourceIdentifier = sourceConfig.identifier;
163 | if (sourceIdentifier === "KotlinBy/awesome-kotlin") {
164 | if (category.startsWith("Github Trending / ")) {
165 | return false;
166 | }
167 | }
168 | return true;
169 | }
170 | 
-------------------------------------------------------------------------------- /parser/markdown/list_test.ts: -------------------------------------------------------------------------------- 1 | import markdownlist from "./list.ts";
2 | import { getFakeFileInfo } from "./util.ts";
3 | import {
4 | getDbCachedStars,
5 | readTextFile,
6 | writeDbCachedStars,
7 | } from "../../util.ts";
8 | import { assertEquals } from "../../test-deps.ts";
9 | Deno.test("markdown list test #3", async () => {
10 | const content = await readTextFile("./example/mac.md");
11 | const dbCachedStars = await getDbCachedStars();
12 | const items = await markdownlist(content, getFakeFileInfo(), dbCachedStars);
13 | await writeDbCachedStars(dbCachedStars);
14 | });
15 | 
-------------------------------------------------------------------------------- /parser/markdown/table.ts: -------------------------------------------------------------------------------- 1 | import {
2 | DocItem,
3 | ExpiredValue,
4 | FileConfigInfo,
5 | FileInfo,
6 | ParseOptions,
7 | } from "../../interface.ts";
8 | import {
9 | Content,
10 | fromMarkdown,
11 | remarkInlineLinks,
12 | TableRow,
13 | toMarkdown,
14 | visit,
15 | } from "../../deps.ts";
16 | import { childrenToRoot, promiseLimit, writeTextFile } from "../../util.ts";
17 | import _log from "../../log.ts";
18 | import formatMarkdownItem from "../../format-markdown-item.ts";
19 | import { gfm, gfmFromMarkdown, gfmToMarkdown } from "../../deps.ts";
20 | 
21 | import { uglyFormatItemIdentifier } from "./util.ts";
22 | export default async function (
23 | content: string,
24 | fileInfo: FileInfo,
25 | dbCachedStars: Record<string, ExpiredValue>,
26 | ): Promise<DocItem[]> {
27 | const sourceConfig = fileInfo.sourceConfig;
28 | const fileConfig = sourceConfig.files[fileInfo.filepath];
29 | const options = fileConfig.options;
30 | const parseOptions = fileConfig.options;
31 | const isParseCategory = parseOptions.is_parse_category === undefined
32 | ? 
true
33 | : parseOptions.is_parse_category;
34 | 
35 | const items: DocItem[] = [];
36 | const tree = fromMarkdown(content, "utf8", {
37 | extensions: [gfm()],
38 | mdastExtensions: [gfmFromMarkdown()],
39 | });
40 | // @ts-ignore: remarkInlineLinks is not typed
41 | remarkInlineLinks()(tree);
42 | 
43 | let index = 0;
44 | let currentLevel = 0;
45 | let currentSubCategory = "";
46 | let currentCategory = "";
47 | let lowestHeadingLevel = 3;
48 | // first collect the valid sections
49 | const validSections: Content[] = [];
50 | for (const rootNode of tree.children) {
51 | if (rootNode.type === "heading") {
52 | currentLevel = rootNode.depth;
53 | if (currentLevel > lowestHeadingLevel) {
54 | lowestHeadingLevel = currentLevel;
55 | }
56 | validSections.push(rootNode);
57 | } else if (rootNode.type === "table") {
58 | validSections.push(rootNode);
59 | }
60 | }
61 | const min_heading_level = options.min_heading_level || lowestHeadingLevel;
62 | const max_heading_level = options.max_heading_level || 2;
63 | const funcs: (() => Promise<DocItem>)[] = [];
64 | // console.log("validSections", validSections);
65 | for (const rootNode of validSections) {
66 | // console.log("rootNode", rootNode);
67 | if (rootNode.type === "heading") {
68 | currentLevel = rootNode.depth;
69 | if (
70 | currentLevel < min_heading_level && currentLevel >= max_heading_level
71 | ) {
72 | currentCategory = toMarkdown(childrenToRoot(rootNode.children));
73 | } else if (currentLevel === min_heading_level) {
74 | currentSubCategory = toMarkdown(childrenToRoot(rootNode.children));
75 | }
76 | } else if (rootNode.type === "table") {
77 | // console.log("rootNode", rootNode);
78 | // await writeTextFile("temp.json", JSON.stringify(rootNode));
79 | let rowIndex = 0;
80 | for (const item of rootNode.children) {
81 | // console.log("item", item);
82 | if (item.type === "tableRow") {
83 | if (rowIndex === 0) {
84 | // first row is header
85 | rowIndex++;
86 | continue;
87 | }
88 | let category = "";
89 | if (currentCategory) {
90 | category = currentCategory.trim().replace(/\n/g, " ");
91 | }
92 | if (currentSubCategory) {
93 | if (category) {
94 | category += " / ";
95 | }
96 | category += currentSubCategory.trim().replace(/\n/g, " ");
97 | }
98 | const itemIdentifier = uglyFormatItemIdentifier(fileInfo, item);
99 | funcs.push(() => {
100 | return formatMarkdownItem(item as TableRow, fileInfo, dbCachedStars)
101 | .then(
102 | (formatedItem) => {
103 | let markdown = "- ";
104 | // transform table row to item
105 | (formatedItem as TableRow).children.forEach(
106 | (child, cellIndex) => {
107 | const tableHeaderCell =
108 | rootNode.children[0].children[cellIndex];
109 | let tableHeaderCellMarkdown = "";
110 | try {
111 | tableHeaderCellMarkdown = toMarkdown(
112 | tableHeaderCell,
113 | {
114 | extensions: [gfmToMarkdown()],
115 | },
116 | ).trim();
117 | } catch (e) {
118 | console.log("e", e);
119 | console.log("tableHeaderCell", tableHeaderCell);
120 | }
121 | const rowCellMarkdown = toMarkdown(
122 | child,
123 | {
124 | extensions: [gfmToMarkdown()],
125 | },
126 | ).trim();
127 | if (cellIndex > 0) {
128 | markdown +=
129 | ` ${tableHeaderCellMarkdown}: ${rowCellMarkdown}\n\n`;
130 | } else {
131 | markdown +=
132 | `${tableHeaderCellMarkdown}: ${rowCellMarkdown}\n\n`;
133 | }
134 | },
135 | );
136 | 
137 | return {
138 | formatedMarkdown: markdown,
139 | rawMarkdown: itemIdentifier,
140 | category: isParseCategory ? 
category : "", 141 | line: item.position!.end.line, 142 | }; 143 | }, 144 | ); 145 | }); 146 | rowIndex++; 147 | } 148 | } 149 | } 150 | } 151 | 152 | return promiseLimit(funcs); 153 | } 154 | -------------------------------------------------------------------------------- /parser/markdown/table_test.ts: -------------------------------------------------------------------------------- 1 | import markdowntable from "./markdowntable.ts"; 2 | import { readTextFile } from "../util.ts"; 3 | import { assertEquals } from "../test-deps.ts"; 4 | Deno.test("markdown table test #1", async () => { 5 | const content = await readTextFile("./example/public-apis-simple.md"); 6 | 7 | const items = await markdowntable(content); 8 | // assertEquals(items, [ 9 | // { markdown: "* Item1\n", categories: ["Subtitle1\n"] }, 10 | // { markdown: "* Item2\n", categories: ["Subtitle1\n"] }, 11 | // { markdown: "* Item1\n", categories: ["Subtitle2\n"] }, 12 | // { markdown: "* Item2\n", categories: ["Subtitle2\n"] }, 13 | // ]); 14 | 15 | // console.log("items", items); 16 | }); 17 | 18 | Deno.test("markdown table test #2", async () => { 19 | const content = await readTextFile("./example/books.md"); 20 | 21 | const items = markdowntable(content); 22 | }); 23 | -------------------------------------------------------------------------------- /parser/markdown/util.ts: -------------------------------------------------------------------------------- 1 | import { DocItem, FileInfo, ParseOptions } from "../../interface.ts"; 2 | import { 3 | Content, 4 | EXIT, 5 | fromMarkdown, 6 | Root, 7 | TableRow, 8 | toMarkdown, 9 | visit, 10 | } from "../../deps.ts"; 11 | import { childrenToRoot, promiseLimit, writeTextFile } from "../../util.ts"; 12 | import _log from "../../log.ts"; 13 | import formatMarkdownItem from "../../format-markdown-item.ts"; 14 | import { gfm, gfmFromMarkdown, gfmToMarkdown } from "../../deps.ts"; 15 | export function getValidSections(tree: Root, options: ParseOptions): Content[] { 16 | let currentLevel = 0; 17 | let currentSubCategory = ""; 18 | let currentCategory = ""; 19 | let lowestHeadingLevel = 3; 20 | // first check valided sections 21 | const validSections: Content[] = []; 22 | for (const rootNode of tree.children) { 23 | if (rootNode.type === "heading") { 24 | currentLevel = rootNode.depth; 25 | if (currentLevel > lowestHeadingLevel) { 26 | lowestHeadingLevel = currentLevel; 27 | } 28 | validSections.push(rootNode); 29 | } else if (rootNode.type === "list") { 30 | // check if all links is author link 31 | // if so, it's a table of content 32 | // ignore it 33 | let isToc = true; 34 | visit(childrenToRoot(rootNode.children), "link", (node) => { 35 | if (!node.url.startsWith("#")) { 36 | isToc = false; 37 | } 38 | }); 39 | if (!isToc) { 40 | validSections.push(rootNode); 41 | } 42 | } 43 | } 44 | return validSections; 45 | } 46 | export function uglyFormatItemIdentifier( 47 | _fileInfo: FileInfo, 48 | item: Content | Root, 49 | ): string { 50 | // use link name as identifier 51 | let linkItem; 52 | visit(item, "link", (node) => { 53 | linkItem = node; 54 | return EXIT; 55 | }); 56 | 57 | if (linkItem) { 58 | const finalMarkdown = toMarkdown(linkItem, { 59 | extensions: [gfmToMarkdown()], 60 | }).trim(); 61 | return finalMarkdown; 62 | } else { 63 | return toMarkdown(item, { 64 | extensions: [gfmToMarkdown()], 65 | }).trim(); 66 | } 67 | } 68 | 69 | export function getFakeFileInfo(): FileInfo { 70 | return { 71 | "sourceConfig": { 72 | "identifier": "jaywcjlove/awesome-mac", 73 | "url": 
"https://github.com/jaywcjlove/awesome-mac", 74 | "files": { 75 | "README.md": { 76 | "filepath": "README.md", 77 | "pathname": "/jaywcjlove/awesome-mac/", 78 | "name": "Awesome Mac", 79 | "index": true, 80 | "options": { "type": "list" }, 81 | }, 82 | }, 83 | "category": "Platforms", 84 | }, 85 | "sourceMeta": { 86 | "created_at": "2022-10-24T18:24:24.090Z", 87 | "updated_at": "2022-10-24T18:35:54.686Z", 88 | "meta": { 89 | "default_branch": "master", 90 | "name": "awesome-mac", 91 | "description": 92 | " Now we have become very big, Different from the original idea. Collect premium software in various categories.", 93 | "url": "https://github.com/jaywcjlove/awesome-mac", 94 | "language": "JavaScript", 95 | "stargazers_count": 54167, 96 | "subscribers_count": 1410, 97 | "forks_count": 5538, 98 | "tags": [ 99 | "apple", 100 | "awesome", 101 | "awesome-list", 102 | "awesome-lists", 103 | "list", 104 | "mac", 105 | "mac-osx", 106 | "macos", 107 | "macosx", 108 | "software", 109 | ], 110 | "updated_at": "2022-10-22T02:50:51Z", 111 | "created_at": "2016-07-17T15:33:47Z", 112 | "checked_at": "2022-10-24T18:24:22.929Z", 113 | }, 114 | "files": { 115 | "README.md": { 116 | "sha1": "0049556161b8f5ddc0a3f89dbb9fb952826fd605", 117 | "updated_at": "2022-10-22T02:50:11.000Z", 118 | "meta_created_at": "2022-10-24T18:27:22.017Z", 119 | "created_at": "2016-07-17T15:34:53.000Z", 120 | "checked_at": "2022-10-24T18:27:22.017Z", 121 | }, 122 | }, 123 | }, 124 | "filepath": "README.md", 125 | }; 126 | } 127 | -------------------------------------------------------------------------------- /parser/mod.ts: -------------------------------------------------------------------------------- 1 | import markdownList from "./markdown/list.ts"; 2 | import markdownTable from "./markdown/table.ts"; 3 | import markdownHeading from "./markdown/heading.ts"; 4 | import { DocItem, ExpiredValue, FileInfo } from "../interface.ts"; 5 | 6 | export default function ( 7 | content: string, 8 | options: FileInfo, 9 | dbCachedStars: Record, 10 | ): Promise { 11 | const fileConfig = options.sourceConfig.files[options.filepath]; 12 | const type = fileConfig.options.type; 13 | if (type === "list") { 14 | return markdownList(content, options, dbCachedStars); 15 | } 16 | if (type === "table") { 17 | return markdownTable(content, options, dbCachedStars); 18 | } 19 | if (type === "heading") { 20 | return markdownHeading(content, options, dbCachedStars); 21 | } 22 | throw new Error(`unknown type ${type}`); 23 | } 24 | -------------------------------------------------------------------------------- /render-markdown.ts: -------------------------------------------------------------------------------- 1 | import { DocItem, FileInfo, ParseOptions } from "./interface.ts"; 2 | import { 3 | Content, 4 | fromMarkdown, 5 | render, 6 | TableRow, 7 | toMarkdown, 8 | visit, 9 | } from "./deps.ts"; 10 | import { childrenToRoot, getDomain } from "./util.ts"; 11 | import _log from "./log.ts"; 12 | import { 13 | gfm, 14 | gfmFromMarkdown, 15 | gfmToMarkdown, 16 | remarkEmoji, 17 | remarkGemoji, 18 | } from "./deps.ts"; 19 | import { CONTENT_DIR, INDEX_MARKDOWN_PATH } from "./constant.ts"; 20 | export default function renderMarkdown(content: string): string { 21 | const domain = getDomain(); 22 | const tree = fromMarkdown(content, "utf8", { 23 | // @ts-ignore: remarkInlineLinks is not typed 24 | extensions: [gfm()], 25 | mdastExtensions: [gfmFromMarkdown()], 26 | }); 27 | // @ts-ignore: node function 28 | const remarkEmojiPlugin = remarkEmoji(); 29 | // 
@ts-ignore: node function
30 | remarkEmojiPlugin(tree);
31 | const remarkGemojiPlugin = remarkGemoji();
32 | // @ts-ignore: node function
33 | remarkGemojiPlugin(tree);
34 | 
35 | visit(tree, "link", (node) => {
36 | const { url } = node;
37 | if (
38 | url &&
39 | (url.startsWith("/" + CONTENT_DIR + "/")) &&
40 | url.endsWith(INDEX_MARKDOWN_PATH)
41 | ) {
42 | node.url = url.slice(CONTENT_DIR.length + 1, -INDEX_MARKDOWN_PATH.length);
43 | } else if (
44 | url && (url.startsWith("/")) && url.endsWith(INDEX_MARKDOWN_PATH)
45 | ) {
46 | node.url = url.slice(0, -INDEX_MARKDOWN_PATH.length);
47 | }
48 | });
49 | 
50 | const markdownDist = toMarkdown(tree, {
51 | extensions: [gfmToMarkdown()],
52 | });
53 | return render(markdownDist, {
54 | allowIframes: true,
55 | });
56 | }
57 | 
-------------------------------------------------------------------------------- /render-markdown_test.ts: -------------------------------------------------------------------------------- 1 | import render from "./render-markdown.ts";
2 | import { assertEquals } from "./test-deps.ts";
3 | Deno.test("renderMarkdown", () => {
4 | const result = render("[Hello](/test/README.md)");
5 | assertEquals(result, `<p><a href="/test/">Hello</a></p>
\n`);
6 | });
7 | 
-------------------------------------------------------------------------------- /scripts/check-404.ts: -------------------------------------------------------------------------------- 1 | import { getConfig, gotWithCache } from "../util.ts";
2 | import { PROD_DOMAIN } from "../constant.ts";
3 | 
4 | async function check() {
5 | const config = await getConfig();
6 | const sources = config.sources;
7 | const sourcesKeys = Object.keys(sources);
8 | for (const siteIdentifier of sourcesKeys) {
9 | const site = sources[siteIdentifier];
10 | const files = site.files;
11 | const filesKeys = Object.keys(files);
12 | for (const fileIdentifier of filesKeys) {
13 | const file = files[fileIdentifier];
14 | const url = new URL(file.pathname, PROD_DOMAIN);
15 | try {
16 | const response = await gotWithCache(url.href, {});
17 | console.log("ok", url.href);
18 | } catch (e) {
19 | const ignored = ["PatrickJS/awesome-angular"];
20 | if (ignored.includes(file.pathname.slice(1, -1))) {
21 | console.warn(`ignored ${url.href}`);
22 | continue;
23 | }
24 | 
25 | console.log(`Error: ${url}`);
26 | throw e;
27 | }
28 | }
29 | }
30 | }
31 | 
32 | if (import.meta.main) {
33 | await check();
34 | }
35 | 
-------------------------------------------------------------------------------- /scripts/install-mdbook.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash
2 | 
3 | SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd );
4 | BIN_DIR="$SCRIPT_DIR/../bin"
5 | binname="mdbook-epub"
6 | mkdir -p $BIN_DIR
7 | if [[ "$OSTYPE" == "linux-gnu"* ]]; then
8 | cd -- /tmp
9 | # curl -OL https://github.com/theowenyoung/$binname/releases/latest/download/$binname-x86_64-unknown-linux-gnu.tar.gz
10 | # tar -xf /tmp/$binname-x86_64-unknown-linux-gnu.tar.gz -C $BIN_DIR
11 | curl -OL https://github.com/rust-lang/mdBook/releases/download/v0.4.21/mdbook-v0.4.21-x86_64-unknown-linux-gnu.tar.gz
12 | tar -xf /tmp/mdbook-v0.4.21-x86_64-unknown-linux-gnu.tar.gz -C $BIN_DIR
13 | # curl -OL https://github.com/HollowMan6/mdbook-pdf/releases/download/v0.1.3/mdbook-pdf-v0.1.3-x86_64-unknown-linux-gnu.zip
14 | # unzip /tmp/mdbook-pdf-v0.1.3-x86_64-unknown-linux-gnu.zip -d $BIN_DIR
15 | elif [[ "$OSTYPE" == "darwin"* ]]; then
16 | # Mac OSX
17 | cd -- /tmp/
18 | # curl -OL https://github.com/theowenyoung/$binname/releases/latest/download/$binname-x86_64-apple-darwin.zip
19 | # unzip -o /tmp/$binname-x86_64-apple-darwin.zip -d $BIN_DIR
20 | curl -OL https://github.com/rust-lang/mdBook/releases/download/v0.4.21/mdbook-v0.4.21-x86_64-apple-darwin.tar.gz
21 | tar -xf /tmp/mdbook-v0.4.21-x86_64-apple-darwin.tar.gz -C $BIN_DIR
22 | # curl -OL https://github.com/HollowMan6/mdbook-pdf/releases/download/v0.1.3/mdbook-pdf-v0.1.3-x86_64-apple-darwin.zip
23 | # unzip -o /tmp/mdbook-pdf-v0.1.3-x86_64-apple-darwin.zip -d $BIN_DIR
24 | fi;
25 | 
26 | chmod +x $BIN_DIR/*
27 | 
28 | echo Install Success. 
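# Editor's note (an assumption, not part of the original script): a minimal
# smoke test after installation is to run the unpacked binary, e.g.
#   ./bin/mdbook --version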
29 | 
30 | echo Run \`make buildbook\` to build to book-dist folder
-------------------------------------------------------------------------------- /scripts/install-morsels.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash
2 | set -e
3 | SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
4 | BIN_DIR="$SCRIPT_DIR/../bin"
5 | binname="morsels"
6 | mkdir -p $BIN_DIR
7 | if [[ "$OSTYPE" == "linux-gnu"* ]]; then
8 | cd -- /tmp
9 | curl -OL https://github.com/ang-zeyu/morsels/releases/download/v0.7.3/indexer.x86_64-unknown-linux-gnu.zip
10 | unzip indexer.x86_64-unknown-linux-gnu.zip -d $BIN_DIR
11 | chmod +x $BIN_DIR/morsels
12 | elif [[ "$OSTYPE" == "darwin"* ]]; then
13 | # Mac OSX
14 | cd -- /tmp/
15 | curl -OL https://github.com/ang-zeyu/morsels/releases/download/v0.7.3/indexer.x86_64-apple-darwin.zip
16 | unzip indexer.x86_64-apple-darwin.zip -d $BIN_DIR
17 | fi
18 | 
19 | chmod +x $BIN_DIR/*
20 | 
21 | echo Install Success.
22 | 
-------------------------------------------------------------------------------- /serve-markdown.ts: -------------------------------------------------------------------------------- 1 | import { CSS, mustache, path, serve, serveFile } from "./deps.ts";
2 | 
3 | import {
4 | getDistRepoPath,
5 | getStaticPath,
6 | readTextFile,
7 | urlToFilePath,
8 | } from "./util.ts";
9 | import log from "./log.ts";
10 | import { RunOptions } from "./interface.ts";
11 | import render from "./render-markdown.ts";
12 | export default async function serveSite(runOptions: RunOptions) {
13 | const port = runOptions.port;
14 | const BASE_PATH = getDistRepoPath();
15 | const staticpath = getStaticPath();
16 | const htmlTemplate = await readTextFile("./templates/index.html.mu");
17 | const handler = async (request: Request): Promise<Response> => {
18 | const filepath = urlToFilePath(request.url);
19 | 
20 | log.debug(`Request for ${filepath}`);
21 | let localPath = BASE_PATH + filepath;
22 | if (!filepath.endsWith(".md")) {
23 | // serve the static folder
24 | localPath = path.join(staticpath, filepath);
25 | return await serveFile(request, localPath);
26 | }
27 | // check if file exists
28 | let finalPath: string | undefined;
29 | try {
30 | const fileInfo = Deno.statSync(localPath);
31 | if (fileInfo.isFile) {
32 | finalPath = localPath;
33 | }
34 | } catch (e) {
35 | log.warn(e);
36 | }
37 | if (finalPath) {
38 | const fileContent = await readTextFile(finalPath);
39 | log.debug(`serving file: ${finalPath}`);
40 | const body = render(fileContent);
41 | const htmlContent = mustache.render(htmlTemplate, { CSS, body });
42 | return new Response(htmlContent, {
43 | status: 200,
44 | headers: {
45 | "content-type": "text/html",
46 | },
47 | });
48 | } else {
49 | return Promise.resolve(new Response("Not Found", { status: 404 }));
50 | }
51 | };
52 | log.info(
53 | `HTTP webserver running. 
Access it at: http://localhost:${port}/`,
54 | );
55 | serve(handler, { port });
56 | }
57 | 
-------------------------------------------------------------------------------- /serve-public.ts: -------------------------------------------------------------------------------- 1 | import { serve, serveDir } from "./deps.ts";
2 | import { getPublicPath } from "./util.ts";
3 | export default function servePublic() {
4 | serve((req) => {
5 | return serveDir(req, {
6 | fsRoot: getPublicPath(),
7 | showIndex: true,
8 | });
9 | });
10 | }
11 | 
-------------------------------------------------------------------------------- /static/404.html: -------------------------------------------------------------------------------- 1 | <!DOCTYPE html>
2 | <html>
3 | <head>
4 | <meta charset="utf-8" />
5 | <meta name="viewport" content="width=device-width, initial-scale=1" />
6 | 
7 | <title>404 Not Found</title>
8 | </head>
9 | <body>
10 | <h1>404 Not Found</h1>
11 | <p>
12 | Sorry for this, you may want to go
13 | <a href="/">Home Page</a>.
14 | </p>
15 | </body>
16 | </html>
17 | 
-------------------------------------------------------------------------------- /static/CNAME: -------------------------------------------------------------------------------- 1 | www.trackawesomelist.com
-------------------------------------------------------------------------------- /static/android-chrome-192x192.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trackawesomelist/trackawesomelist-source/cac86ac7e22af4f8c6d27dbe660ac04ad45a9332/static/android-chrome-192x192.png
-------------------------------------------------------------------------------- /static/android-chrome-512x512.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trackawesomelist/trackawesomelist-source/cac86ac7e22af4f8c6d27dbe660ac04ad45a9332/static/android-chrome-512x512.png
-------------------------------------------------------------------------------- /static/apple-touch-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trackawesomelist/trackawesomelist-source/cac86ac7e22af4f8c6d27dbe660ac04ad45a9332/static/apple-touch-icon.png
-------------------------------------------------------------------------------- /static/badge-flat.svg: -------------------------------------------------------------------------------- 1 | 3 | Track Awesome List 4 | 5 | 6 | 7 | 8 | 9 | 10 | Track Awesome List 11 | 12 | 
-------------------------------------------------------------------------------- /static/badge.svg: -------------------------------------------------------------------------------- 1 | 3 | Track Awesome List 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | Track Awesome List 20 | 21 | 
-------------------------------------------------------------------------------- /static/browserconfig.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | #da532c 7 | 8 | 9 | 10 | 
-------------------------------------------------------------------------------- /static/favicon-16x16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trackawesomelist/trackawesomelist-source/cac86ac7e22af4f8c6d27dbe660ac04ad45a9332/static/favicon-16x16.png
-------------------------------------------------------------------------------- /static/favicon-32x32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trackawesomelist/trackawesomelist-source/cac86ac7e22af4f8c6d27dbe660ac04ad45a9332/static/favicon-32x32.png
-------------------------------------------------------------------------------- /static/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trackawesomelist/trackawesomelist-source/cac86ac7e22af4f8c6d27dbe660ac04ad45a9332/static/favicon.ico
-------------------------------------------------------------------------------- /static/icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trackawesomelist/trackawesomelist-source/cac86ac7e22af4f8c6d27dbe660ac04ad45a9332/static/icon.png
-------------------------------------------------------------------------------- /static/icon.svg: -------------------------------------------------------------------------------- 1 | 9 | 10 
| 16 | 22 | 30 | 35 | 43 | 48 | 53 | 54 | -------------------------------------------------------------------------------- /static/mstile-150x150.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trackawesomelist/trackawesomelist-source/cac86ac7e22af4f8c6d27dbe660ac04ad45a9332/static/mstile-150x150.png -------------------------------------------------------------------------------- /static/robots.txt: -------------------------------------------------------------------------------- 1 | User-agent: * 2 | -------------------------------------------------------------------------------- /static/safari-pinned-tab.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 7 | 8 | Created by potrace 1.14, written by Peter Selinger 2001-2017 9 | 10 | 12 | 19 | 30 | 34 | 35 | 36 | -------------------------------------------------------------------------------- /static/site.webmanifest: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Track Awesome List", 3 | "short_name": "Track Awesome List", 4 | "icons": [ 5 | { 6 | "src": "/android-chrome-192x192.png", 7 | "sizes": "192x192", 8 | "type": "image/png" 9 | }, 10 | { 11 | "src": "/android-chrome-512x512.png", 12 | "sizes": "512x512", 13 | "type": "image/png" 14 | } 15 | ], 16 | "theme_color": "#ffffff", 17 | "background_color": "#ffffff", 18 | "display": "standalone" 19 | } 20 | -------------------------------------------------------------------------------- /tal.ts: -------------------------------------------------------------------------------- 1 | import { Command } from "./deps.ts"; 2 | import main from "./main.ts"; 3 | export default async function tal() { 4 | await new Command() 5 | .name("tal") 6 | .version("0.1.0") 7 | .description("Track Markdown Files Changes") 8 | .env("DEBUG=", "Enable debug output.") 9 | .env("FORCE=", "Enable force update.") 10 | .env("FORCE_FETCH=", "Enable force update fetch.") 11 | .env("PUSH=", "Enable push to remote repo.") 12 | .env("REBUILD=", "Enable rebuild.") 13 | .env("LIMIT=", "Limit sources to build, for debug.") 14 | .env("DAY_MARKDOWN=", "Disable day markdown output.") 15 | .env( 16 | "FETCH_REPO_UPDATES=", 17 | "fetch repo updates when init there is a cache. 
useful for fast local testing.",
18 | )
19 | .option("-d, --debug", "Enable debug output.")
20 | .option("-f, --force", "Force update markdown.")
21 | .option("--force-fetch", "Force update sources.")
22 | .option("--rebuild", "rebuild updates from git repo")
23 | .option("-p, --push", "Push markdown to remote.")
24 | .option("--no-fetch", "Don't fetch remote sources.")
25 | .option("--no-markdown", "do not build markdown file.")
26 | .option("--clean-html", "clean html files.")
27 | .option("--clean-markdown", "clean markdown files.")
28 | .option("--no-day-markdown", "do not build day markdown file.")
29 | .option("--no-fetch-repo-updates", "do not fetch repo updates.")
30 | .option("--html", "Build html files.")
31 | .option("--no-serve", "Serve site.")
32 | .option("--limit <limit:number>", "Limit number of sources to process.")
33 | .option(
34 | "--auto-init",
35 | "auto init db meta, to avoid failures loading the remote db",
36 | ).option(
37 | "--port <port:number>",
38 | "Serve site port.",
39 | {
40 | default: 8000,
41 | },
42 | )
43 | .arguments("[files...:string]")
44 | .action((options, ...args) => {
45 | main(options, ...args);
46 | })
47 | .parse(Deno.args);
48 | }
49 | 
50 | if (import.meta.main) {
51 | await tal();
52 | }
53 | 
-------------------------------------------------------------------------------- /templates/day.md.mu: -------------------------------------------------------------------------------- 1 | {{{nav}}}
2 | 
3 | {{#groups}}
4 | ## [{{{group_name}}}{{{group_suffix}}}]({{{group_url}}})
5 | 
6 | {{#items}}
7 | {{#category}}
8 | ### {{{category}}}
9 | 
10 | {{/category}}
11 | {{#items}}{{{markdown}}}
12 | {{/items}}
13 | 
14 | {{/items}}
15 | {{/groups}}
16 | 
17 | {{{footer}}}
18 | 
-------------------------------------------------------------------------------- /templates/file-by-day.md.mu: -------------------------------------------------------------------------------- 1 | # {{{title}}}
2 | 
3 | {{{description}}}
4 | 
5 | {{{_nav_text}}}
6 | 
7 | {{#items}}
8 | ## [{{{title}}}{{{_title_suffix}}}]({{{url}}})
9 | 
10 | {{{content_text}}}
11 | {{/items}}
12 | 
13 | 
-------------------------------------------------------------------------------- /templates/index.html.mu: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | {{{_seo_title}}} 14 | 15 | 16 | 17 | 18 | 19 | 27 | 28 | 29 | 
30 | {{{body}}} 31 |
32 | 33 | 34 | -------------------------------------------------------------------------------- /templates/root-readme.md.mu: -------------------------------------------------------------------------------- 1 | # {{{feed.title}}} 2 | 3 | {{{feed.description}}} This repo is generated by [trackawesomelist-source](https://github.com/trackawesomelist/trackawesomelist-source), visit it [Online](https://www.trackawesomelist.com) or with [Github](https://github.com/trackawesomelist/trackawesomelist/). 4 | 5 | {{{navText}}} 6 | 7 | ## Table of Contents 8 | 9 | - [Recently Updated](#recently-updated) 10 | - [Top 50 Awesome List](#top-50-awesome-list) 11 | - [All Tracked List](#all-tracked-list) 12 | - [Social Media](#social-media) 13 | - [Contribution](#contribution) 14 | 15 | ## Recently Updated{{#items}} 16 | 17 | ### [{{{_short_title}}}]({{{_filepath}}}) 18 | 19 | {{{content_text}}}{{/items}}{{paginationText}} 20 | 21 | ## Top 50 Awesome List 22 | 23 | {{#sortedRepos}} 24 | {{order}}. [{{{name}}}]({{{url}}}) - ([Source ⭐ {{star}} 📝 {{updated}} ]({{{source_url}}})) - {{{meta.description}}} 25 | {{/sortedRepos}} 26 | 27 | ## All Tracked List 28 | {{#list}} 29 | 30 | ### {{category}} 31 | 32 | {{#items}} 33 | - [{{{name}}}]({{{url}}}) - ([Source ⭐ {{star}}, 📝 {{updated}} ]({{{source_url}}})) - {{{meta.description}}} 34 | {{/items}} 35 | {{/list}} 36 | 37 | 38 | ## Social Media 39 | 40 | - [Twitter](https://twitter.com/trackawesome) 41 | - [Telegram](https://t.me/trackawesomelist) 42 | 43 | 44 | ## Contribution 45 | 46 | This repo is generated by [trackawesomelist-source](https://github.com/trackawesomelist/trackawesomelist-source), if you want to add your awesome list here, please edit [config.yml](https://github.com/trackawesomelist/trackawesomelist-source/blob/main/config.yml), and send a pull request, or just open an [issue](https://github.com/trackawesomelist/trackawesomelist-source/issues), I'll add it manually. 47 | 48 | If you want to add badge ([![Track Awesome List](https://www.trackawesomelist.com/badge.svg)](https://www.trackawesomelist.com/ripienaar/free-for-dev/) 49 | ) to your awesome list, please add the following code to your README.md: 50 | 51 | ```markdown 52 | [![Track Awesome List](https://www.trackawesomelist.com/badge.svg)](https://www.trackawesomelist.com/your_repo_pathname/) 53 | ``` 54 | 55 | 56 | The doc is still under construction, if you have any question, please open an [issue](https://github.com/trackawesomelist/trackawesomelist-source/issues) 57 | -------------------------------------------------------------------------------- /templates/search.html.mu: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | {{{_seo_title}}} 14 | 15 | 16 | 17 | 18 | 19 | 44 | 45 | 46 | 47 |
48 |

Search Awesome Stuff

49 |
50 | 51 |
52 |
53 |
54 | 55 |
56 | 57 | 58 | 59 | 98 | 99 | 100 | -------------------------------------------------------------------------------- /templates/week.md.mu: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trackawesomelist/trackawesomelist-source/cac86ac7e22af4f8c6d27dbe660ac04ad45a9332/templates/week.md.mu -------------------------------------------------------------------------------- /test-deps.ts: -------------------------------------------------------------------------------- 1 | export { 2 | assert, 3 | assertAlmostEquals, 4 | assertArrayIncludes, 5 | assertEquals, 6 | assertExists, 7 | assertMatch, 8 | assertNotEquals, 9 | assertNotMatch, 10 | assertObjectMatch, 11 | assertRejects, 12 | assertStringIncludes, 13 | assertThrows, 14 | } from "https://deno.land/std@0.151.0/testing/asserts.ts"; 15 | -------------------------------------------------------------------------------- /util_test.ts: -------------------------------------------------------------------------------- 1 | import { getDayNumber, readTextFile, sha1, titleCase } from "./util.ts"; 2 | import { assertEquals } from "./test-deps.ts"; 3 | 4 | Deno.test("sha1 #1", async () => { 5 | const content = "hello world"; 6 | const sum = await sha1(content); 7 | assertEquals(sum, "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed"); 8 | }); 9 | 10 | Deno.test("getDayNumber", () => { 11 | const date = new Date("2020-01-01"); 12 | const dayNumber = getDayNumber(date); 13 | assertEquals(dayNumber, 20200101); 14 | }); 15 | 16 | Deno.test("title case", () => { 17 | const title = titleCase("free-for-dev"); 18 | assertEquals(title, "Free for Dev"); 19 | }); 20 | --------------------------------------------------------------------------------
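// Usage sketch (added note; the flags come from the Command definition in
// tal.ts above, while the `-A` permission flag is an assumption about a
// typical local Deno setup):
//   deno run -A tal.ts --debug --limit 1 --no-serve   # fetch and build a single source
//   deno run -A tal.ts --html --port 8000             # build html output and serve it locally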