/user/repo.")
16 | }
17 | var dir = path.join(__dirname, service, repo);
18 | var reqStream = req.headers['content-encoding'] == 'gzip' ? req.pipe(zlib.createGunzip()) : req;
19 |
20 | reqStream.pipe(backend(req.url, function (err, service) {
21 | if (err) return res.end(err + '\n');
22 |
23 | res.setHeader('content-type', service.type);
24 |
25 | var ps = spawn(service.cmd, service.args.concat(dir));
26 | ps.stdout.pipe(service.createStream()).pipe(ps.stdin);
27 |
28 | })).pipe(res);
29 | });
30 |
--------------------------------------------------------------------------------
/api/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "twitter-bridge",
3 | "version": "1.0.0",
4 | "description": "",
5 | "main": "app.ts",
6 | "scripts": {
7 | "test": "echo \"Error: no test specified\" && exit 1",
8 | "start": "tsc app.ts && node app.js"
9 | },
10 | "author": "",
11 | "license": "ISC",
12 | "dependencies": {
13 | "cors": "^2.8.5",
14 | "express": "^4.17.1",
15 | "git-http-backend": "^1.0.2",
16 | "isomorphic-git": "^1.8.2",
17 | "load-json-file": "^6.2.0",
18 | "node-fetch": "^2.6.1",
19 | "source-map-support": "^0.5.19",
20 | "string-strip-html": "^8.3.0"
21 | },
22 | "devDependencies": {
23 | "@types/node": "^16.9.4",
24 | "nodemon": "^2.0.13",
25 | "ts-node": "^10.3.0",
26 | "typescript": "^4.4.3"
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/api/pnpm-lock.yaml:
--------------------------------------------------------------------------------
1 | lockfileVersion: 5.3
2 |
3 | specifiers:
4 | cors: ^2.8.5
5 | express: ^4.17.1
6 | git-http-backend: ^1.0.2
7 | isomorphic-git: ^1.8.2
8 | load-json-file: ^6.2.0
9 | node-fetch: ^2.6.1
10 | source-map-support: ^0.5.19
11 | string-strip-html: ^8.3.0
12 |
13 | dependencies:
14 | cors: 2.8.5
15 | express: 4.17.1
16 | git-http-backend: 1.0.2
17 | isomorphic-git: 1.8.2
18 | load-json-file: 6.2.0
19 | node-fetch: 2.6.1
20 | source-map-support: 0.5.19
21 | string-strip-html: 8.3.0
22 |
23 | packages:
24 |
25 | /@babel/code-frame/7.14.5:
26 | resolution: {integrity: sha512-9pzDqyc6OLDaqe+zbACgFkb6fKMNG6CObKpnYXChRsvYGyEdc7CA2BaqeOM+vOtCS5ndmJicPJhKAwYRI6UfFw==}
27 | engines: {node: '>=6.9.0'}
28 | dependencies:
29 | '@babel/highlight': 7.14.5
30 | dev: false
31 |
32 | /@babel/helper-validator-identifier/7.14.5:
33 | resolution: {integrity: sha512-5lsetuxCLilmVGyiLEfoHBRX8UCFD+1m2x3Rj97WrW3V7H3u4RWRXA4evMjImCsin2J2YT0QaVDGf+z8ondbAg==}
34 | engines: {node: '>=6.9.0'}
35 | dev: false
36 |
37 | /@babel/highlight/7.14.5:
38 | resolution: {integrity: sha512-qf9u2WFWVV0MppaL877j2dBtQIDgmidgjGk5VIMw3OadXvYaXn66U1BFlH2t4+t3i+8PhedppRv+i40ABzd+gg==}
39 | engines: {node: '>=6.9.0'}
40 | dependencies:
41 | '@babel/helper-validator-identifier': 7.14.5
42 | chalk: 2.4.2
43 | js-tokens: 4.0.0
44 | dev: false
45 |
46 | /@babel/runtime/7.14.5:
47 | resolution: {integrity: sha512-121rumjddw9c3NCQ55KGkyE1h/nzWhU/owjhw0l4mQrkzz4x9SGS1X8gFLraHwX7td3Yo4QTL+qj0NcIzN87BA==}
48 | engines: {node: '>=6.9.0'}
49 | dependencies:
50 | regenerator-runtime: 0.13.7
51 | dev: false
52 |
53 | /accepts/1.3.7:
54 | resolution: {integrity: sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==}
55 | engines: {node: '>= 0.6'}
56 | dependencies:
57 | mime-types: 2.1.32
58 | negotiator: 0.6.2
59 | dev: false
60 |
61 | /ansi-styles/3.2.1:
62 | resolution: {integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==}
63 | engines: {node: '>=4'}
64 | dependencies:
65 | color-convert: 1.9.3
66 | dev: false
67 |
68 | /array-flatten/1.1.1:
69 | resolution: {integrity: sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=}
70 | dev: false
71 |
72 | /async-lock/1.3.0:
73 | resolution: {integrity: sha512-8A7SkiisnEgME2zEedtDYPxUPzdv3x//E7n5IFktPAtMYSEAV7eNJF0rMwrVyUFj6d/8rgajLantbjcNRQYXIg==}
74 | dev: false
75 |
76 | /body-parser/1.19.0:
77 | resolution: {integrity: sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==}
78 | engines: {node: '>= 0.8'}
79 | dependencies:
80 | bytes: 3.1.0
81 | content-type: 1.0.4
82 | debug: 2.6.9
83 | depd: 1.1.2
84 | http-errors: 1.7.2
85 | iconv-lite: 0.4.24
86 | on-finished: 2.3.0
87 | qs: 6.7.0
88 | raw-body: 2.4.0
89 | type-is: 1.6.18
90 | dev: false
91 |
92 | /buffer-from/1.1.1:
93 | resolution: {integrity: sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==}
94 | dev: false
95 |
96 | /bytes/3.1.0:
97 | resolution: {integrity: sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==}
98 | engines: {node: '>= 0.8'}
99 | dev: false
100 |
101 | /chalk/2.4.2:
102 | resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==}
103 | engines: {node: '>=4'}
104 | dependencies:
105 | ansi-styles: 3.2.1
106 | escape-string-regexp: 1.0.5
107 | supports-color: 5.5.0
108 | dev: false
109 |
110 | /clean-git-ref/2.0.1:
111 | resolution: {integrity: sha512-bLSptAy2P0s6hU4PzuIMKmMJJSE6gLXGH1cntDu7bWJUksvuM+7ReOK61mozULErYvP6a15rnYl0zFDef+pyPw==}
112 | dev: false
113 |
114 | /color-convert/1.9.3:
115 | resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==}
116 | dependencies:
117 | color-name: 1.1.3
118 | dev: false
119 |
120 | /color-name/1.1.3:
121 | resolution: {integrity: sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=}
122 | dev: false
123 |
124 | /content-disposition/0.5.3:
125 | resolution: {integrity: sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==}
126 | engines: {node: '>= 0.6'}
127 | dependencies:
128 | safe-buffer: 5.1.2
129 | dev: false
130 |
131 | /content-type/1.0.4:
132 | resolution: {integrity: sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==}
133 | engines: {node: '>= 0.6'}
134 | dev: false
135 |
136 | /cookie-signature/1.0.6:
137 | resolution: {integrity: sha1-4wOogrNCzD7oylE6eZmXNNqzriw=}
138 | dev: false
139 |
140 | /cookie/0.4.0:
141 | resolution: {integrity: sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==}
142 | engines: {node: '>= 0.6'}
143 | dev: false
144 |
145 | /cors/2.8.5:
146 | resolution: {integrity: sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==}
147 | engines: {node: '>= 0.10'}
148 | dependencies:
149 | object-assign: 4.1.1
150 | vary: 1.1.2
151 | dev: false
152 |
153 | /crc-32/1.2.0:
154 | resolution: {integrity: sha512-1uBwHxF+Y/4yF5G48fwnKq6QsIXheor3ZLPT80yGBV1oEUwpPojlEhQbWKVw1VwcTQyMGHK1/XMmTjmlsmTTGA==}
155 | engines: {node: '>=0.8'}
156 | hasBin: true
157 | dependencies:
158 | exit-on-epipe: 1.0.1
159 | printj: 1.1.2
160 | dev: false
161 |
162 | /debug/2.6.9:
163 | resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==}
164 | dependencies:
165 | ms: 2.0.0
166 | dev: false
167 |
168 | /decompress-response/4.2.1:
169 | resolution: {integrity: sha512-jOSne2qbyE+/r8G1VU+G/82LBs2Fs4LAsTiLSHOCOMZQl2OKZ6i8i4IyHemTe+/yIXOtTcRQMzPcgyhoFlqPkw==}
170 | engines: {node: '>=8'}
171 | dependencies:
172 | mimic-response: 2.1.0
173 | dev: false
174 |
175 | /depd/1.1.2:
176 | resolution: {integrity: sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=}
177 | engines: {node: '>= 0.6'}
178 | dev: false
179 |
180 | /destroy/1.0.4:
181 | resolution: {integrity: sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=}
182 | dev: false
183 |
184 | /diff3/0.0.3:
185 | resolution: {integrity: sha1-1OXDpM305f4SEatC5pP8tDIVgPw=}
186 | dev: false
187 |
188 | /ee-first/1.1.1:
189 | resolution: {integrity: sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=}
190 | dev: false
191 |
192 | /encodeurl/1.0.2:
193 | resolution: {integrity: sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=}
194 | engines: {node: '>= 0.8'}
195 | dev: false
196 |
197 | /error-ex/1.3.2:
198 | resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==}
199 | dependencies:
200 | is-arrayish: 0.2.1
201 | dev: false
202 |
203 | /escape-html/1.0.3:
204 | resolution: {integrity: sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=}
205 | dev: false
206 |
207 | /escape-string-regexp/1.0.5:
208 | resolution: {integrity: sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=}
209 | engines: {node: '>=0.8.0'}
210 | dev: false
211 |
212 | /etag/1.8.1:
213 | resolution: {integrity: sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=}
214 | engines: {node: '>= 0.6'}
215 | dev: false
216 |
217 | /exit-on-epipe/1.0.1:
218 | resolution: {integrity: sha512-h2z5mrROTxce56S+pnvAV890uu7ls7f1kEvVGJbw1OlFH3/mlJ5bkXu0KRyW94v37zzHPiUd55iLn3DA7TjWpw==}
219 | engines: {node: '>=0.8'}
220 | dev: false
221 |
222 | /express/4.17.1:
223 | resolution: {integrity: sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==}
224 | engines: {node: '>= 0.10.0'}
225 | dependencies:
226 | accepts: 1.3.7
227 | array-flatten: 1.1.1
228 | body-parser: 1.19.0
229 | content-disposition: 0.5.3
230 | content-type: 1.0.4
231 | cookie: 0.4.0
232 | cookie-signature: 1.0.6
233 | debug: 2.6.9
234 | depd: 1.1.2
235 | encodeurl: 1.0.2
236 | escape-html: 1.0.3
237 | etag: 1.8.1
238 | finalhandler: 1.1.2
239 | fresh: 0.5.2
240 | merge-descriptors: 1.0.1
241 | methods: 1.1.2
242 | on-finished: 2.3.0
243 | parseurl: 1.3.3
244 | path-to-regexp: 0.1.7
245 | proxy-addr: 2.0.7
246 | qs: 6.7.0
247 | range-parser: 1.2.1
248 | safe-buffer: 5.1.2
249 | send: 0.17.1
250 | serve-static: 1.14.1
251 | setprototypeof: 1.1.1
252 | statuses: 1.5.0
253 | type-is: 1.6.18
254 | utils-merge: 1.0.1
255 | vary: 1.1.2
256 | dev: false
257 |
258 | /finalhandler/1.1.2:
259 | resolution: {integrity: sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==}
260 | engines: {node: '>= 0.8'}
261 | dependencies:
262 | debug: 2.6.9
263 | encodeurl: 1.0.2
264 | escape-html: 1.0.3
265 | on-finished: 2.3.0
266 | parseurl: 1.3.3
267 | statuses: 1.5.0
268 | unpipe: 1.0.0
269 | dev: false
270 |
271 | /forwarded/0.2.0:
272 | resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==}
273 | engines: {node: '>= 0.6'}
274 | dev: false
275 |
276 | /fresh/0.5.2:
277 | resolution: {integrity: sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=}
278 | engines: {node: '>= 0.6'}
279 | dev: false
280 |
281 | /git-http-backend/1.0.2:
282 | resolution: {integrity: sha1-3AHkKEIJNTBkRTpw+S+J3gg06xA=}
283 | dependencies:
284 | git-side-band-message: 0.0.3
285 | inherits: 2.0.4
286 | dev: false
287 |
288 | /git-side-band-message/0.0.3:
289 | resolution: {integrity: sha1-uKU0jC3L8ZSf0pXFBgFOJsPyakY=}
290 | dev: false
291 |
292 | /graceful-fs/4.2.6:
293 | resolution: {integrity: sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ==}
294 | dev: false
295 |
296 | /has-flag/3.0.0:
297 | resolution: {integrity: sha1-tdRU3CGZriJWmfNGfloH87lVuv0=}
298 | engines: {node: '>=4'}
299 | dev: false
300 |
301 | /html-entities/2.3.2:
302 | resolution: {integrity: sha512-c3Ab/url5ksaT0WyleslpBEthOzWhrjQbg75y7XUsfSzi3Dgzt0l8w5e7DylRn15MTlMMD58dTfzddNS2kcAjQ==}
303 | dev: false
304 |
305 | /http-errors/1.7.2:
306 | resolution: {integrity: sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==}
307 | engines: {node: '>= 0.6'}
308 | dependencies:
309 | depd: 1.1.2
310 | inherits: 2.0.3
311 | setprototypeof: 1.1.1
312 | statuses: 1.5.0
313 | toidentifier: 1.0.0
314 | dev: false
315 |
316 | /http-errors/1.7.3:
317 | resolution: {integrity: sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw==}
318 | engines: {node: '>= 0.6'}
319 | dependencies:
320 | depd: 1.1.2
321 | inherits: 2.0.4
322 | setprototypeof: 1.1.1
323 | statuses: 1.5.0
324 | toidentifier: 1.0.0
325 | dev: false
326 |
327 | /iconv-lite/0.4.24:
328 | resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==}
329 | engines: {node: '>=0.10.0'}
330 | dependencies:
331 | safer-buffer: 2.1.2
332 | dev: false
333 |
334 | /ignore/5.1.8:
335 | resolution: {integrity: sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw==}
336 | engines: {node: '>= 4'}
337 | dev: false
338 |
339 | /inherits/2.0.3:
340 | resolution: {integrity: sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=}
341 | dev: false
342 |
343 | /inherits/2.0.4:
344 | resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==}
345 | dev: false
346 |
347 | /ipaddr.js/1.9.1:
348 | resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==}
349 | engines: {node: '>= 0.10'}
350 | dev: false
351 |
352 | /is-arrayish/0.2.1:
353 | resolution: {integrity: sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=}
354 | dev: false
355 |
356 | /isomorphic-git/1.8.2:
357 | resolution: {integrity: sha512-wp3on2Kks1sE/tLUmGLPV7EEAj+JRK8WoL2ZSfJHVQfWzRqMRv96bqzDjyYpC6COGKlDQnhTNCucRf83S3cuMw==}
358 | engines: {node: '>=10'}
359 | hasBin: true
360 | dependencies:
361 | async-lock: 1.3.0
362 | clean-git-ref: 2.0.1
363 | crc-32: 1.2.0
364 | diff3: 0.0.3
365 | ignore: 5.1.8
366 | minimisted: 2.0.1
367 | pako: 1.0.11
368 | pify: 4.0.1
369 | readable-stream: 3.6.0
370 | sha.js: 2.4.11
371 | simple-get: 3.1.0
372 | dev: false
373 |
374 | /js-tokens/4.0.0:
375 | resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==}
376 | dev: false
377 |
378 | /json-parse-even-better-errors/2.3.1:
379 | resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==}
380 | dev: false
381 |
382 | /lines-and-columns/1.1.6:
383 | resolution: {integrity: sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA=}
384 | dev: false
385 |
386 | /load-json-file/6.2.0:
387 | resolution: {integrity: sha512-gUD/epcRms75Cw8RT1pUdHugZYM5ce64ucs2GEISABwkRsOQr0q2wm/MV2TKThycIe5e0ytRweW2RZxclogCdQ==}
388 | engines: {node: '>=8'}
389 | dependencies:
390 | graceful-fs: 4.2.6
391 | parse-json: 5.2.0
392 | strip-bom: 4.0.0
393 | type-fest: 0.6.0
394 | dev: false
395 |
396 | /lodash.clonedeep/4.5.0:
397 | resolution: {integrity: sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=}
398 | dev: false
399 |
400 | /lodash.isplainobject/4.0.6:
401 | resolution: {integrity: sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs=}
402 | dev: false
403 |
404 | /lodash.trim/4.5.1:
405 | resolution: {integrity: sha1-NkJefukL5KpeJ7zruFt9EepHqlc=}
406 | dev: false
407 |
408 | /lodash.without/4.4.0:
409 | resolution: {integrity: sha1-PNRXSgC2e643OpS3SHcmQFB7eqw=}
410 | dev: false
411 |
412 | /media-typer/0.3.0:
413 | resolution: {integrity: sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=}
414 | engines: {node: '>= 0.6'}
415 | dev: false
416 |
417 | /merge-descriptors/1.0.1:
418 | resolution: {integrity: sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=}
419 | dev: false
420 |
421 | /methods/1.1.2:
422 | resolution: {integrity: sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=}
423 | engines: {node: '>= 0.6'}
424 | dev: false
425 |
426 | /mime-db/1.49.0:
427 | resolution: {integrity: sha512-CIc8j9URtOVApSFCQIF+VBkX1RwXp/oMMOrqdyXSBXq5RWNEsRfyj1kiRnQgmNXmHxPoFIxOroKA3zcU9P+nAA==}
428 | engines: {node: '>= 0.6'}
429 | dev: false
430 |
431 | /mime-types/2.1.32:
432 | resolution: {integrity: sha512-hJGaVS4G4c9TSMYh2n6SQAGrC4RnfU+daP8G7cSCmaqNjiOoUY0VHCMS42pxnQmVF1GWwFhbHWn3RIxCqTmZ9A==}
433 | engines: {node: '>= 0.6'}
434 | dependencies:
435 | mime-db: 1.49.0
436 | dev: false
437 |
438 | /mime/1.6.0:
439 | resolution: {integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==}
440 | engines: {node: '>=4'}
441 | hasBin: true
442 | dev: false
443 |
444 | /mimic-response/2.1.0:
445 | resolution: {integrity: sha512-wXqjST+SLt7R009ySCglWBCFpjUygmCIfD790/kVbiGmUgfYGuB14PiTd5DwVxSV4NcYHjzMkoj5LjQZwTQLEA==}
446 | engines: {node: '>=8'}
447 | dev: false
448 |
449 | /minimist/1.2.5:
450 | resolution: {integrity: sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==}
451 | dev: false
452 |
453 | /minimisted/2.0.1:
454 | resolution: {integrity: sha512-1oPjfuLQa2caorJUM8HV8lGgWCc0qqAO1MNv/k05G4qslmsndV/5WdNZrqCiyqiz3wohia2Ij2B7w2Dr7/IyrA==}
455 | dependencies:
456 | minimist: 1.2.5
457 | dev: false
458 |
459 | /ms/2.0.0:
460 | resolution: {integrity: sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=}
461 | dev: false
462 |
463 | /ms/2.1.1:
464 | resolution: {integrity: sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==}
465 | dev: false
466 |
467 | /negotiator/0.6.2:
468 | resolution: {integrity: sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==}
469 | engines: {node: '>= 0.6'}
470 | dev: false
471 |
472 | /node-fetch/2.6.1:
473 | resolution: {integrity: sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==}
474 | engines: {node: 4.x || >=6.0.0}
475 | dev: false
476 |
477 | /object-assign/4.1.1:
478 | resolution: {integrity: sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=}
479 | engines: {node: '>=0.10.0'}
480 | dev: false
481 |
482 | /on-finished/2.3.0:
483 | resolution: {integrity: sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=}
484 | engines: {node: '>= 0.8'}
485 | dependencies:
486 | ee-first: 1.1.1
487 | dev: false
488 |
489 | /once/1.4.0:
490 | resolution: {integrity: sha1-WDsap3WWHUsROsF9nFC6753Xa9E=}
491 | dependencies:
492 | wrappy: 1.0.2
493 | dev: false
494 |
495 | /pako/1.0.11:
496 | resolution: {integrity: sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==}
497 | dev: false
498 |
499 | /parse-json/5.2.0:
500 | resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==}
501 | engines: {node: '>=8'}
502 | dependencies:
503 | '@babel/code-frame': 7.14.5
504 | error-ex: 1.3.2
505 | json-parse-even-better-errors: 2.3.1
506 | lines-and-columns: 1.1.6
507 | dev: false
508 |
509 | /parseurl/1.3.3:
510 | resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==}
511 | engines: {node: '>= 0.8'}
512 | dev: false
513 |
514 | /path-to-regexp/0.1.7:
515 | resolution: {integrity: sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=}
516 | dev: false
517 |
518 | /pify/4.0.1:
519 | resolution: {integrity: sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==}
520 | engines: {node: '>=6'}
521 | dev: false
522 |
523 | /printj/1.1.2:
524 | resolution: {integrity: sha512-zA2SmoLaxZyArQTOPj5LXecR+RagfPSU5Kw1qP+jkWeNlrq+eJZyY2oS68SU1Z/7/myXM4lo9716laOFAVStCQ==}
525 | engines: {node: '>=0.8'}
526 | hasBin: true
527 | dev: false
528 |
529 | /proxy-addr/2.0.7:
530 | resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==}
531 | engines: {node: '>= 0.10'}
532 | dependencies:
533 | forwarded: 0.2.0
534 | ipaddr.js: 1.9.1
535 | dev: false
536 |
537 | /qs/6.7.0:
538 | resolution: {integrity: sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==}
539 | engines: {node: '>=0.6'}
540 | dev: false
541 |
542 | /range-parser/1.2.1:
543 | resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==}
544 | engines: {node: '>= 0.6'}
545 | dev: false
546 |
547 | /ranges-apply/5.1.0:
548 | resolution: {integrity: sha512-VF3a0XUuYS/BQHv2RaIyX1K7S1hbfrs64hkGKgPVk0Y7p4XFwSucjTTttrBqmkcmB/PZx5ISTZdxErRZi/89aQ==}
549 | dependencies:
550 | '@babel/runtime': 7.14.5
551 | ranges-merge: 7.1.0
552 | dev: false
553 |
554 | /ranges-merge/7.1.0:
555 | resolution: {integrity: sha512-coTHcyAEIhoEdsBs9f5f+q0rmy7UHvS/5nfuXzuj5oLX/l/tbqM5uxRb6eh8WMdetXia3lK67ZO4tarH4ieulQ==}
556 | dependencies:
557 | '@babel/runtime': 7.14.5
558 | ranges-push: 5.1.0
559 | ranges-sort: 4.1.0
560 | dev: false
561 |
562 | /ranges-push/5.1.0:
563 | resolution: {integrity: sha512-vqGcaGq7GWV1zBa9w83E+dzYkOvE9/3pIRUPvLf12c+mGQCf1nesrkBI7Ob8taN2CC9V1HDSJx0KAQl0SgZftA==}
564 | dependencies:
565 | '@babel/runtime': 7.14.5
566 | ranges-merge: 7.1.0
567 | string-collapse-leading-whitespace: 5.1.0
568 | string-trim-spaces-only: 3.1.0
569 | dev: false
570 |
571 | /ranges-sort/4.1.0:
572 | resolution: {integrity: sha512-GOQgk6UtsrfKFeYa53YLiBVnLINwYmOk5l2QZG1csZpT6GdImUwooh+/cRrp7b+fYawZX/rnyA3Ul+pdgQBIzA==}
573 | dependencies:
574 | '@babel/runtime': 7.14.5
575 | dev: false
576 |
577 | /raw-body/2.4.0:
578 | resolution: {integrity: sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==}
579 | engines: {node: '>= 0.8'}
580 | dependencies:
581 | bytes: 3.1.0
582 | http-errors: 1.7.2
583 | iconv-lite: 0.4.24
584 | unpipe: 1.0.0
585 | dev: false
586 |
587 | /readable-stream/3.6.0:
588 | resolution: {integrity: sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==}
589 | engines: {node: '>= 6'}
590 | dependencies:
591 | inherits: 2.0.4
592 | string_decoder: 1.3.0
593 | util-deprecate: 1.0.2
594 | dev: false
595 |
596 | /regenerator-runtime/0.13.7:
597 | resolution: {integrity: sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew==}
598 | dev: false
599 |
600 | /safe-buffer/5.1.2:
601 | resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==}
602 | dev: false
603 |
604 | /safe-buffer/5.2.1:
605 | resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==}
606 | dev: false
607 |
608 | /safer-buffer/2.1.2:
609 | resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==}
610 | dev: false
611 |
612 | /send/0.17.1:
613 | resolution: {integrity: sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==}
614 | engines: {node: '>= 0.8.0'}
615 | dependencies:
616 | debug: 2.6.9
617 | depd: 1.1.2
618 | destroy: 1.0.4
619 | encodeurl: 1.0.2
620 | escape-html: 1.0.3
621 | etag: 1.8.1
622 | fresh: 0.5.2
623 | http-errors: 1.7.3
624 | mime: 1.6.0
625 | ms: 2.1.1
626 | on-finished: 2.3.0
627 | range-parser: 1.2.1
628 | statuses: 1.5.0
629 | dev: false
630 |
631 | /serve-static/1.14.1:
632 | resolution: {integrity: sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==}
633 | engines: {node: '>= 0.8.0'}
634 | dependencies:
635 | encodeurl: 1.0.2
636 | escape-html: 1.0.3
637 | parseurl: 1.3.3
638 | send: 0.17.1
639 | dev: false
640 |
641 | /setprototypeof/1.1.1:
642 | resolution: {integrity: sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==}
643 | dev: false
644 |
645 | /sha.js/2.4.11:
646 | resolution: {integrity: sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==}
647 | hasBin: true
648 | dependencies:
649 | inherits: 2.0.4
650 | safe-buffer: 5.2.1
651 | dev: false
652 |
653 | /simple-concat/1.0.1:
654 | resolution: {integrity: sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==}
655 | dev: false
656 |
657 | /simple-get/3.1.0:
658 | resolution: {integrity: sha512-bCR6cP+aTdScaQCnQKbPKtJOKDp/hj9EDLJo3Nw4y1QksqaovlW/bnptB6/c1e+qmNIDHRK+oXFDdEqBT8WzUA==}
659 | dependencies:
660 | decompress-response: 4.2.1
661 | once: 1.4.0
662 | simple-concat: 1.0.1
663 | dev: false
664 |
665 | /source-map-support/0.5.19:
666 | resolution: {integrity: sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==}
667 | dependencies:
668 | buffer-from: 1.1.1
669 | source-map: 0.6.1
670 | dev: false
671 |
672 | /source-map/0.6.1:
673 | resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
674 | engines: {node: '>=0.10.0'}
675 | dev: false
676 |
677 | /statuses/1.5.0:
678 | resolution: {integrity: sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=}
679 | engines: {node: '>= 0.6'}
680 | dev: false
681 |
682 | /string-collapse-leading-whitespace/5.1.0:
683 | resolution: {integrity: sha512-mYz9/Kb5uvRB4DZj46zILwI4y9lD9JsvXG9Xb7zjbwm0I/R40G7oFfMsqJ28l2d7gWMTLJL569NfJQVLQbnHCw==}
684 | dependencies:
685 | '@babel/runtime': 7.14.5
686 | dev: false
687 |
688 | /string-left-right/4.1.0:
689 | resolution: {integrity: sha512-ic/WvfNVUygWWsgg8akzSzp2NuttfhrdbH7QmSnda5b5RFmT9aCEDiS/M+gmTJwtFy7+b/2AXU4Z6vejcePQqQ==}
690 | dependencies:
691 | '@babel/runtime': 7.14.5
692 | lodash.clonedeep: 4.5.0
693 | lodash.isplainobject: 4.0.6
694 | dev: false
695 |
696 | /string-strip-html/8.3.0:
697 | resolution: {integrity: sha512-1+rjTPt0JjpFr1w0bfNL1S6O0I9fJDqM+P3pFTpC6eEEpIXhmBvPLnaQoEuWarswiH219qCefDSxTLxGQyHKUg==}
698 | dependencies:
699 | '@babel/runtime': 7.14.5
700 | html-entities: 2.3.2
701 | lodash.isplainobject: 4.0.6
702 | lodash.trim: 4.5.1
703 | lodash.without: 4.4.0
704 | ranges-apply: 5.1.0
705 | ranges-push: 5.1.0
706 | string-left-right: 4.1.0
707 | dev: false
708 |
709 | /string-trim-spaces-only/3.1.0:
710 | resolution: {integrity: sha512-AW7RSi3+QtE6wR+4m/kmwlyy39neBbCIzrzzu1/RGzNRiPKQOeB3rGzr4ubg4UIQgYtr2w0PrxhKPXgyqJ0vaQ==}
711 | dependencies:
712 | '@babel/runtime': 7.14.5
713 | dev: false
714 |
715 | /string_decoder/1.3.0:
716 | resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==}
717 | dependencies:
718 | safe-buffer: 5.2.1
719 | dev: false
720 |
721 | /strip-bom/4.0.0:
722 | resolution: {integrity: sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==}
723 | engines: {node: '>=8'}
724 | dev: false
725 |
726 | /supports-color/5.5.0:
727 | resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==}
728 | engines: {node: '>=4'}
729 | dependencies:
730 | has-flag: 3.0.0
731 | dev: false
732 |
733 | /toidentifier/1.0.0:
734 | resolution: {integrity: sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==}
735 | engines: {node: '>=0.6'}
736 | dev: false
737 |
738 | /type-fest/0.6.0:
739 | resolution: {integrity: sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==}
740 | engines: {node: '>=8'}
741 | dev: false
742 |
743 | /type-is/1.6.18:
744 | resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==}
745 | engines: {node: '>= 0.6'}
746 | dependencies:
747 | media-typer: 0.3.0
748 | mime-types: 2.1.32
749 | dev: false
750 |
751 | /unpipe/1.0.0:
752 | resolution: {integrity: sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=}
753 | engines: {node: '>= 0.8'}
754 | dev: false
755 |
756 | /util-deprecate/1.0.2:
757 | resolution: {integrity: sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=}
758 | dev: false
759 |
760 | /utils-merge/1.0.1:
761 | resolution: {integrity: sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=}
762 | engines: {node: '>= 0.4.0'}
763 | dev: false
764 |
765 | /vary/1.1.2:
766 | resolution: {integrity: sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=}
767 | engines: {node: '>= 0.8'}
768 | dev: false
769 |
770 | /wrappy/1.0.2:
771 | resolution: {integrity: sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=}
772 | dev: false
773 |
--------------------------------------------------------------------------------
/api/run-prod.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright 2021 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
# This is for the JS/TS based API, which is experimental.
# Install dependencies, then run the TypeScript entry point directly
# (ts-node transpiles on the fly; no separate tsc step needed here).
npm install
npx ts-node app.ts
19 |
--------------------------------------------------------------------------------
/api/templates/status.html:
--------------------------------------------------------------------------------
1 |
2 |
3 | Status for Agora Bridge
4 |
5 | No status known.
6 | Try again later? :)
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/api/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | // "sourceMap": true,
4 | "inlineSourceMap": true
5 | },
6 | "files":["app.ts"]
7 | }
--------------------------------------------------------------------------------
/bots/.gitignore:
--------------------------------------------------------------------------------
1 | agora-bot.yaml
2 | tweets.yaml
3 |
--------------------------------------------------------------------------------
/bots/bluesky/agora-bot.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import argparse
4 | import logging
5 | import os
6 | import re
7 | import time
8 | import subprocess
9 | import urllib
10 | import yaml
11 |
12 | # #go https://github.com/MarshalX/atproto
13 | from atproto import Client, client_utils, models
14 |
# Command-line interface. --config and --output-dir are required; --write
# gates all side effects (default is a dry run).
parser = argparse.ArgumentParser(description='Agora Bot for Bluesky (atproto).')
parser.add_argument('--config', dest='config', type=argparse.FileType('r'), required=True, help='The path to agora-bot.yaml, see agora-bot.yaml.example.')
# NOTE: this was `type=bool`, which treats ANY non-empty value -- including
# the literal string "False" -- as true. A plain store_true flag matches the
# behaviour of --write below and removes that footgun.
parser.add_argument('--verbose', dest='verbose', action='store_true', help='Whether to log more information.')
parser.add_argument('--output-dir', dest='output_dir', required=True, help='The path to a directory where data will be dumped as needed. If it does not exist, we will try to create it.')
parser.add_argument('--write', dest='write', action="store_true", help='Whether to actually post (default, when this is off, is dry run).')
args = parser.parse_args()
21 |
# Matches Agora [[wikilinks]]; non-greedy so adjacent links stay separate.
WIKILINK_RE = re.compile(r'\[\[(.*?)\]\]', re.IGNORECASE)
# thou shall not use regexes to parse html, except when yolo
HASHTAG_RE = re.compile(r'#(\w+)', re.IGNORECASE)
# Splits an at:// post URI into (repo/handle, record key).
# https://github.com/bluesky-social/atproto/discussions/2523
URI_RE = re.compile(r'at://(.*?)/app.bsky.feed.post/(.*)', re.IGNORECASE)

# Module-wide logger; verbosity is controlled by the --verbose flag.
logging.basicConfig()
L = logging.getLogger('agora-bot')
if args.verbose:
    L.setLevel(logging.DEBUG)
else:
    L.setLevel(logging.INFO)
34 |
def uniq(l):
    """Deduplicate `l` and return the survivors sorted case-insensitively.

    Only works for hashable items (this bot passes strings); ordering
    uses str.casefold so e.g. 'Apple' and 'apple' compare as equal keys.
    """
    deduped = set(l)
    return sorted(deduped, key=str.casefold)
40 |
def mkdir(string):
    """Ensure directory `string` exists and return its absolute path.

    Creates intermediate directories as needed (like `mkdir -p`). Failures
    are logged rather than raised, preserving the original best-effort
    behaviour; the absolute path is returned either way.
    """
    if not os.path.isdir(string):
        print(f"Trying to create {string}.")
        try:
            # Replaces the previous subprocess.run(['mkdir', '-p', ...]):
            # portable, no shell-out, and exist_ok=True avoids a TOCTOU race
            # with the isdir() check above.
            os.makedirs(string, exist_ok=True)
        except OSError as e:
            L.error(e)
    return os.path.abspath(string)
48 |
49 |
50 |
51 | class AgoraBot(object):
52 |
53 | def __init__(self):
54 | try:
55 | self.config = yaml.safe_load(args.config)
56 | except yaml.YAMLError as e:
57 | L.error(e)
58 |
59 | self.client = Client(base_url='https://bsky.social')
60 | self.client.login(self.config['user'], self.config['password'])
61 |
62 | self.me = self.client.resolve_handle(self.config['user'])
63 |
64 |     def build_reply(self, entities):
   |         """Build a TextBuilder reply with one https://anagora.org link per entity."""
65 |         # always at-mention at least the original author.
66 |         text_builder = client_utils.TextBuilder()
67 |         for entity in entities:
   |             # URL-encode the node name so spaces etc. survive in the path.
68 |             path = urllib.parse.quote_plus(entity)
69 |             url = f'https://anagora.org/{path}'
70 |             text_builder.link(url, url)
71 |             text_builder.text('\n')
72 |         return text_builder
73 |
74 | def post_uri_to_url(self, uri):
75 | base = 'https://bsky.app'
76 | match = URI_RE.search(uri)
77 | profile = match.group(1)
78 | rkey = match.group(2)
79 | return f'{base}/profile/{profile}/post/{rkey}'
80 |
81 |     def log_post(self, uri, post, entities):
   |         """Record a link to `post` under each node in `entities`.
   |
   |         Doubles as the dedup database for maybe_reply: returns True when a
   |         write happened (or would, under --write), False when logging was
   |         skipped (no output dir, duplicate found, or write error).
   |         """
82 |         url = self.post_uri_to_url(uri)
83 | 
84 |         if not args.output_dir:
85 |             return False
86 | 
87 |         if not args.write:
88 |             L.info(f'Here we would log a link to {url} in nodes {entities}.')
89 | 
90 |         for node in entities:
91 |             if ('/' in node):
92 |                 # for now, dump only to the last path fragment -- this yields the right behaviour in e.g. [[go/cat-tournament]]
93 |                 node = os.path.split(node)[-1]
94 | 
95 |             # TODO: update username after refactoring.
96 |             bot_stream_dir = mkdir(os.path.join(args.output_dir, self.config['user']))
97 |             bot_stream_filename = os.path.join(bot_stream_dir, node + '.md')
98 | 
99 |             # dedup logic.
100 |             try:
101 |                 with open(bot_stream_filename, 'r') as note:
102 |                     note = note.read()
103 |                     L.info(f"In note: {note}.")
104 |                     if note and url in note:
105 |                         L.info("Post already logged to note.")
    |                         # NOTE(review): returning here skips the remaining
    |                         # nodes as soon as one is a duplicate — confirm intended.
106 |                         return False
107 |                     else:
108 |                         if args.write:
109 |                             L.info("Post will be logged to note.")
110 |             except FileNotFoundError:
111 |                 pass
112 | 
113 |             # try to append.
114 |             try:
115 |                 if args.write:
116 |                     with open(bot_stream_filename, 'a') as note:
117 |                         note.write(f"- [[{post.indexed_at}]] @[[{post.author.handle}]]: {url}\n")
    |             # NOTE(review): bare except also swallows KeyboardInterrupt etc.
118 |             except:
119 |                 L.error("Couldn't log post to note.")
120 |                 return False
121 | 
122 |         return True
123 |
124 |     def maybe_reply(self, uri, post, msg, entities):
    |         """Reply to `post` with `msg`, but only under --write and only once."""
125 |         L.info(f'Would reply to {post} with {msg.build_text()}')
126 |         ref = models.create_strong_ref(post)
127 |         if args.write:
128 |             # Only actually write if we haven't written before (from the PoV of the current agora).
129 |             # log_post should return false if we have already written a link to node previously.
130 |             if self.log_post(uri, post, entities):
131 |                 self.client.send_post(msg, reply_to=models.AppBskyFeedPost.ReplyRef(parent=ref, root=ref))
132 |         else:
133 |             L.info(f'Skipping replying due to dry_run. Pass --write to actually write.')
134 |
135 |     def get_followers(self):
    |         # Accounts following the bot. Single page; no cursor pagination here.
136 |         return self.client.get_followers(self.config['user'])['followers']
137 |
139 |     def get_follows(self):
    |         # Accounts the bot follows. Single page; no cursor pagination here.
140 |         return self.client.get_follows(self.config['user'])['follows']
140 |
141 | def get_mutuals(self):
142 | # Note we'll return a set of DIDs (type hints to the rescue? eventually... :))
143 | mutuals = set()
144 | follows = self.get_follows()
145 | followers = self.get_followers()
146 | for follower in followers:
147 | if follower.did in [f.did for f in follows]:
148 | # Ahoy matey!
149 | mutuals.add(follower.did)
150 |
151 | # This is no longer needed but remains an example of working with cursors.
152 | # cursor = ''
153 | # # This work but it is quite inefficient to check for mutualness as some accounts follow *a lot* of people.
154 | # while True:
155 | # L.info(f'Processing following list for {follower.handle} with cursor {cursor}')
156 | # follows = self.client.get_follows(follower.did, limit=100, cursor=cursor)
157 | # for following in follows:
158 | # if following[0] == 'follows':
159 | # for follow in following[1]:
160 | # # L.info(f'{follow.did}')
161 | # if follow.did == self.me.did:
162 | # # Ahoy matey!
163 | # L.info(f'{follower.handle} follows us!')
164 | # mutuals.add(follower.did)
165 | # cursor = follows.cursor
166 | # if not cursor:
167 | # break
168 |
169 | return mutuals
170 |
171 | def follow_followers(self):
172 | for follower in self.get_followers():
173 | if follower.did in self.get_mutuals():
174 | L.info(f'-> We already follow {follower.handle}')
175 | else:
176 | L.info(f'-> Trying to follow back {follower.handle}')
177 | self.client.follow(follower.did)
178 |
179 |     def catch_up(self):
    |         """Scan recent posts from mutuals and reply to those with [[wikilinks]]."""
180 |         for mutual_did in self.get_mutuals():
181 |             L.info(f'-> Processing posts by {mutual_did}...')
182 |             posts = self.client.app.bsky.feed.post.list(mutual_did, limit=100)
183 |             for uri, post in posts.records.items():
184 |                 wikilinks = WIKILINK_RE.findall(post.text)
185 |                 if wikilinks:
186 |                     entities = uniq(wikilinks)
187 |                     L.info(f'\nSaw wikilinks at {uri}:\n{post.text}\n')
188 |                     msg = self.build_reply(entities)
189 |                     L.info(f'\nWould respond with:\n{msg.build_text()}\n--\n')
190 |                     # atproto somehow needs this kind of post and not the... other?
191 |                     actual_post = self.client.get_posts([uri]).posts[0]
192 |                     self.maybe_reply(uri, actual_post, msg, entities)
193 |
194 | def main():
    |     """Entry point: follow back followers and catch up on posts, forever."""
195 |     # How much to sleep between runs, in seconds (this may go away once we're using a subscription model?).
196 |     sleep = 60
197 | 
198 |     bot = AgoraBot()
199 | 
200 |     while True:
201 |         bot.follow_followers()
202 |         bot.catch_up()
203 | 
204 |         L.info(f'-> Sleeping for {sleep} seconds...')
205 |         time.sleep(sleep)
206 | 
207 | # Much more goes here I guess :)
208 | 
209 | if __name__ == "__main__":
210 |     main()
211 |
--------------------------------------------------------------------------------
/bots/bluesky/agora-bot.yaml.example:
--------------------------------------------------------------------------------
1 | # - [[flancian]] says:
2 | # - this file is meant to be consumed by an [[agora bot]] kept in https://github.com/flancian/agora-bridge.
3 | # - for a specification of this format, please consult https://anagora.org/agora-bot.
4 | # - for instructions to set up a fully working [[agora]] using this and other repos, please consult https://anagora.org/agora-setup.
5 |
6 | user: something.bsky.social
7 | password: your password here
8 |
--------------------------------------------------------------------------------
/bots/bluesky/run-dev.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Trying to move to [[poetry]] across the board.
3 | # . venv/bin/activate
4 | OUTPUT=$HOME/agora/stream/
5 | mkdir -p "${OUTPUT}"
6 | # "$@" is quoted so caller arguments with spaces survive word splitting;
7 | # --output-dir is spelled in full instead of relying on argparse abbreviation.
8 | ~/.local/bin/poetry run ./agora-bot.py --config agora-bot.yaml --output-dir="${OUTPUT}" "$@"
7 |
--------------------------------------------------------------------------------
/bots/bluesky/run-prod.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Trying to move to [[poetry]] across the board.
3 | # . venv/bin/activate
4 | OUTPUT=$HOME/agora/stream/
5 | mkdir -p "${OUTPUT}"
6 | # Install poetry with pipx install poetry or similar if you don't have it.
7 | # "$@" is quoted so caller arguments with spaces survive word splitting;
8 | # --output-dir is spelled in full instead of relying on argparse abbreviation.
9 | ~/.local/bin/poetry run ./agora-bot.py --config agora-bot.yaml --output-dir="${OUTPUT}" --write "$@"
8 |
--------------------------------------------------------------------------------
/bots/bluesky/setup.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | (command -v python3 && command -v pip3) || (echo 'Please install Python3 and pip using your OS packaging system. In Debian: sudo apt-get install python3 python3-venv python3-pip' && exit 42)
4 |
5 | python3 -m venv venv &&
6 | . venv/bin/activate &&
7 | pip3 install -r requirements.txt
8 |
9 | echo "see agora-bridge.service and https://anagora.org/systemd for pointers on how to set up a production agora as a system service."
10 |
--------------------------------------------------------------------------------
/bots/mastodon/.agora-bot.py.swp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/flancian/agora-bridge/f338fc9ef528c4903f268ae0a1e80c6bcbeea865/bots/mastodon/.agora-bot.py.swp
--------------------------------------------------------------------------------
/bots/mastodon/agora-bot.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # Copyright 2021 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | # an [[agora bridge]], that is, a utility that takes a .yaml file describing a set of [[personal knowledge graphs]] or [[digital gardens]] and pulls them to be consumed by other bridges or an [[agora server]]. [[flancian]]
17 |
18 | import argparse
19 | import glob
20 | import logging
21 | import os
22 | import subprocess
23 | import random
24 | import re
25 | import time
26 | import urllib
27 | import yaml
28 |
29 | from collections import OrderedDict
30 | from datetime import datetime
31 | from mastodon import Mastodon, StreamListener, MastodonAPIError, MastodonNetworkError
32 |
33 | # [[2022-11-17]]: changing approaches, bots should write by calling an Agora API; direct writing to disk was a hack.
34 | # common.py should have the methods to write resources to a node in any case.
35 | # (maybe direct writing to disk can remain as an option, as it's very simple and convenient if people are running local agoras?).
36 | # [[2025-03-23]]: drive by while I'm passing by here fixing a different thing -- I think I'm coming to terms with the fact that a lot of hacks I intend to fix will be permanent :) not saying that this is one, but if it is, so be it. Future Agoras will learn from our mistakes ;)
37 | import common
38 |
   | # Patterns the bot reacts to in toot HTML/content.
39 | WIKILINK_RE = re.compile(r'\[\[(.*?)\]\]', re.IGNORECASE)
40 | # thou shall not use regexes to parse html, except when yolo
41 | HASHTAG_RE = re.compile(r'#(\w+)', re.IGNORECASE)
42 | PUSH_RE = re.compile(r'\[\[push\]\]', re.IGNORECASE)
43 | # Buggy, do not enable without revamping build_reply()
   | # Probability of attaching a help toot to a reply; 0.0 disables it.
44 | P_HELP = 0.0
45 |
46 | parser = argparse.ArgumentParser(description='Agora Bot for Mastodon (ActivityPub).')
47 | parser.add_argument('--config', dest='config', type=argparse.FileType('r'), required=True, help='The path to agora-bot.yaml, see agora-bot.yaml.example.')
48 | parser.add_argument('--verbose', dest='verbose', type=bool, default=False, help='Whether to log more information.')
49 | parser.add_argument('--output-dir', dest='output_dir', required=True, help='The path to a directory where data will be dumped as needed. If it does not exist, we will try to create it.')
50 | parser.add_argument('--dry-run', dest='dry_run', action="store_true", help='Whether to refrain from posting or making changes.')
51 | parser.add_argument('--catch-up', dest='catch_up', action="store_true", help='Whether to run code to catch up on missed toots (e.g. because we were down for a bit, or because this is a new bot instance.')
52 | args = parser.parse_args()
53 |
54 | logging.basicConfig()
55 | L = logging.getLogger('agora-bot')
   | # --verbose bumps the log level to DEBUG; the default is INFO.
56 | if args.verbose:
57 |     L.setLevel(logging.DEBUG)
58 | else:
59 |     L.setLevel(logging.INFO)
60 |
61 | def uniq(l):
   |     # Deduplicate `l` and return it case-insensitively sorted.
   |     # Despite the name, output order is sorted, not first-seen order.
   |     # Assumes items are strings (str.casefold) — TODO confirm at call sites.
62 |     # also orders, because actually it works better.
63 |     # return list(OrderedDict.fromkeys(l))
64 |     # only works for hashable items
65 |     return sorted(list(set(l)), key=str.casefold)
66 |
67 | class AgoraBot(StreamListener):
68 | """main class for [[agora bot]] for [[mastodon]]."""
69 | # this follows https://mastodonpy.readthedocs.io/en/latest/#streaming and https://github.com/ClearlyClaire/delibird/blob/master/main.py
70 |
71 |     def __init__(self, mastodon, bot_username):
   |         """Wrap a logged-in Mastodon client; bot_username is 'user@instance'."""
72 |         StreamListener.__init__(self)
73 |         self.mastodon = mastodon
74 |         self.bot_username = bot_username
75 |         L.info(f'[[agora bot]] for {bot_username} started!')
76 |
77 | def send_toot(self, msg, in_reply_to_id=None):
78 | L.info('sending toot.')
79 | status = self.mastodon.status_post(msg, in_reply_to_id=in_reply_to_id)
80 |
81 | def boost_toot(self, id):
82 | L.info('boosting toot.')
83 | status = self.mastodon.status_reblog(id)
84 |
85 | def build_reply(self, status, entities):
86 | # These could be made a lot more user friendly just by making the Agora bot return *anything* beyond just links and mentions!
87 | # At least some greeting...?
88 | if random.random() < P_HELP:
89 | self.send_toot('If an Agora hears about a [[wikilink]] or #hashtag, it will try to resolve them for you and link your resources in the [[nodes]] or #nodes you mention.', status.id)
90 | lines = []
91 |
92 | # always at-mention at least the original author.
93 | mentions = f"@{status['account']['acct']} "
94 | if status.mentions:
95 | # Some time in the past I wrote: if other people are mentioned in the thread, only at mention them if they also follow us.
96 | # [[2025-03-23]]: honestly, as I'm rereading this code while on a long flight, I'm not sure this was a great idea.
97 | # I see no strong reason to make an information-integrator and information-spreader like Agora bot less effective by dropping people from threads.
98 | # see https://social.coop/@flancian/108153868738763998 for initial reasoning -- but as I'm standing here I'm leaning towards disabling this.
99 | #
100 | # followers = [x['acct'] for x in self.get_followers()]
101 | for mention in status.mentions:
102 | # if mention['acct'] in followers:
103 | mentions += f"@{mention['acct']} "
104 |
105 | lines.append(mentions)
106 |
107 | for entity in entities:
108 | path = urllib.parse.quoe_plus(entity)
109 | lines.append(f'https://anagora.org/{path}')
110 |
111 | msg = '\n'.join(lines)
112 | return msg
113 |
114 |     def log_toot(self, toot, nodes):
    |         """Record a link to `toot` under each node in `nodes`.
    |
    |         Doubles as the dedup database for maybe_reply: returns True when a
    |         write happened, False when skipped (no output dir, duplicate, error).
    |         """
115 |         if not args.output_dir:
116 |             # note this actually means that if output_dir is not set up this bot won't respond to messages,
117 |             # as the caller currently thinks False -> do not post (to prevent duplicates).
118 |             return False
119 | 
120 |         for node in nodes:
121 |             if ('/' in node):
122 |                 # for now, dump only to the last path fragment -- this yields the right behaviour in e.g. [[go/cat-tournament]]
123 |                 node = os.path.split(node)[-1]
124 | 
125 |             bot_stream_dir = common.mkdir(os.path.join(args.output_dir, self.bot_username))
126 |             bot_stream_filename = os.path.join(bot_stream_dir, node + '.md')
127 | 
128 |             # dedup logic.
129 |             try:
130 |                 with open(bot_stream_filename, 'r') as note:
131 |                     note = note.read()
132 |                     L.info(f"Note: {note}.")
133 |                     # why both? it has been lost to the mists of time, or maybe the commit log :)
134 |                     # perhaps uri is what's set in pleroma?
135 |                     if note and (toot.url or toot.uri) in note:
136 |                         L.info("Toot already logged to note.")
    |                         # NOTE(review): returning here skips the remaining
    |                         # nodes as soon as one is a duplicate — confirm intended.
137 |                         return False
138 |                     else:
139 |                         L.info("Toot will be logged to note.")
140 |             except FileNotFoundError:
141 |                 pass
142 | 
143 |             # try to append.
144 |             try:
145 |                 with open(bot_stream_filename, 'a') as note:
146 |                     url = toot.url or toot.uri
147 |                     # Now also adding creation datetime of the toot to align with other bots.
148 |                     note.write(f"- [[{toot.created_at}]] [[{toot.account.acct}]] {url}\n")
    |             # NOTE(review): bare except also swallows KeyboardInterrupt etc.
149 |             except:
150 |                 L.error("Couldn't log toot to note.")
151 |                 return False
152 |         return True
153 |
154 | def write_toot(self, toot, nodes):
155 | L.debug(f"Maybe logging toot if user has opted in.")
156 | if not args.output_dir:
157 | return False
158 |
159 | # toot.account.acct is flancian@social.coop, .username is actually just flancian
160 | username = toot.account.acct
161 |
162 | if not self.wants_writes(username):
163 | L.info(f"User {username} has NOT opted in, skipping logging full post.")
164 | return False
165 | L.info(f"User {username} has opted in to writing, pushing (publishing) full post text to an Agora.")
166 |
167 | user_stream_dir = common.mkdir(os.path.join(args.output_dir, username))
168 |
169 | for node in nodes:
170 | user_stream_filename = os.path.join(user_stream_dir, node + '.md')
171 | try:
172 | with open(user_stream_filename, 'a') as note:
173 | url = toot.url or toot.uri
174 | note.write(f"- [[{toot.created_at}]] @[[{username}]] (link):\n - {toot.content}\n")
175 | except:
176 | L.error("Couldn't log full post to note in user stream.")
177 | return
178 |
179 |     def is_mentioned_in(self, username, node):
    |         """Return True iff [[username]] appears in the bot's note for `node`."""
180 |         # TODO: fix this.
181 |         if not args.output_dir:
182 |             return False
183 | 
184 |         if ('/' in node):
185 |             # for now, dump only to the last path fragment -- this yields the right behaviour in e.g. [[go/cat-tournament]]
186 |             node = os.path.split(node)[-1]
187 | 
188 |         agora_stream_dir = common.mkdir(os.path.join(args.output_dir, self.bot_username))
189 |         filename = os.path.join(agora_stream_dir, node + '.md')
190 |         L.info(f"Checking if {username} is mentioned in {node} meaning (unknown).")
191 | 
192 |         try:
193 |             with open(filename, 'r') as note:
194 |                 if f'[[{username}]]' in note.read():
195 |                     L.info(f"User {username} is mentioned in {node}.")
196 |                     return True
197 |                 else:
198 |                     L.info(f"User {username} not mentioned in {node}.")
199 |                     return False
    |         # No note for this node yet: treat as "not mentioned".
200 |         except FileNotFoundError:
201 |             return False
202 |
203 |     def wants_writes(self, user):
    |         """Infer from the Agora whether `user` opted in to full-post writes."""
204 |         # Allowlist to begin testing? :)
205 |         WANTS_WRITES = ['@flancian@social.coop']
206 | 
207 |         if user in WANTS_WRITES:
208 |             return True
209 |         # Trying to infer opt in status from the Agora: does the node 'push' contain a mention of the user?
210 |         if self.is_mentioned_in(user, 'push') and not self.is_mentioned_in(user, 'no push'):
211 |             return True
212 |         # Same for [[opt in]]
213 |         if self.is_mentioned_in(user, 'opt in') and not self.is_mentioned_in(user, 'opt out'):
214 |             return True
215 |         return False
216 |
217 |     def maybe_reply(self, status, msg, entities):
    |         """Reply to `status` with `msg` unless dry-running or already logged."""
218 | 
219 |         if args.dry_run:
220 |             L.info(f"-> not replying due to dry run, message would be: {msg}")
221 |             return False
222 | 
223 |         # we use the log as a database :)
224 |         if self.log_toot(status, entities):
225 |             self.send_toot(msg, status.id)
226 |             # maybe write the full message to disk if the user seems to have opted in.
227 |             # one user -> one directory, as that allows us to easily transfer history to users.
228 |             # [[digital self determination]]
229 |             self.write_toot(status, entities)
230 |         else:
231 |             L.info("-> not replying due to failed or redundant logging, skipping to avoid duplicates.")
232 |
233 |     def get_followers(self):
    |         """Return ALL followers, paginating with Mastodon.py's fetch_next."""
234 |         # First batching method, we will probably need more of these :)
235 |         batch = self.mastodon.account_followers(self.mastodon.me().id, limit=80)
236 |         followers = []
    |         # fetch_next returns the next page, or a falsy value when exhausted.
237 |         while batch:
238 |             followers += batch
239 |             batch = self.mastodon.fetch_next(batch)
240 |         return followers
241 |
242 | def get_statuses(self, user):
243 | # Added on [[2025-03-23]] to work around weird Mastodon bug with sorting, and it seems generally useful so...
244 | batch = mastodon.account_statuses(user['id'], limit=40)
245 | posts = []
246 | while batch:
247 | posts += batch
248 | batch = self.mastodon.fetch_next(batch)
249 | return posts
250 |
251 |     def is_following(self, user):
    |         # NOTE(review): despite the name, this checks whether `user` is in our
    |         # *followers* list (get_followers), i.e. whether they follow us.
252 |         following_accounts = [f['acct'] for f in self.get_followers()]
253 |         if user not in following_accounts:
254 |             L.info(f"account {user} not in followers: {following_accounts}.")
255 |             return False
256 |         return True
257 |
258 |     def handle_wikilink(self, status, match=None):
    |         """Reply with Agora links for every [[wikilink]] in a follower's status."""
259 |         L.info(f'handling at least one wikilink: {status.content}, {match}')
260 | 
261 |         if status['reblog']:
262 |             L.info(f'Not handling boost.')
263 |             return True
264 | 
265 |         # We want to only reply to accounts that follow us.
266 |         user = status['account']['acct']
267 |         if not self.is_following(user):
268 |             return True
269 | 
270 |         wikilinks = WIKILINK_RE.findall(status.content)
271 |         entities = uniq(wikilinks)
272 |         msg = self.build_reply(status, entities)
273 |         self.maybe_reply(status, msg, entities)
274 |
275 | def handle_hashtag(self, status, match=None):
276 | L.info(f'handling at least one hashtag: {status.content}, {match}')
277 | user = status['account']['acct']
278 |
279 | # Update (2023-07-19): We want to only reply hashtag posts to accounts that opted in.
280 | if not self.is_mentioned_in(user, 'opt in'):
281 | return True
282 |
283 | # We want to only reply to accounts that follow us.
284 | user = status['account']['acct']
285 | if not self.is_following(user):
286 | return True
287 |
288 | # These users have opted out of hashtag handling.
289 | if 'bmann' in user or self.is_mentioned_in(user, 'opt out'):
290 | L.info(f'Opting out user {user} from hashtag handling.')
291 | return True
292 | if status['reblog'] and not self.is_mentioned_in(user, 'opt in'):
293 | L.info(f'Not handling boost from non-opted-in user.')
294 | return True
295 | hashtags = HASHTAG_RE.findall(status.content)
296 | entities = uniq(hashtags)
297 | msg = self.build_reply(status, entities)
298 | self.maybe_reply(status, msg, entities)
299 |
300 |     def handle_push(self, status, match=None):
    |         """[[push]] handling — currently disabled (see the early return)."""
301 |         L.info(f'seen push: {status}, {match}')
302 |         # This has a bug as of [[2022-08-13]], likely having to do with us not logging pushes to disk as with other triggers.
    |         # NOTE(review): everything after this return is unreachable by design
    |         # until the bug above is fixed.
303 |         return False
304 |         if args.dry_run:
305 |             L.info("-> not replying due to dry run")
306 |             return False
307 |         self.send_toot('If you ask an Agora to push and you are a friend, the Agora will try to push with you.', status.id)
308 |         self.boost_toot(status.id)
309 |
310 |     def handle_mention(self, status):
311 |         """Handle toots mentioning the [[agora bot]], which may contain commands"""
312 |         L.info('Got a mention!')
313 |         # Process commands, in order of priority
314 |         cmds = [(PUSH_RE, self.handle_push),
315 |                 (WIKILINK_RE, self.handle_wikilink),
316 |                 (HASHTAG_RE, self.handle_hashtag)]
    |         # Every matching handler runs (handlers do their own opt-in checks).
317 |         for regexp, handler in cmds:
318 |             match = regexp.search(status.content)
319 |             if match:
320 |                 handler(status, match)
321 |
322 |     def handle_update(self, status):
323 |         """Handle toots with [[patterns]] by people that follow us."""
324 |         # Process commands, in order of priority
325 |         cmds = [(PUSH_RE, self.handle_push),
326 |                 (WIKILINK_RE, self.handle_wikilink),
327 |                 (HASHTAG_RE, self.handle_hashtag)]
    |         # Every matching handler runs (handlers do their own opt-in checks).
328 |         for regexp, handler in cmds:
329 |             match = regexp.search(status.content)
330 |             if match:
331 |                 L.info(f'Got a status with a pattern! {status.url}')
332 |                 handler(status, match)
333 |
334 | def handle_follow(self, notification):
335 | """Try to handle live follows of [[agora bot]]."""
336 | L.info('Got a follow!')
337 | mastodon.account_follow(notification.account)
338 |
339 | def handle_unfollow(self, notification):
340 | """Try to handle live unfollows of [[agora bot]]."""
341 | L.info('Got an unfollow!')
342 | mastodon.account_follow(notification.account)
343 |
344 | def on_notification(self, notification):
345 | # we get this for explicit mentions.
346 | self.last_read_notification = notification.id
347 | if notification.type == 'mention':
348 | self.handle_mention(notification.status)
349 | elif notification.type == 'follow':
350 | self.handle_follow(notification.status)
351 | elif notification.type == 'unfollow':
352 | self.handle_unfollow(notification.status)
353 | else:
354 | L.info(f'received unhandled notification type: {notification.type}')
355 |
356 |     def on_update(self, status):
    |         """Streaming callback for new statuses; delegate to handle_update."""
357 |         # we get this on all activity on our watching list.
358 |         self.handle_update(status)
359 |
360 | def get_watching(mastodon):
    |     """Create and return a fresh Mastodon list named after the current time."""
361 |     now = datetime.now()
362 |     watching = mastodon.list_create(f'{now}')
363 |     return watching
364 |
365 | def main():
366 | try:
367 | config = yaml.safe_load(args.config)
368 | except yaml.YAMLError as e:
369 | L.error(e)
370 |
371 | # Set up Mastodon API.
372 | mastodon = Mastodon(
373 | version_check_mode="none",
374 | access_token = config['access_token'],
375 | api_base_url = config['api_base_url'],
376 | )
377 |
378 | bot_username = f"{config['user']}@{config['instance']}"
379 |
380 | bot = AgoraBot(mastodon, bot_username)
381 | followers = bot.get_followers()
382 | # Now unused?
383 | watching = get_watching(mastodon)
384 |
385 | # try to clean up one old list to account for the one we'll create next.
386 | lists = mastodon.lists()
387 | try:
388 | for l in lists[:-5]:
389 | L.info(f"trying to clean up an old list: {l}, {l['id']}.")
390 | mastodon.list_delete(l['id'])
391 | L.info(f"clean up succeeded.")
392 | except:
393 | L.info("couldn't clean up list.")
394 |
395 | try:
396 | mastodon.list_accounts_add(watching, followers)
397 | except MastodonAPIError as e:
398 | print("error when trying to add accounts to watching")
399 | print(f"watching: {watching}")
400 | print(e)
401 |
402 | # why do we have both? hmm.
403 | # TODO(flancian): look in commit history or try disabling one.
404 | # it would be nice to get rid of lists if we can.
405 |
406 | # as of 2025-01 and with the move to GoToSocial (social.agor.ai), streaming seems broken.
407 | # suspecting GTS, for now we go back to polling/catching up.
408 | # given that we know own the instance, I am fine bumping throttling limits and just going with this for now.
409 |
410 | # # TODO: re-add?
411 | # L.info('trying to stream user.')
412 | # mastodon.stream_user(bot, run_async=True, reconnect_async=True)
413 | # mastodon.stream_user(bot)
414 | # L.info('now streaming.')
415 |
416 | # We used to do lists -- maybe worth trying again with GTS?
417 | # L.info('trying to stream list.')
418 | # mastodon.stream_list(id=watching.id, listener=bot, run_async=True, reconnect_async=True)
419 | while True:
420 | L.info('[[agora mastodon bot]] is alive, trying to catch up with new friends and lost posts.')
421 |
422 | # YOLO -- working around a potential bug after the move to GoToSocial :)
423 | followers = bot.get_followers()
424 | for user in followers:
425 | L.info(f'Trying to follow back {user.acct}')
426 | try:
427 | mastodon.account_follow(user.id)
428 | except MastodonAPIError:
429 | pass
430 |
431 | if args.catch_up:
432 | L.info(f"trying to catch up with any missed toots for user {user.acct}.")
433 | # the mastodon API... sigh.
434 | # mastodon.timeline() maxes out at 40 toots, no matter what limit we set.
435 | # (this might be a limitation of botsin.space?)
436 | # mastodon.list_timeline() looked promising but always comes back empty with no reason.
437 | # so we need to iterate per-user in the end. should be OK.
438 | L.info(f'fetching latest toots by user {user.acct}')
439 | # as of [[2025-03-23]], I'm here trying to figure out why suddenly the Agora bot is not seeing new toots.
440 | # maybe the ordering of toots is implementation-dependent and we're supposed to iterate/sort client side?
441 | # looking into Mastodon.py, there's not much to this method beyond a wrapper that calls __api_request to //statuses...
442 | for status in bot.get_statuses(user):
443 | # this should handle deduping, so it's safe to always try to reply.
444 | bot.handle_update(status)
445 |
446 | L.info('Sleeping...')
447 | time.sleep(30)
448 |
449 | if __name__ == "__main__":
450 | main()
451 |
--------------------------------------------------------------------------------
/bots/mastodon/agora-bot.yaml.example:
--------------------------------------------------------------------------------
1 | ---
2 | # - [[flancian]] says:
3 | # - this file is meant to be consumed by an [[agora bot]] kept in https://github.com/flancian/agora-bridge.
4 | # - for a specification of this format, please consult https://anagora.org/agora-bot.
5 | # - for instructions to set up a fully working [[agora]] using this and other repos, please consult https://anagora.org/agora-setup.
6 |
7 | # [[botsin space]] is a good default for bots in the fediverse.
8 | # the bot user in your chosen instance. for @agora@botsin.space, 'agora'.
9 | user: agora
10 | # for @agora@botsin.space, 'botsin.space'.
11 | instance: botsin.space
12 | # a bit redundant but gives protocol port flexibility so why not.
13 | api_base_url: https://botsin.space/
14 |
15 | # keep this secret.
16 | # can generate one in https://botsin.space/settings/applications, create an application as needed.
17 | access_token: keep-this-secret
18 |
--------------------------------------------------------------------------------
/bots/mastodon/common.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # Copyright 2022 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | #
16 | # Exploring some code convergence between bots.
17 | #
18 | # This is unusable currently because bots can't import "up" in the tree due to how Python works, and the complexity needed to "fix" that wouldn't be a good investment, more like a costly hack. Better to refactor further and merge this Python codebase currently under 'bots' (and associated virtual environments) up to the root?
19 |
20 | import argparse
21 | import os
22 | import subprocess
23 |
24 | # https://stackoverflow.com/questions/11415570/directory-path-types-with-argparse
25 | class readable_dir(argparse.Action):
   |     # argparse Action that validates its value is an existing, readable
   |     # directory before storing it on the namespace.
26 |     def __call__(self, parser, namespace, values, option_string=None):
27 |         prospective_dir=values
28 |         if not os.path.isdir(prospective_dir):
29 |             raise argparse.ArgumentTypeError("readable_dir:{0} is not a valid path".format(prospective_dir))
30 |         if os.access(prospective_dir, os.R_OK):
31 |             setattr(namespace,self.dest,prospective_dir)
32 |         else:
33 |             raise argparse.ArgumentTypeError("readable_dir:{0} is not a readable dir".format(prospective_dir))
34 |
35 | def mkdir(string):
36 | if not os.path.isdir(string):
37 | print(f"Trying to create {string}.")
38 | output = subprocess.run(['mkdir', '-p', string], capture_output=True)
39 | if output.stderr:
40 | L.error(output.stderr)
41 | return os.path.abspath(string)
42 |
--------------------------------------------------------------------------------
/bots/mastodon/dry-run.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Forward all caller arguments; "$@" is quoted so args with spaces survive.
3 | ./run.sh --dry-run "$@"
3 |
--------------------------------------------------------------------------------
/bots/mastodon/questions.md:
--------------------------------------------------------------------------------
1 | - Why does mastodon.account_statuses(user['id'], limit=40) seem to return only old messages suddenly?
2 | - Maybe this is related to the migration to social.agor.ai? But if so, how did it ever work in testing?
3 | - I need to check if there are parameters to specify I want to retrieve the latest toots first?
4 | - Or just iterate to do "proper catchup"... hmm.
5 | - Reading Mastodon.py, the block list on `__api_request` looks interesting...
6 | - Apparently I want to use min_id pagination, and not since_id (?).
7 | - -> Trying the "while batch" approach like I did previously with get_followers.
8 | - Question for next time I try to do a Python code change offline: what's the best tool to make sure that syntax is fine in my script without actually running it (when I need connectivity for a run)? pylint or something in that space?
9 |
--------------------------------------------------------------------------------
/bots/mastodon/requirements.txt:
--------------------------------------------------------------------------------
1 | blurhash==1.1.4
2 | certifi==2021.5.30
3 | chardet==4.0.0
4 | decorator==5.0.9
5 | idna==2.10
6 | Mastodon.py==1.5.1
7 | python-dateutil==2.8.1
8 | python-magic==0.4.24
9 | pytz==2021.1
10 | PyYAML==5.4.1
11 | requests==2.25.1
12 | six==1.16.0
13 | urllib3==1.26.6
14 |
--------------------------------------------------------------------------------
/bots/mastodon/run-dev.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Trying to move to [[poetry]] across the board.
# . venv/bin/activate
OUTPUT=/home/flancian/agora/stream/
# -p: idempotent -- no error if the directory already exists, and missing
# parents are created too (plain mkdir printed an error on every rerun).
mkdir -p "${OUTPUT}"
# Quote expansions so paths/arguments with spaces survive word splitting.
poetry run ./agora-bot.py --config agora-bot.yaml --catch-up --dry-run --output="${OUTPUT}" "$@"
7 |
--------------------------------------------------------------------------------
/bots/mastodon/run-prod.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Will migrate to poetry.
# . venv/bin/activate
OUTPUT=/home/agora/agora/stream/
# -p: idempotent -- no error if the directory already exists, and missing
# parents are created too (plain mkdir printed an error on every rerun).
mkdir -p "${OUTPUT}"
# This shouldn't be needed but it is when running something based on Poetry as a systemd service for some reason.
export PATH=$HOME/.local/bin:${PATH}
# Quote expansions so paths/arguments with spaces survive word splitting.
poetry run ./agora-bot.py --config agora-bot.yaml --output="${OUTPUT}" --catch-up "$@"
9 |
--------------------------------------------------------------------------------
/bots/mastodon/setup.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# NOTE: the previous form `(...) || (echo ... && exit 42)` never aborted the
# script -- `exit` inside a (...) subshell only exits the subshell. Use an
# explicit if so a missing interpreter really stops the setup.
if ! (command -v python3 && command -v pip3); then
  echo 'Please install Python3 and pip using your OS packaging system. In Debian: sudo apt-get install python3 python3-venv python3-pip'
  exit 42
fi

python3 -m venv venv &&
. venv/bin/activate &&
pip3 install -r requirements.txt

echo "see agora-bridge.service and https://anagora.org/systemd for pointers on how to set up a production agora as a system service."
--------------------------------------------------------------------------------
/bots/matrix/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
203 | --
204 |
205 | Code in this directory was originally based on https://github.com/TomCasavant/RedditMaubot, the license for that follows.
206 |
207 | MIT License
208 |
209 | Copyright (c) 2019 Tom Casavant
210 |
211 | Permission is hereby granted, free of charge, to any person obtaining a copy
212 | of this software and associated documentation files (the "Software"), to deal
213 | in the Software without restriction, including without limitation the rights
214 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
215 | copies of the Software, and to permit persons to whom the Software is
216 | furnished to do so, subject to the following conditions:
217 |
218 | The above copyright notice and this permission notice shall be included in all
219 | copies or substantial portions of the Software.
220 |
221 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
222 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
223 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
224 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
225 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
226 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
227 | SOFTWARE.
228 |
229 |
--------------------------------------------------------------------------------
/bots/matrix/README.md:
--------------------------------------------------------------------------------
1 | # [[agora bot]] for Matrix
2 | A simple [maubot](https://github.com/maubot/maubot) that responds to [[wikilinks]] resolved to an [[agora]] (or [[commons]]).
3 |
4 | ## Usage
5 | '[[foo]]' responds with https://anagora.org/foo by default; override Agora if you want to resolve elsewhere.
6 |
7 | ## Deploying
8 |
9 | I use `mbc build` followed by `mbc upload`.
10 |
11 | `mbc auth` will be needed the first time.
12 |
--------------------------------------------------------------------------------
/bots/matrix/agora.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # Copyright 2022 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | #
16 | # [[flancian]]: I originally derived this from https://github.com/TomCasavant/RedditMaubot, but little code from that remains.
17 | # This directory includes a MIT license (see LICENSE) because that is the original license for the above repo.
18 |
19 | from random import choice
20 | from typing import List, Tuple
21 | import urllib.parse
22 | from maubot import Plugin, MessageEvent
23 | from mautrix.types import RelationType, TextMessageEventContent, RelatesTo, MessageType
24 | from mautrix import errors
25 | from maubot.handlers import command
26 | import datetime
27 | import os
28 | import re
29 |
# Matrix account the bot runs as; also used as the per-bot subdirectory name
# under the Agora stream (see OUTPUT_DIR).
AGORA_BOT_ID="anagora@matrix.org"
# Base URL of the Agora that wikilinks and hashtags resolve to.
AGORA_URL=f"https://anagora.org"
# Element instance used when building permalinks back to Matrix messages.
MATRIX_URL=f"https://develop.element.io"
# Local Agora checkout; logged messages are appended under its stream/ dir.
AGORA_ROOT=os.path.expanduser("~/agora")
OUTPUT_DIR=f"{AGORA_ROOT}/stream/{AGORA_BOT_ID}"
# Matrix relation type used both to start reply threads and to detect them.
THREAD = RelationType("m.thread")
# Probably should invest instead in not answering to *spurious* hashtags :)
HASHTAG_OPT_OUT_ROOMS = [
    '!zPwMsygFdoMjtdrDfo:matrix.org', # moa party
    '!akkaZImONyQWKswVdt:matrix.org', # social coop tech chat
    '!aIpzDTRzEEUkMCcBay:matrix.org', # social coop open chat
]
42 |
class AgoraPlugin(Plugin):
    """Maubot plugin that responds to [[wikilinks]] and #hashtags with Agora URLs.

    For every match it replies with the corresponding https://anagora.org/
    link (starting a thread when the homeserver allows it) and appends a
    markdown log entry for the message under OUTPUT_DIR (see log_evt).
    """

    # Raw string: '\[' in a plain string is an invalid escape sequence
    # (DeprecationWarning, a SyntaxWarning on Python 3.12+).
    @command.passive(r"\[\[(.+?)\]\]", multiple=True)
    async def wikilink_handler(self, evt: MessageEvent, subs: List[Tuple[str, str]]) -> None:
        """Reply to [[wikilink]] mentions with their Agora URLs."""
        await evt.mark_read()
        self.log.info(f"responding to event: {evt}")
        wikilinks = []  # List of all wikilinks given by user
        for _, link in subs:
            if 'href=' in link or re.match(r'\[.+?\]\(.+?\)', link):
                # this wikilink is already anchored (resolved), skip it.
                continue
            # urlencode otherwise
            wikilinks.append("https://anagora.org/{}".format(urllib.parse.quote_plus(link)))

        if wikilinks:
            self.log.info(f"*** found wikilinks in message.")
            await self._respond_with_links(evt, wikilinks)

    # this uses a non-capturing group, ?:, for not responding to e.g. anchors in URLs (I didn't mind but people really didn't like that.)
    @command.passive(r'(?:^|\s)#(\S+)', multiple=True)
    async def hashtag_handler(self, evt: MessageEvent, subs: List[Tuple[str, str]]) -> None:
        """Reply to #hashtag mentions with their Agora URLs, unless the room opted out."""
        if evt.room_id in HASHTAG_OPT_OUT_ROOMS:
            self.log.info(f"not handling hashtag due to opted out room: {evt.room_id}")
            return
        await evt.mark_read()
        self.log.info(f"responding to event: {evt}")
        hashtags = []  # List of all hashtags given by user
        for _, link in subs:
            hashtags.append("https://anagora.org/{}".format(urllib.parse.quote_plus(link)))

        if hashtags:
            self.log.info(f"*** found hashtags in message.")
            await self._respond_with_links(evt, hashtags)

    async def _respond_with_links(self, evt: MessageEvent, links: List[str]) -> None:
        """Send `links` as one reply (threaded when possible), then log each to the Agora.

        Shared tail of wikilink_handler and hashtag_handler, which previously
        duplicated this logic verbatim.
        """
        response = "\n".join(links)
        if self.inThread(evt):
            # already in a thread, can't start one :)
            self.log.info(f"*** already in thread, can't start another one.")
            await evt.reply(response, allow_html=True)
        else:
            self.log.info(f"*** trying to start a thread with response.")
            # start a thread with our reply.
            content = TextMessageEventContent(
                body=response,
                msgtype=MessageType.NOTICE,
                relates_to=RelatesTo(rel_type=THREAD, event_id=evt.event_id))
            try:
                await evt.respond(content, allow_html=True)  # Reply to user
            except errors.request.MUnknown:
                # works around: "cannot start threads from an event with a relation"
                self.log.info(f"*** couldn't start a thread, falling back to regular response.")
                await evt.reply(response, allow_html=True)
        # try to save a link to the message in the Agora.
        for link in links:
            self.log_evt(evt, link)

    def inThread(self, evt):
        """Return True if `evt` is already part of an m.thread relation."""
        try:
            relates = evt.content._relates_to
            if relates.rel_type == THREAD:
                self.log.info("*** event was already in thread")
                return True
            return False
        except Exception:
            # No relation on the event (or unexpected content shape): treat
            # as not threaded. Narrowed from a bare `except:`, which also
            # swallowed KeyboardInterrupt/SystemExit.
            return False

    def log_evt(self, evt, node):
        """Append a markdown log entry for `evt` to the Agora node file for `node`."""
        # filesystems are more flexible than URLs, spaces are fine and preferred :)
        node = urllib.parse.unquote_plus(node)

        # makedirs is idempotent and also creates any missing parents
        # (os.mkdir raised FileNotFoundError when AGORA_ROOT/stream was absent).
        os.makedirs(OUTPUT_DIR, exist_ok=True)

        # unsure if it's OK inlining, perhaps fine in this case as each room does explicit setup?
        msg = evt.content.body

        # this was shamelessly copy/pasted and adapted from [[agora bridge]], mastodon bot.
        if '/' in node:
            # for now, dump only to the last path fragment -- this yields the right behaviour in e.g. [[go/cat-tournament]]
            node = os.path.split(node)[-1]

        filename = os.path.join(OUTPUT_DIR, node + '.md')
        # Log the actual target path (was a literal "(unknown)" placeholder).
        self.log.info(f"logging {evt} to file {filename} mapping to {node}.")

        # hack hack -- this should be enabled/disabled/configured in the maubot admin interface somehow?
        try:
            with open(filename, 'a') as note:
                username = evt.sender
                # /1000 needed to reduce 13 -> 10 digits
                dt = datetime.datetime.fromtimestamp(int(evt.timestamp/1000))
                link = f'[link]({MATRIX_URL}/#/room/{evt.room_id}/{evt.event_id})'
                note.write(f"- [[{dt}]] [[{username}]] ({link}):\n    - {msg}\n")
        except Exception as e:
            self.log.info(f"Couldn't save link to message, exception: {e}.")
--------------------------------------------------------------------------------
/bots/matrix/maubot.yaml:
--------------------------------------------------------------------------------
1 | # Originally derived from: https://github.com/TomCasavant/RedditMaubot
2 |
3 | # Target maubot version
4 | maubot: 0.1.0
5 |
6 | # The unique ID for the plugin. Java package naming style. (i.e. use your own domain, not xyz.maubot)
7 | id: org.anagora.agorabot
8 |
9 | # A PEP 440 compliant version string.
10 | version: 1.0.21
11 |
12 | # The SPDX license identifier for the plugin. https://spdx.org/licenses/
13 | # Optional, assumes all rights reserved if omitted.
14 | license: Apache-2.0
15 |
16 | # The list of modules to load from the plugin archive.
17 | # Modules can be directories with an __init__.py file or simply python files.
18 | # Submodules that are imported by modules listed here don't need to be listed separately.
19 | # However, top-level modules must always be listed even if they're imported by other modules.
20 | modules:
21 | - agora
22 |
23 | # The main class of the plugin. Format: module/Class
24 | # If `module` is omitted, will default to last module specified in the module list.
25 | # Even if `module` is not omitted here, it must be included in the modules list.
26 | # The main class must extend maubot.Plugin
27 | main_class: AgoraPlugin
28 |
29 | # Whether or not instances need a database
30 | database: false
31 |
--------------------------------------------------------------------------------
/bots/twitter/agora-bot.yaml.example:
--------------------------------------------------------------------------------
1 | ---
2 | # - [[flancian]] says:
3 | # - this file is meant to be consumed by an [[agora bot]] kept in https://github.com/flancian/agora-bridge.
4 | # - for a specification of this format, please consult https://anagora.org/agora-bot.
# - for instructions to set up a fully working [[agora]] using this and other repos, please consult https://anagora.org/agora-setup.
6 |
7 | # user: an_agora
# can be resolved with https://commentpicker.com/twitter-id.php, probably also other better tools (and through an API call: https://api.twitter.com/2/users/by/username/:username, field 'id').
9 | # fill in what's declared REQUIRED below.
10 | # some fields marked as REQUIRED might not be, still figuring this out.
11 | bot_user_id: REQUIRED
12 | # consumer keys ("api key and secret")
13 | consumer_key: REQUIRED
14 | consumer_secret: REQUIRED
15 | # bearer token
16 | bearer_token: optional
17 | # auth tokens ("access token and secret")
18 | access_token: REQUIRED
19 | access_token_secret: REQUIRED
20 | # optional: never react to tweets older than this
21 | since_id: 1
22 |
--------------------------------------------------------------------------------
/bots/twitter/friends.yaml:
--------------------------------------------------------------------------------
1 | - id: '25357843'
2 | name: Alan Laidlaw
3 | username: alanlaidlaw
4 | - id: '2586046706'
5 | name: The Meta-Author
6 | username: metaauthor
7 | - id: '28697346'
8 | name: Sander Dijkhuis
9 | username: s89
10 | - id: '10139842'
11 | name: lauriewayne
12 | username: lauriewayne
13 | - id: '1223180328530956290'
14 | name: Namit Oberoy
15 | username: indianlegaltech
16 | - id: '8388432'
17 | name: ESTEVAN CARLOS BENSON
18 | username: estevancarlos
19 | - id: '23216407'
20 | name: Doug Holton
21 | username: edtechdev
22 | - id: '1510481966713479168'
23 | name: Mike
24 | username: Mike272701541
25 | - id: '1523104660726370304'
26 | name: israel mendoza
27 | username: israelm30301012
28 | - id: '1282434038477787142'
29 | name: Pete LoVerso
30 | username: PeteLoVerso
31 | - id: '1509234669597900805'
32 | name: "Olena Rusina \U0001F1FA\U0001F1E6"
33 | username: OlenaRusina
34 | - id: '14318815'
35 | name: romeo aspe
36 | username: curlytoptrader
37 | - id: '9749882'
38 | name: Piero Rivizzigno
39 | username: PieroRivizzigno
40 | - id: '804785766324912128'
41 | name: Ricardo Huaranga
42 | username: ricardohuaranga
43 | - id: '1501572449271312386'
44 | name: Europe Vibe
45 | username: VibeEurope
46 | - id: '861687042853527553'
47 | name: Dr Mikael Morbachev
48 | username: jimbingo2
49 | - id: '1287851617509953537'
50 | name: "dragondelis \U0001F1FA\U0001F1E6"
51 | username: dragondelis
52 | - id: '3893571'
53 | name: Dr. Arq. Villegas-Ballesta
54 | username: arquitextonica
55 | - id: '22838993'
56 | name: Aron Shelton
57 | username: Skelt
58 | - id: '1539830361475137536'
59 | name: Sef
60 | username: Saefokk
61 | - id: '40989620'
62 | name: "Adam Zeiner (he/him) \U0001F331"
63 | username: _dezein
64 | - id: '1523699455135997960'
65 | name: buba
66 | username: boba751
67 | - id: '1027900844249772032'
68 | name: Dan Allosso
69 | username: AllossoDan
70 | - id: '282116067'
71 | name: Mesh
72 | username: meshachthomas
73 | - id: '4633808432'
74 | name: "\u25B2nders on a quest \U0001F738 to see myself \U0001FA9E"
75 | username: anderskitson
76 | - id: '50546986'
77 | name: "Brian Sunter \U0001F9E0"
78 | username: Bsunter
79 | - id: '1418740612971393026'
80 | name: quantum hypescam zero
81 | username: hypescam
82 | - id: '180354861'
83 | name: Mark Tappin
84 | username: marktappin
85 | - id: '119604402'
86 | name: "Tom\xE1s Guti\xE9rrez"
87 | username: tomiguti
88 | - id: '824514567640379395'
89 | name: Frank was here.
90 | username: paraguatejpeg
91 | - id: '19248923'
92 | name: Benjamin Schneider
93 | username: bschne
94 | - id: '1534652308079910912'
95 | name: Daniel Van Zant
96 | username: Daniel_Van_Zant
97 | - id: '942394886673727488'
98 | name: jeremyd_neuro
99 | username: science_is_hard
100 | - id: '1545694403913781249'
101 | name: Stephan Ullmann
102 | username: Ullmann25
103 | - id: '1406653688026959879'
104 | name: Maximilian Osterhagen
105 | username: Sozialalphabet
106 | - id: '1431824198448533507'
107 | name: soool
108 | username: erlcost2
109 | - id: '1430287662511017984'
110 | name: "d\u2019Arma"
111 | username: Arma1911
112 | - id: '2826991440'
113 | name: Pedro Parrachia ( :D , :D )
114 | username: parrachia
115 | - id: '1536992981533835265'
116 | name: raghav Srih
117 | username: RAGHAV_SAASANA
118 | - id: '1292115644884844544'
119 | name: david todd
120 | username: dthTMW
121 | - id: '3021781999'
122 | name: PropagandaTyler.usd
123 | username: DataKinds
124 | - id: '1326535637378371585'
125 | name: Zubayr Ali
126 | username: zoobhalu
127 | - id: '1422545870357155842'
128 | name: "\u2601\uFE0F\u2601\uFE0F\u2601\uFE0F\u2601\uFE0F\u2601\uFE0F"
129 | username: Queryologist
130 | - id: '4053791'
131 | name: Ali Engin
132 | username: aliko
133 | - id: '1534842418436132864'
134 | name: Andro.Meta
135 | username: AndroMeta9
136 | - id: '15278864'
137 | name: Shakir S. Hussain
138 | username: shakirshussain
139 | - id: '1422460926389858307'
140 | name: "Alex Qwxlea \U0001F50D"
141 | username: QwxleaA
142 | - id: '3271161444'
143 | name: Conrad Mearns
144 | username: conrad_mearns
145 | - id: '2342244918'
146 | name: Didier
147 | username: DidierMarin8
148 | - id: '1008235176068046854'
149 | name: Bhagya Silva
150 | username: bhagyax
151 | - id: '350565624'
152 | name: Rhys Cassidy
153 | username: rhyscass
154 | - id: '1449050223335165963'
155 | name: Null Flancian
156 | username: NFlancian
157 | - id: '1297757895090507778'
158 | name: "harley \U0001F4E1"
159 | username: harleyraygray
160 | - id: '4924866428'
161 | name: Tan Sing Kuang
162 | username: Tan_Sing_Kuang
163 | - id: '1471207635823251468'
164 | name: "PhoebeFactorial \U0001F525\U0001FA84\u2699\uFE0F5\uFE0F\u20E3"
165 | username: phoebefactorial
166 | - id: '1262316586850873344'
167 | name: Simulatte
168 | username: netstorm_
169 | - id: '1150430933000032256'
170 | name: boyar
171 | username: boyargh
172 | - id: '818340'
173 | name: Marshall Kirkpatrick
174 | username: marshallk
175 | - id: '4916559826'
176 | name: Bruno Melo
177 | username: brvn0jm
178 | - id: '88676762'
179 | name: Ian O'Byrne
180 | username: wiobyrne
181 | - id: '563452081'
182 | name: "n\u1D57\u02B0 instar \U000131A4"
183 | username: mockingw0rd
184 | - id: '167134266'
185 | name: Olaf Odlind
186 | username: o1avus
187 | - id: '1371539482646777856'
188 | name: Dimes Square vibe
189 | username: Dimes_Sq_Vibe
190 | - id: '19404730'
191 | name: Alex Wagner | societalcollapse.eth
192 | username: alexdw5
193 | - id: '1384111529910431757'
194 | name: "Efe \u015Eener \U0001F9D9\u200D\u2642\uFE0F"
195 | username: NocoderEfe
196 | - id: '1438111465991507969'
197 | name: Elite Digitalist
198 | username: EliteDigitalist
199 | - id: '1250284120141221889'
200 | name: k-so0906
201 | username: KSo0906
202 | - id: '79683764'
203 | name: Koush Solanki
204 | username: koushsolanki
205 | - id: '19810731'
206 | name: Maleph
207 | username: maleph
208 | - id: '1521586284740886536'
209 | name: Tim Wainwright
210 | username: TJFWainwright
211 | - id: '2220997760'
212 | name: Katerina
213 | username: katerinabohlec
214 | - id: '19033312'
215 | name: Rev. Sean
216 | username: seanneilbarron
217 | - id: '1065316525417160705'
218 | name: Ronen Tamari
219 | username: rtk254
220 | - id: '1466691661220782085'
221 | name: ''
222 | username: XHamsterka
223 | - id: '4789008675'
224 | name: Cawflands Durdar
225 | username: cawflands
226 | - id: '1461355741042733063'
227 | name: "12many \u03C4"
228 | username: 12manyI
229 | - id: '1035254982516396032'
230 | name: '@jorg@climatjustice.social #PeopleNotProfit'
231 | username: HJCJ_
232 | - id: '2163518497'
233 | name: Decentralize
234 | username: p2pdht
235 | - id: '1000425715014258688'
236 | name: Brandon
237 | username: brandonklotz
238 | - id: '1468517006'
239 | name: Margaret Warren
240 | username: ImageSnippets
241 | - id: '1242240461244227584'
242 | name: Andrew Haden
243 | username: i__am__aka
244 | - id: '1044196866353696770'
245 | name: "Laboratorio de Bioinformaci\xF3n"
246 | username: infovestigacion
247 | - id: '1157076911606177793'
248 | name: RUSIA FARLON
249 | username: FarlonRusia
250 | - id: '1146931970'
251 | name: paula sorensen
252 | username: SorensenPaula
253 | - id: '3173455931'
254 | name: Muhammet
255 | username: muhammetcalis01
256 | - id: '1236127448363864066'
257 | name: Conformal Person
258 | username: bjaguar_aL
259 | - id: '1117484839836495872'
260 | name: Drew Levanti
261 | username: anthrosophist
262 | - id: '837012161008369666'
263 | name: Nathan E. Rasmussen
264 | username: VolodymyrVelyky
265 | - id: '1519558939427409923'
266 | name: "ExquisiteFootwears\U0001F462"
267 | username: ExquisiteFootw2
268 | - id: '1159718660677734400'
269 | name: Maalisuo
270 | username: Maalisuo2
271 | - id: '1132684600575111168'
272 | name: Eshan dwikhi luhur ( TULI )
273 | username: eshan_dwikhi
274 | - id: '882587440724267008'
275 | name: Therapeutic Venezuela
276 | username: therapeuticvzla
277 | - id: '100367444'
278 | name: Neil Thawani
279 | username: lioninawhat
280 | - id: '1199386926920290304'
281 | name: "SDO \U0001F3F4\u200D\u2620\uFE0F\u24B6"
282 | username: seconddayout
283 | - id: '1031636988871499776'
284 | name: Tijan
285 | username: Tijani48791056
286 | - id: '605651172'
287 | name: "ntotao.eth \u26A1\U0001F987\U0001F50A"
288 | username: ntotao
289 | - id: '17715155'
290 | name: "christopher is \u27B0 the \u2638\uFE0F"
291 | username: becomingbabyman
292 | - id: '1057239305721016321'
293 | name: Graphy Search
294 | username: graphysearch
295 | - id: '632191261'
296 | name: "Zora \U0001F3F3\uFE0F\u200D\u26A7\uFE0F"
297 | username: zora_ilex
298 | - id: '1420671912468914185'
299 | name: ydivho12123
300 | username: ydivho12123
301 | - id: '23430631'
302 | name: Mariana Lanari
303 | username: Mariskine
304 | - id: '1124168875971465217'
305 | name: Keenan Payne
306 | username: KeenanPayne_
307 | - id: '1510956850606395398'
308 | name: "Becky \U0001F4AF"
309 | username: BeckyLinkz
310 | - id: '2550607902'
311 | name: Habib Mahdi
312 | username: the_true_habib
313 | - id: '3282807283'
314 | name: "Sashin \U0001F58A\uFE0F\U0001F331\U0001F30C"
315 | username: sashintweets
316 | - id: '1403357100323991557'
317 | name: Tree
318 | username: neo13003494
319 | - id: '709142717209534464'
320 | name: "Ali Ammar \U0001F1F1\U0001F1E7"
321 | username: _ali_ammar
322 | - id: '305320812'
323 | name: Kumar Biswas
324 | username: kumarnw
325 | - id: '1090961309103996928'
326 | name: One
327 | username: real_ez_cheeze
328 | - id: '420744611'
329 | name: Christina Fedor
330 | username: christinasatory
331 | - id: '17859953'
332 | name: "\u02D7\u02CF\u02CB Mark Foster \u02CE\u02CA\u02D7"
333 | username: mfosterio
334 | - id: '2868667054'
335 | name: Your Fridge Therapist
336 | username: teleoflexuous
337 | - id: '1380964363008360457'
338 | name: duo
339 | username: duonetic
340 | - id: '1380524520273031172'
341 | name: Crafty Savage
342 | username: cr4ftysavage
343 | - id: '2715527232'
344 | name: "b \u3013\u3013 \U0001F6A9\U0001F1F5\U0001F1F8"
345 | username: gdmorningcpn
346 | - id: '1506423748399931392'
347 | name: sophia
348 | username: sophiaaaxu
349 | - id: '1495751312411172869'
350 | name: allinone.tools
351 | username: AllinoneTools
352 | - id: '264344694'
353 | name: "Alexander Rink \U0001F1FA\U0001F1E6"
354 | username: rcvd_io
355 | - id: '1463940972014931972'
356 | name: MichaelPT
357 | username: 1uniqueid
358 | - id: '1495977773525000194'
359 | name: Musicguy410
360 | username: musicguy410
361 | - id: '806620919229620224'
362 | name: defeasible inference
363 | username: hautetakes
364 | - id: '1313018174594408450'
365 | name: Weavit App - Your digital brain
366 | username: WeavitApp
367 | - id: '7920922'
368 | name: Daniel
369 | username: DanielMorgan
370 | - id: '14314716'
371 | name: "cyberlabe \U0001F989"
372 | username: cyberlabe
373 | - id: '1153291299765702662'
374 | name: Urvin Soneta
375 | username: SonetaUrvin
376 | - id: '14613296'
377 | name: Nilesh
378 | username: nileshtrivedi
379 | - id: '1105917406592024576'
380 | name: Cara M. Antonaccio, PhD, MSPH
381 | username: caraantonaccio
382 | - id: '954320467158069249'
383 | name: ISWS
384 | username: isws_semweb
385 | - id: '4556700449'
386 | name: "Calvin \U0001F56F"
387 | username: ProxyUniversal
388 | - id: '9517882'
389 | name: Mathew
390 | username: mathewlowry
391 | - id: '26750085'
392 | name: Ivo Velitchkov
393 | username: kvistgaard
394 | - id: '393745964'
395 | name: John Bezark
396 | username: JohnBezark
397 | - id: '1374221160708194305'
398 | name: G. Heffley
399 | username: taxabellum
400 | - id: '1501606412085972993'
401 | name: Vibe Bali
402 | username: Vibe__Bali
403 | - id: '2316456072'
404 | name: Amber Lynn
405 | username: SassWitAss95
406 | - id: '1557401876'
407 | name: DreamKoala
408 | username: DreamKoala22
409 | - id: '2900657776'
410 | name: VP
411 | username: vp_coder1911
412 | - id: '88902573'
413 | name: .
414 | username: rdh_one
415 | - id: '84108585'
416 | name: Jack Park
417 | username: gardenfelder
418 | - id: '60903818'
419 | name: ++ hekovnik
420 | username: hekovnik
421 | - id: '15258949'
422 | name: interlinear annotation separator
423 | username: tilgovi
424 | - id: '5900182'
425 | name: R.
426 | username: marcusfaith
427 | - id: '1251119660503674880'
428 | name: "\U0001D53D_un"
429 | username: FF_un1
430 | - id: '13071242'
431 | name: Michael J.J. Tiffany
432 | username: kubla
433 | - id: '992427392802197504'
434 | name: "Leobardo \xD3scar Alc\xE1ntara Oca\xF1a"
435 | username: joshua_w_d
436 | - id: '825280087054393344'
437 | name: YimingWu is not doing anything
438 | username: ChengduLittleA
439 | - id: '1077142641324818432'
440 | name: Vince KreaCity
441 | username: kreacity
442 | - id: '851675554059309056'
443 | name: The Crypto Warrior
444 | username: Warrior_Bitcoin
445 | - id: '1338697218'
446 | name: katelm
447 | username: infokatelm
448 | - id: '1077930944940642304'
449 | name: shaudy
450 | username: WhoIsShaudy
451 | - id: '6280752'
452 | name: Bug SLe
453 | username: Bugsle
454 | - id: '2880766798'
455 | name: Tyrue Nguyen
456 | username: tyruenguyen
457 | - id: '1327819678614507520'
458 | name: Jaider Becerra
459 | username: JaiderBec
460 | - id: '6035812'
461 | name: "\U0001202D\U00012097\U000121A0 Psychotronic Tsundoku Infohazard"
462 | username: enkiv2
463 | - id: '1387193628724977664'
464 | name: sauna_thoughts - just asking questions
465 | username: ceo_sauna
466 | - id: '3012641675'
467 | name: abrahamu kilawe
468 | username: abra468
469 | - id: '965595068332695552'
470 | name: Abdoulie Nyass
471 | username: nyassabu
472 | - id: '11331452'
473 | name: Andrew Altshuler
474 | username: 1eo
475 | - id: '179529321'
476 | name: Sebastian Gorton
477 | username: aaadotpm
478 | - id: '1127709804'
479 | name: Arianne_Trchy
480 | username: ArianneCollado
481 | - id: '4471665614'
482 | name: DotDev
483 | username: _dotdev
484 | - id: '4923106354'
485 | name: Raghav Agrawal
486 | username: impactology
487 | - id: '1193323919379439618'
488 | name: "\U0001D589\U0001D586\U0001D597\U0001D591\U0001D58E\U0001D592"
489 | username: henriquedarlim
490 | - id: '1428135838525714432'
491 | name: vibecamp- phase 2 of chaos mode!
492 | username: vibecamp_
493 | - id: '81560978'
494 | name: Lloyd Woodham
495 | username: woodhamlloyd
496 | - id: '1416164094931963906'
497 | name: Chimalo
498 | username: Chimaloster
499 | - id: '2670711660'
500 | name: "c\xE1ssius - c4ss1us.l1f3"
501 | username: c4ss1usl1f3
502 | - id: '1513607755'
503 | name: sipsip
504 | username: geekodour
505 | - id: '18089164'
506 | name: "leo.hmn \U0001F987\U0001F50A ( :D, :D )"
507 | username: leonardkish
508 | - id: '1117396504178765826'
509 | name: Oleg Martynov
510 | username: yfjolne
511 | - id: '1139052052422770688'
512 | name: caravinalia
513 | username: caravinalia
514 | - id: '17006530'
515 | name: JustGage
516 | username: JustGage
517 | - id: '1122104264854319104'
518 | name: Ryan Faulhaber
519 | username: ryan_faulhaber
520 | - id: '1272630683077410816'
521 | name: outerheaven213
522 | username: outerheaven213
523 | - id: '2742035528'
524 | name: AhaFisher
525 | username: ahafisher
526 | - id: '213444548'
527 | name: Finn Brown
528 | username: finnatsea
529 | - id: '792217726353707008'
530 | name: "freak\u26A1\uFE0Feinstein"
531 | username: freakeinstein3
532 | - id: '128931327'
533 | name: Viktor
534 | username: wmobilas
535 | - id: '264843368'
536 | name: Nikola
537 | username: Atlantic777
538 | - id: '922355541585813504'
539 | name: brak
540 | username: lenguagenesis
541 | - id: '1403388818527604737'
542 | name: '...'
543 | username: _illidari
544 | - id: '2790337036'
545 | name: Connor St. George
546 | username: CWStGeorge
547 | - id: '629682266'
548 | name: Bobby Alter
549 | username: BobbyAlter
550 | - id: '5388852'
551 | name: (((Howard Rheingold)))
552 | username: hrheingold
553 | - id: '7005092'
554 | name: Alan Morrison
555 | username: AlanMorrison
556 | - id: '881458279335026688'
557 | name: bhavit sharma
558 | username: avaitopiper
559 | - id: '807742267415007232'
560 | name: andre
561 | username: andsnc
562 | - id: '1573778000'
563 | name: Nesrine Aouinti
564 | username: NesrineAouinti
565 | - id: '1229909590130724864'
566 | name: ink asymptotes
567 | username: inkasymptotes
568 | - id: '244942474'
569 | name: Stefan van Lier
570 | username: svlier
571 | - id: '234539951'
572 | name: "Habib M. Sayah \U000100CF"
573 | username: rhizomyx
574 | - id: '9971872'
575 | name: Sean McBride
576 | username: seanmcbride
577 | - id: '767784642799894529'
578 | name: "Carnun is \U0001F3D7\uFE0F UNDER CONSTRUCTION \U0001F468\u200D\U0001F4BB"
579 | username: CarnunMP
580 | - id: '1001965286348607488'
581 | name: brendan
582 | username: brendmurn
583 | - id: '2852104100'
584 | name: Velcro
585 | username: dopenessondeck8
586 | - id: '1069652371389759489'
587 | name: '0xb2f2d1'
588 | username: '0xb2f2d1'
589 | - id: '1372879423'
590 | name: "Tweets Space Rat \U0001F48E"
591 | username: TetraspaceWest
592 | - id: '8253712'
593 | name: Jeff Miller
594 | username: jmeowmeow
595 | - id: '1467148159734452232'
596 | name: Alex Rigoni
597 | username: AlexRigoni16
598 | - id: '2809311468'
599 | name: duntsHat
600 | username: duntsHat
601 | - id: '1290725204860297218'
602 | name: "Nanda \u2764\uFE0F\u200D\U0001F525 Raising the Star Banner"
603 | username: starbannergames
604 | - id: '353076798'
605 | name: vinay
606 | username: vinaychandranp
607 | - id: '1017457351455100928'
608 | name: Orion Lehoczky Escobar
609 | username: lehoczkyescobar
610 | - id: '1355475241087033349'
611 | name: Quinn Xing
612 | username: QuinnXing
613 | - id: '124651865'
614 | name: Nguyen Doan Quyet
615 | username: doanquyetnguyen
616 | - id: '1404259780563292161'
617 | name: "\u300E samuelp x \u300F"
618 | username: samuelpx
619 | - id: '258986058'
620 | name: Wesley Finck
621 | username: wesleyfinck
622 | - id: '15827403'
623 | name: Shem Freeze
624 | username: shemfreeze
625 | - id: '715932023198769152'
626 | name: Steffen Schuldenzucker
627 | username: sschuldenzucker
628 | - id: '1009131791331069954'
629 | name: Emma
630 | username: xxejp
631 | - id: '819188578064470019'
632 | name: Joe Tay
633 | username: Joe90Tay
634 | - id: '1245914339892133888'
635 | name: Anthony Goldstein, Ravenclaw, Jewish wizard.
636 | username: nEquals001
637 | - id: '1195081758108200961'
638 | name: Mr. Victoire Mwiseneza
639 | username: victoire_dr
640 | - id: '349878286'
641 | name: "\u13E9\u03B1\u03BDi\u03B7 \u13C0a\u043C\u0432o\u03B1 \U0001F331\u2601\uFE0F\
642 | \u2728"
643 | username: gavcloud
644 | - id: '28126022'
645 | name: Angus McMorland
646 | username: amcmorl
647 | - id: '892306984610824192'
648 | name: "\U0001D693\U0001D698\U0001D697\U0001D697\U0001D6A2\uFE4F\U0001D69C\U0001D68A\
649 | \U0001D69E\U0001D697\U0001D68D\U0001D68E\U0001D69B\U0001D69C"
650 | username: json_dirs
651 | - id: '1398276033619496964'
652 | name: Satellite Seattle Tech News
653 | username: Satellitenw
654 | - id: '727606088623869952'
655 | name: Jess
656 | username: uterobain
657 | - id: '885564053732589569'
658 | name: Thomas Tarabbia
659 | username: ThomasTarabbia
660 | - id: '346292006'
661 | name: a3260547654
662 | username: a802785478096
663 | - id: '4783266375'
664 | name: "o\u030A"
665 | username: j0lms
666 | - id: '218643712'
667 | name: mentalconflux
668 | username: mentalconflux
669 | - id: '1093767247'
670 | name: Alex Garcia
671 | username: alex_here_now
672 | - id: '3140101'
673 | name: counting sheep (1/100)
674 | username: mmuehlenstein
675 | - id: '1248799343525560320'
676 | name: "\uA9C1\u03A6\uA9C2"
677 | username: SIMPOTUS
678 | - id: '1491673968'
679 | name: Richard Williams
680 | username: Rwill98
681 | - id: '49044207'
682 | name: Malcolm Ocean is home in NS (45/100 vids)
683 | username: Malcolm_Ocean
684 | - id: '3896582473'
685 | name: "\U0001F32A\uFE0FM\U0001F98AK*blue spiral in contrasts tornado apart*"
686 | username: sylvanarevalo
687 | - id: '2806820966'
688 | name: roy.ly
689 | username: fROYndlich
690 | - id: '1336506015202865154'
691 | name: ApeJoy.sol
692 | username: SamuraiSatoshi_
693 | - id: '1112875554875871232'
694 | name: Evan
695 | username: Scaledish
696 | - id: '2159323615'
697 | name: Tess
698 | username: xsphi
699 | - id: '900567777106427904'
700 | name: Another
701 | username: workflowy01
702 | - id: '801687370798137344'
703 | name: "OnePlaybook \u0FCB"
704 | username: OnePlaybookHQ
705 | - id: '18711961'
706 | name: Bryan Mock
707 | username: bmock
708 | - id: '1244177371110035457'
709 | name: n/0
710 | username: divideby0zero
711 | - id: '36664716'
712 | name: fabrice liut
713 | username: SlaapMe
714 | - id: '3165397510'
715 | name: Edward Anderson
716 | username: anderson_edw
717 | - id: '1252263933945294850'
718 | name: Vitalist Doxometrist
719 | username: doxometrist
720 | - id: '1140648243715592194'
721 | name: GitJournal
722 | username: GitJournalApp
723 | - id: '1425080281153867784'
724 | name: Martha Almashi | inspirational writing
725 | username: AlmashiMartha
726 | - id: '128784941'
727 | name: DR
728 | username: danhrahal
729 | - id: '14527577'
730 | name: christine
731 | username: curiousfeet
732 | - id: '436492956'
733 | name: Akash Kaura
734 | username: akash_kaura
735 | - id: '3004810906'
736 | name: "deadgirlratking \U0001F54A"
737 | username: Veinseer
738 | - id: '1446456987248627715'
739 | name: TelemachusTrout
740 | username: TelemachusTrout
741 | - id: '1202027300516184065'
742 | name: smitop
743 | username: _smitop
744 | - id: '109722777'
745 | name: Martin Milan
746 | username: martin12333
747 | - id: '1050813805276659712'
748 | name: "Jose E. S\xE1nchez Tato"
749 | username: neurorebelde
750 | - id: '1280388211026477056'
751 | name: "BToh is in London \U0001F1EC\U0001F1E7"
752 | username: btohtoh
753 | - id: '19370317'
754 | name: bastiano
755 | username: Maalo
756 | - id: '1442730053461110787'
757 | name: "Don\u2019t Accept the Default \U0001F50D"
758 | username: DAtD_life
759 | - id: '32167903'
760 | name: farayi
761 | username: dynamicsTutor
762 | - id: '3348799768'
763 | name: Jeremy Georges-Filteau
764 | username: jgeofil
765 | - id: '44963854'
766 | name: Marco ||
767 | username: marcosins
768 | - id: '287387396'
769 | name: Dan Whaley
770 | username: dwhly
771 | - id: '1424356090419064833'
772 | name: "Fernando \U0001F33A\U0001F30C"
773 | username: zetalyrae
774 | - id: '212140544'
775 | name: Thomas Kim
776 | username: iamtdk20
777 | - id: '3320757050'
778 | name: Sulaiman Sanaullah
779 | username: sulsanaul
780 | - id: '758230986853224448'
781 | name: Dhruv Gupta
782 | username: AIPulp
783 | - id: '1062818080882073605'
784 | name: Max Lin Worm
785 | username: WormLin
786 | - id: '235656884'
787 | name: Gabriele Farei
788 | username: jayfarei
789 | - id: '931653411631591424'
790 | name: '&mut Agora'
791 | username: NeotenicPrimate
792 | - id: '985342160265359362'
793 | name: Michael Graham
794 | username: michaeldgrahams
795 | - id: '2409815474'
796 | name: James Newman
797 | username: hmac_sha1
798 | - id: '7384'
799 | name: Jes Wolfe!
800 | username: jes5199
801 | - id: '942982964'
802 | name: "Eigil is in Tallinn \U0001F48E (29866/50000 words)"
803 | username: AyeGill
804 | - id: '1097285036158652416'
805 | name: Edward
806 | username: EdwardIPAguilar
807 | - id: '1192638405336633344'
808 | name: "Brayton \u2022 Bananaplanet\U0001F40C"
809 | username: Bananaplanet2
810 | - id: '1239971'
811 | name: Bill Seitz of FluxGarden
812 | username: BillSeitz
813 | - id: '2827471609'
814 | name: "jimmylv.eth (\U0001F423, \U0001F423) \u5415\u7ACB\u9752 2\U000100CF22 \U0001F1E8\
815 | \U0001F1F3"
816 | username: Jimmy_JingLv
817 | - id: '1334122119228231680'
818 | name: RoamResearcher
819 | username: ResearcherRoam
820 | - id: '1290898110185705472'
821 | name: Sir Mush
822 | username: mush_sir
823 | - id: '269757961'
824 | name: Michael Thomas
825 | username: ChargingThrough
826 | - id: '371876382'
827 | name: Evan Cater
828 | username: Evan_ec
829 | - id: '17441631'
830 | name: pavedwalden
831 | username: pavedwalden
832 | - id: '7993612'
833 | name: Marc-Antoine Parent
834 | username: ma_parent
835 | - id: '963158500045189125'
836 | name: "\U0001D64E\U0001D65E\U0001D65B\U0001D669\U0001D65A\U0001D667"
837 | username: JMSifter
838 | - id: '1357924710403686401'
839 | name: "manunam \U0001F33B\U0001FAD0"
840 | username: manunamz
841 | - id: '812270237068492800'
842 | name: Lidor Cohen
843 | username: LidorCG
844 | - id: '1108513420649783296'
845 | name: Kelly Davis
846 | username: Kelly_EdTech
847 | - id: '1084122013365465088'
848 | name: Zen
849 | username: Caizen5
850 | - id: '349685838'
851 | name: Paramveer
852 | username: notbodynormind
853 | - id: '1144177164251738112'
854 | name: "V\xEDctor E.R. Rodriguez"
855 | username: vicrerodriguez
856 | - id: '159526809'
857 | name: Shivam Saini
858 | username: ishivamsaini
859 | - id: '778983904137150464'
860 | name: "Krzysztof Kami\u0144ski"
861 | username: kris__kaminski
862 | - id: '229216477'
863 | name: richie bonilla
864 | username: richiebonilla
865 | - id: '1361500801106018308'
866 | name: KevinTheBoss99
867 | username: KevinTheBoss99
868 | - id: '6059142'
869 | name: phil jones (he/him - ele)
870 | username: interstar
871 | - id: '359017793'
872 | name: David Dohan
873 | username: dmdohan
874 | - id: '1278573670739464192'
875 | name: "\U0001F441\uFE0F autosurveillant \U0001F441\uFE0F"
876 | username: pee_zombie
877 | - id: '2501869627'
878 | name: amol
879 | username: life_of_amol
880 | - id: '812755382116450304'
881 | name: Serj Hunt | City lvl Learning Ecosystems
882 | username: Serjhunt_ARK
883 | - id: '1300873085248319490'
884 | name: Adrian in Medellin
885 | username: AdrianRicard0
886 | - id: '1356261773813420037'
887 | name: "Anurag Halder \u262D\u270A\U0001F3FB\u270A\U0001F3FC\u270A\U0001F3FD\u270A\
888 | \U0001F3FE\u270A\U0001F3FF\u270A"
889 | username: anuraguevara
890 | - id: '1298914055156047872'
891 | name: Miguel Marcos Martinez
892 | username: MiguelMarcosM12
893 | - id: '1058402442386108416'
894 | name: Flying Hippopotamus
895 | username: 0xAshith
896 | - id: '1088075285529006086'
897 | name: DickFeynman
898 | username: DickFeynman5
899 | - id: '322603863'
900 | name: "i \u2764\uFE0F\u200D\U0001F525 you xiq"
901 | username: exGenesis
902 | - id: '937297245933670400'
903 | name: Levi Hart
904 | username: Levi7hart
905 | - id: '5614412'
906 | name: "\U0001F1FA\U0001F1E6 sunflower seeds \U0001F33B"
907 | username: mykola
908 | - id: '14187737'
909 | name: Joey Harris
910 | username: joeyharris
911 | - id: '2197269032'
912 | name: Matt Smith
913 | username: MattSmithTweets
914 | - id: '344804054'
915 | name: Rat King Crimson
916 | username: Alphiloscorp
917 | - id: '825775322759102465'
918 | name: neoludic
919 | username: paleoludic
920 | - id: '11008842'
921 | name: Vulpix
922 | username: wild_vulpix
923 | - id: '15422272'
924 | name: hungry_bread_elevator.txt
925 | username: AGWilsonn
926 | - id: '460662551'
927 | name: ddddddddddd
928 | username: espodumena_
929 | - id: '911114132815933440'
930 | name: Alex Leahu
931 | username: alx_jsn
932 | - id: '20963093'
933 | name: "Xavier Llor\xE0"
934 | username: xllora
935 | - id: '163785395'
936 | name: "Mx. Lottie (Charlotte) \U0001F49C\U0001F98A (she/her) \u0398\u0394"
937 | username: Foxsan48
938 | - id: '1389101650145681410'
939 | name: "Miroslav S\xE1zovsk\xFD"
940 | username: CoachSazovsky
941 | - id: '98764925'
942 | name: Robert.Best
943 | username: Bortseb
944 | - id: '1282006421551415298'
945 | name: "\U0001F9BE\u271D\uFE0F\U0001F933 Jesus needs a minute"
946 | username: evan_just_evan
947 | - id: '102744289'
948 | name: "Lisa Ross \U0001F6A2 \U0001F9EA Job Crafting | Career Design"
949 | username: TheJobCrafter
950 | - id: '1360249636829622275'
951 | name: Nabin Budhathoki
952 | username: NabinBu69160359
953 | - id: '111391654'
954 | name: Benedict Lau
955 | username: LauBenedict
956 | - id: '1364791141267251201'
957 | name: Charles Adjovu
958 | username: CAdjovu
959 | - id: '1099843636886466560'
960 | name: "\U0001D682\U0001D698\U0001D699\U0001D691\U0001D692\U0001D68A \U0001D687\U0001D69E"
961 | username: thesophiaxu
962 | - id: '589048815'
963 | name: Kei Cheung
964 | username: keikhcheung
965 | - id: '36034879'
966 | name: Yannick Bollati
967 | username: Templay
968 | - id: '2205544208'
969 | name: "jay \U0001F987\U0001F3F4\U0001F344"
970 | username: _WildeAtHeart
971 | - id: '4894527845'
972 | name: "Prat nrkst \U0001F3F3\uFE0F\u200D\U0001F308\U0001F3F4"
973 | username: billyjef1
974 | - id: '2493609224'
975 | name: Gyro
976 | username: NotGyro
977 | - id: '635210503'
978 | name: .
979 | username: nothingmannow
980 | - id: '959108045535096832'
981 | name: "Andr\xE9 Oliveira"
982 | username: andregeorisk
983 | - id: '1295739326798733314'
984 | name: breath of desire
985 | username: breathofdesire_
986 | - id: '1221298152474464256'
987 | name: daytura
988 | username: ArchLeucoryx
989 | - id: '1233594974932996098'
990 | name: Jack
991 | username: kausch
992 | - id: '1281068231122247681'
993 | name: '@goth600'
994 | username: goth600
995 | - id: '4198832534'
996 | name: Noor Afshan Fathima
997 | username: noor_af93
998 | - id: '21271464'
999 | name: Ivashko, Vladimir A.
1000 | username: ivashko
1001 | - id: '6544132'
1002 | name: ZetaSeek
1003 | username: zetaseek
1004 | - id: '2846425072'
1005 | name: Florin R Prihotin
1006 | username: Florin_Prihotin
1007 | - id: '1171511199776690177'
1008 | name: DG.
1009 | username: dataghees
1010 | - id: '1363040159474675715'
1011 | name: "\u7F8A\u7F8A\u7F8A"
1012 | username: yyyzzz666
1013 | - id: '1366870949581086720'
1014 | name: toastloaf
1015 | username: toastloaf1
1016 | - id: '15383818'
1017 | name: little ms. heather
1018 | username: TopHat8855
1019 | - id: '1244766739440664576'
1020 | name: Daniel Barry
1021 | username: danielbarryphd
1022 | - id: '153359125'
1023 | name: Althea Flus
1024 | username: AltheaFlus
1025 | - id: '53885631'
1026 | name: SMS
1027 | username: sabre23t
1028 | - id: '23712932'
1029 | name: Jeffery Glasen
1030 | username: JefferyGlasen
1031 | - id: '1295140824095830016'
1032 | name: "Elijah \U0001F50D"
1033 | username: Finer_Grains
1034 | - id: '1416500532'
1035 | name: "(wannabe) \u0243reaker of (the Bad) Loops"
1036 | username: generativist
1037 | - id: '3219703272'
1038 | name: Anirudh Badri
1039 | username: anirudhbadri
1040 | - id: '906601541590814722'
1041 | name: monocoded
1042 | username: monocoded
1043 | - id: '1170064144411897857'
1044 | name: "joint le man \U0001F9D9\u200D\u2642\uFE0F"
1045 | username: TheJointleman
1046 | - id: '15659972'
1047 | name: reddy2go
1048 | username: reddy2go
1049 | - id: '1135401272457146369'
1050 | name: Oh another alt
1051 | username: OhAnotherAlt
1052 | - id: '380683106'
1053 | name: Cameron Yick
1054 | username: hydrosquall
1055 | - id: '881192533056827392'
1056 | name: "goblin \xE0 la mode \U0001F368"
1057 | username: goblinodds
1058 | - id: '2793328260'
1059 | name: advanced persistent friend
1060 | username: 0xtujmrrr
1061 | - id: '1022435555710255104'
1062 | name: algorithmist
1063 | username: algrthmst
1064 | - id: '1347627362188488704'
1065 | name: Handshake Institute
1066 | username: HNSInstitute
1067 | - id: '804700438398455808'
1068 | name: Trail Marks
1069 | username: TrailMarks
1070 | - id: '36806342'
1071 | name: Alex - @blueadept:matrix.org
1072 | username: stile65
1073 | - id: '1399483300838883330'
1074 | name: Samizdat
1075 | username: SamizdatOS
1076 | - id: '1349799793330188294'
1077 | name: William Young
1078 | username: Wi11iamYoung
1079 | - id: '841207826148151296'
1080 | name: Thomas Sigmund
1081 | username: ThomasSigmund
1082 | - id: '947211792173993989'
1083 | name: acidic chew
1084 | username: acidicchew
1085 | - id: '289984786'
1086 | name: Diego de la Hera | @diegodlh@mastodon.social
1087 | username: diegodlh
1088 | - id: '57646649'
1089 | name: Kushan Joshi
1090 | username: kushan2020
1091 | - id: '14374944'
1092 | name: J. Ryan Stinnett (@jryans@merveilles.town)
1093 | username: jryans
1094 | - id: '277294812'
1095 | name: Allison Wilens
1096 | username: allisonwilens
1097 | - id: '14209841'
1098 | name: "Robert Barat \U0001F339"
1099 | username: volt4ire
1100 | - id: '134576119'
1101 | name: "Paul \U0001F1FA\U0001F1E6\U0001F69C"
1102 | username: djoghurt
1103 | - id: '360015936'
1104 | name: MBA Google.fr
1105 | username: mbagoogle
1106 | - id: '17962401'
1107 | name: '(ankostis) + #COVIDisAirborne'
1108 | username: ankostis
1109 | - id: '1150439796789190656'
1110 | name: Jon Bo
1111 | username: jondotbo
1112 | - id: '75123'
1113 | name: "Sam Klein (\xBF)"
1114 | username: metasj
1115 | - id: '1256624514500374530'
1116 | name: Abstract Fairy
1117 | username: AbstractFairy
1118 | - id: '350969975'
1119 | name: "Robert Haisfield \U0001F914\uFE0F\U0001F50E\U0001F92F\U0001F501"
1120 | username: RobertHaisfield
1121 | - id: '925818951925460998'
1122 | name: militant; alienated
1123 | username: ferment_absorb
1124 | - id: '759203344191778816'
1125 | name: hype
1126 | username: hyperstition_
1127 | - id: '152194866'
1128 | name: "Patrick Durusau \u23F3 White Person on White Supremacy"
1129 | username: patrickDurusau
1130 | - id: '1358562522719080449'
1131 | name: moaparty
1132 | username: moaparty
1133 | - id: '1173319262846541824'
1134 | name: polarizer app
1135 | username: PolarizerIO
1136 | - id: '570635991'
1137 | name: Tyrathalis
1138 | username: Tyrathalis
1139 | - id: '1101907614168956928'
1140 | name: Aviram ZIV
1141 | username: ziv_aviram
1142 | - id: '730685677613031424'
1143 | name: introspiral (98/100)
1144 | username: introspiral
1145 | - id: '1553517385'
1146 | name: "Jakob \U0001FAB5"
1147 | username: DerScheinriese
1148 | - id: '42812541'
1149 | name: marls
1150 | username: marls_canyon
1151 | - id: '3656036835'
1152 | name: JG
1153 | username: jesgma
1154 | - id: '2330032968'
1155 | name: A.C. Quinlan
1156 | username: museical
1157 | - id: '1205122572196761601'
1158 | name: Fractal Person
1159 | username: fractal_person
1160 | - id: '1445759592'
1161 | name: "Chris T\U000100CFler"
1162 | username: ctoler3
1163 | - id: '1294044410058100740'
1164 | name: justaghostintheshell
1165 | username: posthumanexp
1166 | - id: '8296412'
1167 | name: Alysson M. Costa
1168 | username: alycosta
1169 | - id: '2564021468'
1170 | name: Nils Hempel
1171 | username: NilsHempel
1172 | - id: '39284696'
1173 | name: poorly timed
1174 | username: harmongd
1175 | - id: '829703351822536709'
1176 | name: theSherwood
1177 | username: adamthesherwood
1178 | - id: '1270609763810885632'
1179 | name: "paradox \U0001F341"
1180 | username: zeno_dox
1181 | - id: '19578550'
1182 | name: Andric
1183 | username: andrictham
1184 | - id: '276770491'
1185 | name: . .
1186 | username: filipe_rasoilo
1187 | - id: '9510792'
1188 | name: mizminh
1189 | username: mizminh
1190 | - id: '1265915887858507778'
1191 | name: ((CONSTRUCTING))
1192 | username: RoamFm
1193 | - id: '33488192'
1194 | name: Valmaseda
1195 | username: Valmaseda
1196 | - id: '1372401441508372481'
1197 | name: copper_shovel
1198 | username: CopperShovel
1199 | - id: '1330777669307392001'
1200 | name: ArchiveBox
1201 | username: ArchiveBoxApp
1202 | - id: '1025295395579080704'
1203 | name: "Logseq \U0001FAB5"
1204 | username: logseq
1205 | - id: '226894530'
1206 | name: "Hanif \U0001F49C\U0001F49C"
1207 | username: analisistematik
1208 | - id: '2997046784'
1209 | name: Ronit Mandal
1210 | username: ronitmndl
1211 | - id: '1191585631'
1212 | name: CHT/Totalism.org
1213 | username: chtotalism
1214 | - id: '3051254626'
1215 | name: "Agn\u0117 Palamar\u010Duk \U0001F1F1\U0001F1F9\U0001F1EA\U0001F1FA"
1216 | username: AgnePalamarcuk
1217 | - id: '1070193487063248896'
1218 | name: "m\xE1ty\xE1s t\xF6r\u0151csik"
1219 | username: PVmatyas
1220 | - id: '44687734'
1221 | name: m bobak
1222 | username: MBstream
1223 | - id: '1260275657109635072'
1224 | name: Mikael Bergqvist
1225 | username: mik_bergqvist
1226 | - id: '365461645'
1227 | name: Ken
1228 | username: maximilian_kenz
1229 | - id: '267226131'
1230 | name: Sabine Reitmaier
1231 | username: SabReitmaier
1232 | - id: '2284319366'
1233 | name: manic map
1234 | username: neeasade
1235 | - id: '1349400678700109825'
1236 | name: Paulo Pinto
1237 | username: w3bk3rn3l
1238 | - id: '971605446380478464'
1239 | name: "Makerlog \U0001F680"
1240 | username: GetMakerlog
1241 | - id: '1166528729423765504'
1242 | name: Pedro Planas
1243 | username: pedroplanas77
1244 | - id: '942735451756138497'
1245 | name: "\U0001F31E anti-grifts engineering apprentice \U0001F50D"
1246 | username: samuelbars
1247 | - id: '1352494870162776064'
1248 | name: "Warlord of the Empirical Evil Supercluster | \U0001F9C2"
1249 | username: jaysalt6
1250 | - id: '550421870'
1251 | name: Rodrigo Baraglia
1252 | username: encincovoy
1253 | - id: '1347774807975329792'
1254 | name: "erisianwhispers \u2694\uFE0F"
1255 | username: lemurianculture
1256 | - id: '1059819709758083073'
1257 | name: dylsteck.eth
1258 | username: Dylan_Steck
1259 | - id: '1175126948185608193'
1260 | name: yiction
1261 | username: slimdaveyy
1262 | - id: '14797127'
1263 | name: "Olof Lindholm \U0001F1FA\U0001F1E6"
1264 | username: oloflindholm
1265 | - id: '15087237'
1266 | name: "Juan Rafael \xC1lvarez"
1267 | username: juanra31a
1268 | - id: '1158132886983401474'
1269 | name: "ELIAS\U0001F50DSCHMIED in Berlin for a while"
1270 | username: reconfigurthing
1271 | - id: '290957541'
1272 | name: Dmitriy
1273 | username: metamitya
1274 | - id: '1047117074118692864'
1275 | name: jonathan
1276 | username: l_o_r_a_n_d
1277 | - id: '15064825'
1278 | name: hftf
1279 | username: hftf
1280 | - id: '1217741094601068549'
1281 | name: Michael Ameh
1282 | username: wazirin_dangi
1283 | - id: '1252189847206293504'
1284 | name: "Veronika Winters(vera)\U0001F344"
1285 | username: verxnika999
1286 | - id: '13645402'
1287 | name: Chris Aldrich
1288 | username: ChrisAldrich
1289 | - id: '1114580800937156608'
1290 | name: cognacore
1291 | username: cognacore
1292 | - id: '1230350883667501057'
1293 | name: "Groves \U0001F333 Your Partner in Thought"
1294 | username: withgroves
1295 | - id: '1425896478'
1296 | name: "Sailor Neptune `\xB0\u2022\u25CB"
1297 | username: TJSMoura
1298 | - id: '52780825'
1299 | name: dickon bevington
1300 | username: dickonb
1301 | - id: '19638811'
1302 | name: "\u2116 \u2022 all the 10,000 things/100"
1303 | username: arabelladevine
1304 | - id: '1226698564555829249'
1305 | name: Aly
1306 | username: Fish_CTO
1307 | - id: '1336253103700504577'
1308 | name: "unknown quan2\uFE0F\u20E3ty"
1309 | username: unknownquan2ty
1310 | - id: '886987824989384704'
1311 | name: Francesca Pallopides
1312 | username: FPallopides
1313 | - id: '820607095'
1314 | name: printer paper people
1315 | username: printerpaperppl
1316 | - id: '254757657'
1317 | name: Simon Roderus
1318 | username: SimonRoderus
1319 | - id: '4166406203'
1320 | name: francisco
1321 | username: fannocua
1322 | - id: '467771034'
1323 | name: danielarmengolaltayo
1324 | username: armengolaltayo
1325 | - id: '1288562156850614272'
1326 | name: Quincy
1327 | username: QuincyThinks
1328 | - id: '1579316551'
1329 | name: "\u05D3\u05D0\u05D9\u05E7\u05D9\u05D9\u05D8"
1330 | username: doikaytnik
1331 | - id: '1096952132710756353'
1332 | name: the monkey king
1333 | username: wordrotator
1334 | - id: '3419155524'
1335 | name: todayIwasbetter
1336 | username: todayIwasbetter
1337 | - id: '1266858876747616263'
1338 | name: numen
1339 | username: decadantism
1340 | - id: '1306202289674256384'
1341 | name: jane
1342 | username: barbedcatpenis
1343 | - id: '1303923989048373248'
1344 | name: Hummingfly
1345 | username: ablueaeshna
1346 | - id: '973674380'
1347 | name: "\u27B3"
1348 | username: objetosmentales
1349 | - id: '1286057025433149441'
1350 | name: "\U0001F9CA - 1c3"
1351 | username: Ic3K1n9
1352 | - id: '895188246149423105'
1353 | name: Adenosine Triphosphate
1354 | username: hyperanomalous
1355 | - id: '773144670507499521'
1356 | name: In Flancia we'll meet!
1357 | username: flancian
1358 |
--------------------------------------------------------------------------------
/bots/twitter/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "agora twitter bot"
3 | version = "0.1.0"
4 | description = "https://anagora.org/agora-twitter-bot"
5 | authors = ["Flancian <0@flancia.org>"]
6 | license = "Apache"
7 |
8 | [tool.poetry.dependencies]
9 | python = "^3.7"
10 | cachetools = "^5.2.0"
11 | tweepy = "^4.10.1"
12 | requests = "^2.28.1"
13 | PyYAML = "^6.0"
14 |
15 | [tool.poetry.dev-dependencies]
16 |
17 | [build-system]
18 | requires = ["poetry-core>=1.0.0"]
19 | build-backend = "poetry.core.masonry.api"
20 |
--------------------------------------------------------------------------------
/bots/twitter/requirements.txt:
--------------------------------------------------------------------------------
1 | cachetools==4.2.2
2 | certifi==2021.5.30
3 | chardet==4.0.0
4 | idna==2.10
5 | oauthlib==3.1.1
6 | PySocks==1.7.1
7 | PyYAML==5.4.1
8 | requests==2.25.1
9 | requests-oauthlib==1.3.0
10 | six==1.16.0
11 | tweepy==3.10.0
12 | urllib3==1.26.6
13 |
--------------------------------------------------------------------------------
/bots/twitter/run-dev.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Development runner for the Agora Twitter bot (dry-run: nothing is posted).
# Moved to poetry.
# . venv/bin/activate
OUTPUT=/home/agora/agora/stream/
# Quote expansions so paths and forwarded arguments survive word splitting.
mkdir -p "${OUTPUT}"
poetry run ./agora-bot.py --config agora-bot.yaml --dry-run --follow --output-dir="${OUTPUT}" "$@"
7 |
--------------------------------------------------------------------------------
/bots/twitter/run-prod.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Production runner for the Agora Twitter bot (posts from the timeline).
# Moved to poetry.
# . venv/bin/activate
OUTPUT=/home/agora/agora/stream/
# Quote expansions so paths and forwarded arguments survive word splitting.
mkdir -p "${OUTPUT}"
# This shouldn't be needed but it is when running as a systemd service for some reason.
export PATH="$HOME/.local/bin:${PATH}"
# 10080 = 7d in minutes
# 40320 = 4w in minutes
poetry run ./agora-bot.py --config agora-bot.yaml --timeline --output-dir="${OUTPUT}" --max-age=40320 "$@"
11 |
--------------------------------------------------------------------------------
/bots/twitter/setup.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Bootstrap a local virtualenv with the bot's dependencies.

# Bug fix: the original `(... && exit 42)` ran `exit` inside a subshell, so the
# script kept going even when python3/pip3 were missing. Use an explicit
# if/exit so a missing interpreter actually aborts setup.
if ! (command -v python3 && command -v pip3) >/dev/null 2>&1; then
    echo 'Please install Python3 and pip using your OS packaging system. In Debian: sudo apt-get install python3 python3-venv python3-pip'
    exit 42
fi

python3 -m venv venv &&
. venv/bin/activate &&
pip3 install -r requirements.txt

echo "see agora-bridge.service and https://anagora.org/systemd for pointers on how to set up a production agora as a system service."
10 |
--------------------------------------------------------------------------------
/bots/twitter/tweets.yaml:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/flancian/agora-bridge/f338fc9ef528c4903f268ae0a1e80c6bcbeea865/bots/twitter/tweets.yaml
--------------------------------------------------------------------------------
/bots/youtube/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/flancian/agora-bridge/f338fc9ef528c4903f268ae0a1e80c6bcbeea865/bots/youtube/README.md
--------------------------------------------------------------------------------
/bots/youtube/playlist.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # Copyright 2022 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import argparse
17 |
18 | parser = argparse.ArgumentParser()
19 | parser.add_argument('playlist', nargs='+', help='Playlist to dump.')
20 | args = parser.parse_args()
21 |
def dump(playlist):
    """Print an Agora-formatted listing of a YouTube playlist.

    Args:
        playlist: list of playlist IDs (as produced by argparse with
            nargs='+'); only the first entry is dumped, matching the
            original behavior.
    """
    URL_BASE = "https://www.youtube.com/playlist?list="
    # Bug fix: use the function argument instead of reaching for the global
    # `args`, so dump() works for any caller-supplied list.
    playlist_id = playlist[0]
    from pytube import Playlist
    p = Playlist(URL_BASE + playlist_id)
    print(f"- a playlist.\n  - #go {URL_BASE}{playlist_id}")
    for idx, video in enumerate(p):
        print(f"  - #{idx} {video}?list={playlist_id}")

if __name__ == '__main__':
    dump(args.playlist)
33 |
--------------------------------------------------------------------------------
/bots/youtube/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "youtube"
3 | version = "0.1.0"
4 | description = ""
5 | authors = ["Flancian <0@flancia.org>"]
6 | readme = "README.md"
7 |
8 | [tool.poetry.dependencies]
9 | python = "^3.11"
10 | pytube = "^15.0.0"
11 |
12 |
13 | [build-system]
14 | requires = ["poetry-core"]
15 | build-backend = "poetry.core.masonry.api"
16 |
--------------------------------------------------------------------------------
/bots/youtube/requirements.txt:
--------------------------------------------------------------------------------
1 | pytube==12.1.0
2 |
--------------------------------------------------------------------------------
/build-image.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker build -t git.coopcloud.tech/flancian/agora-bridge .
4 |
--------------------------------------------------------------------------------
/clean.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | cd ~/agora && rm */*/.git/*lock
3 |
--------------------------------------------------------------------------------
/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #
3 | # Caveats in run.sh apply :)
4 | #
5 | # For a supported way to run an Agora on containers, please refer to [[agora recipe]] for [[coop cloud]] in the Agora of Flancia: https://anagora.org/agora-recipe
6 |
7 | git pull
8 | poetry install
9 | (cd ~/agora && git pull)
10 | (cd ~/agora && find . -iname 'index.lock' -exec rm {} \;)
11 | ./run-dev.sh
12 |
--------------------------------------------------------------------------------
/fedwiki.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Mirror a fedwiki site: $1 = wiki host URL, $2 = output directory.
# Bug fix: abort if the fedwiki directory is missing instead of running
# main.go from the wrong cwd; quote the arguments so URLs/paths with
# special characters are passed through intact.
cd fedwiki || exit 1
go run main.go "$1" "$2"
--------------------------------------------------------------------------------
/fedwiki/.shadow-cljs/classpath.edn:
--------------------------------------------------------------------------------
1 | {:dependencies [[thheller/shadow-cljs "2.15.12" :classifier "aot"] [org.clojure/data.json "2.4.0" :exclusions [com.cognitect/transit-java org.clojure/clojure thheller/shadow-cljs org.clojure/clojurescript com.cognitect/transit-clj org.clojure/core.async]]], :version "2.15.12", :files ["/home/flancian/.m2/repository/fipp/fipp/0.6.24/fipp-0.6.24.jar" "/home/flancian/.m2/repository/com/fasterxml/jackson/core/jackson-core/2.8.7/jackson-core-2.8.7.jar" "/home/flancian/.m2/repository/com/bhauman/cljs-test-display/0.1.1/cljs-test-display-0.1.1.jar" "/home/flancian/.m2/repository/org/msgpack/msgpack/0.6.12/msgpack-0.6.12.jar" "/home/flancian/.m2/repository/org/jboss/xnio/xnio-nio/3.8.0.Final/xnio-nio-3.8.0.Final.jar" "/home/flancian/.m2/repository/ring/ring-core/1.9.4/ring-core-1.9.4.jar" "/home/flancian/.m2/repository/io/undertow/undertow-core/2.2.4.Final/undertow-core-2.2.4.Final.jar" "/home/flancian/.m2/repository/edn-query-language/eql/0.0.9/eql-0.0.9.jar" "/home/flancian/.m2/repository/org/graalvm/truffle/truffle-api/21.1.0/truffle-api-21.1.0.jar" "/home/flancian/.m2/repository/org/slf4j/slf4j-api/1.7.30/slf4j-api-1.7.30.jar" "/home/flancian/.m2/repository/org/clojure/data.priority-map/1.0.0/data.priority-map-1.0.0.jar" "/home/flancian/.m2/repository/nrepl/nrepl/0.8.3/nrepl-0.8.3.jar" "/home/flancian/.m2/repository/org/jboss/xnio/xnio-api/3.8.0.Final/xnio-api-3.8.0.Final.jar" "/home/flancian/.m2/repository/javax/xml/bind/jaxb-api/2.3.0/jaxb-api-2.3.0.jar" "/home/flancian/.m2/repository/com/wsscode/spec-inspec/1.0.0-alpha2/spec-inspec-1.0.0-alpha2.jar" "/home/flancian/.m2/repository/com/wsscode/pathom/2.2.31/pathom-2.2.31.jar" "/home/flancian/.m2/repository/org/clojure/tools.analyzer.jvm/1.1.0/tools.analyzer.jvm-1.1.0.jar" "/home/flancian/.m2/repository/com/cognitect/transit-js/0.8.874/transit-js-0.8.874.jar" "/home/flancian/.m2/repository/org/jboss/threads/jboss-threads/3.1.0.Final/jboss-threads-3.1.0.Final.jar" 
"/home/flancian/.m2/repository/org/wildfly/client/wildfly-client-config/1.0.1.Final/wildfly-client-config-1.0.1.Final.jar" "/home/flancian/.m2/repository/thheller/shadow-cljsjs/0.0.22/shadow-cljsjs-0.0.22.jar" "/home/flancian/.m2/repository/org/clojure/google-closure-library/0.0-20201211-3e6c510d/google-closure-library-0.0-20201211-3e6c510d.jar" "/home/flancian/.m2/repository/org/ow2/asm/asm/5.2/asm-5.2.jar" "/home/flancian/.m2/repository/ring/ring-codec/1.1.3/ring-codec-1.1.3.jar" "/home/flancian/.m2/repository/crypto-equality/crypto-equality/1.0.0/crypto-equality-1.0.0.jar" "/home/flancian/.m2/repository/thheller/shadow-cljs/2.15.12/shadow-cljs-2.15.12-aot.jar" "/home/flancian/.m2/repository/org/clojure/core.rrb-vector/0.1.1/core.rrb-vector-0.1.1.jar" "/home/flancian/.m2/repository/org/clojure/tools.analyzer/1.0.0/tools.analyzer-1.0.0.jar" "/home/flancian/.m2/repository/expound/expound/0.8.9/expound-0.8.9.jar" "/home/flancian/.m2/repository/org/clojure/google-closure-library-third-party/0.0-20201211-3e6c510d/google-closure-library-third-party-0.0-20201211-3e6c510d.jar" "/home/flancian/.m2/repository/org/clojure/clojure/1.10.3/clojure-1.10.3.jar" "/home/flancian/.m2/repository/org/clojure/core.async/1.3.618/core.async-1.3.618.jar" "/home/flancian/.m2/repository/org/clojure/test.check/1.1.0/test.check-1.1.0.jar" "/home/flancian/.m2/repository/com/google/javascript/closure-compiler-unshaded/v20210505/closure-compiler-unshaded-v20210505.jar" "/home/flancian/.m2/repository/com/googlecode/json-simple/json-simple/1.1.1/json-simple-1.1.1.jar" "/home/flancian/.m2/repository/org/clojure/core.cache/1.0.207/core.cache-1.0.207.jar" "/home/flancian/.m2/repository/hiccup/hiccup/1.0.5/hiccup-1.0.5.jar" "/home/flancian/.m2/repository/crypto-random/crypto-random/1.2.1/crypto-random-1.2.1.jar" "/home/flancian/.m2/repository/org/clojure/clojurescript/1.10.879/clojurescript-1.10.879.jar" "/home/flancian/.m2/repository/org/clojure/spec.alpha/0.2.194/spec.alpha-0.2.194.jar" 
"/home/flancian/.m2/repository/org/jboss/logging/jboss-logging/3.4.1.Final/jboss-logging-3.4.1.Final.jar" "/home/flancian/.m2/repository/thheller/shadow-client/1.3.3/shadow-client-1.3.3.jar" "/home/flancian/.m2/repository/org/clojure/core.memoize/1.0.236/core.memoize-1.0.236.jar" "/home/flancian/.m2/repository/org/graalvm/regex/regex/21.1.0/regex-21.1.0.jar" "/home/flancian/.m2/repository/com/cognitect/transit-cljs/0.8.269/transit-cljs-0.8.269.jar" "/home/flancian/.m2/repository/org/clojure/core.specs.alpha/0.2.56/core.specs.alpha-0.2.56.jar" "/home/flancian/.m2/repository/org/clojure/tools.reader/1.3.6/tools.reader-1.3.6.jar" "/home/flancian/.m2/repository/org/clojure/tools.cli/1.0.206/tools.cli-1.0.206.jar" "/home/flancian/.m2/repository/org/graalvm/js/js/21.1.0/js-21.1.0.jar" "/home/flancian/.m2/repository/io/methvin/directory-watcher/0.15.0/directory-watcher-0.15.0.jar" "/home/flancian/.m2/repository/com/cognitect/transit-clj/1.0.324/transit-clj-1.0.324.jar" "/home/flancian/.m2/repository/com/ibm/icu/icu4j/68.2/icu4j-68.2.jar" "/home/flancian/.m2/repository/commons-fileupload/commons-fileupload/1.4/commons-fileupload-1.4.jar" "/home/flancian/.m2/repository/net/java/dev/jna/jna/5.7.0/jna-5.7.0.jar" "/home/flancian/.m2/repository/org/wildfly/common/wildfly-common/1.5.2.Final/wildfly-common-1.5.2.Final.jar" "/home/flancian/.m2/repository/cider/piggieback/0.5.2/piggieback-0.5.2.jar" "/home/flancian/.m2/repository/org/clojure/data.json/2.4.0/data.json-2.4.0.jar" "/home/flancian/.m2/repository/org/graalvm/js/js-scriptengine/21.1.0/js-scriptengine-21.1.0.jar" "/home/flancian/.m2/repository/org/javassist/javassist/3.18.1-GA/javassist-3.18.1-GA.jar" "/home/flancian/.m2/repository/thheller/shadow-undertow/0.1.0/shadow-undertow-0.1.0.jar" "/home/flancian/.m2/repository/commons-io/commons-io/2.10.0/commons-io-2.10.0.jar" "/home/flancian/.m2/repository/thheller/shadow-util/0.7.0/shadow-util-0.7.0.jar" 
"/home/flancian/.m2/repository/spec-coerce/spec-coerce/1.0.0-alpha6/spec-coerce-1.0.0-alpha6.jar" "/home/flancian/.m2/repository/com/cognitect/transit-java/1.0.343/transit-java-1.0.343.jar" "/home/flancian/.m2/repository/org/graalvm/sdk/graal-sdk/21.1.0/graal-sdk-21.1.0.jar" "/home/flancian/.m2/repository/commons-codec/commons-codec/1.10/commons-codec-1.10.jar"], :deps-hierarchy {[org.clojure/data.json "2.4.0" :exclusions [[com.cognitect/transit-java] [org.clojure/clojure] [thheller/shadow-cljs] [org.clojure/clojurescript] [com.cognitect/transit-clj] [org.clojure/core.async]]] nil, [thheller/shadow-cljs "2.15.12" :classifier "aot"] {[cider/piggieback "0.5.2" :exclusions [[org.clojure/clojure] [org.clojure/clojurescript] [nrepl]]] nil, [com.bhauman/cljs-test-display "0.1.1"] nil, [com.cognitect/transit-clj "1.0.324"] {[com.cognitect/transit-java "1.0.343"] {[com.fasterxml.jackson.core/jackson-core "2.8.7"] nil, [commons-codec "1.10"] nil, [javax.xml.bind/jaxb-api "2.3.0"] nil, [org.msgpack/msgpack "0.6.12"] {[com.googlecode.json-simple/json-simple "1.1.1" :exclusions [[junit]]] nil, [org.javassist/javassist "3.18.1-GA"] nil}}}, [com.cognitect/transit-cljs "0.8.269"] {[com.cognitect/transit-js "0.8.874"] nil}, [com.google.javascript/closure-compiler-unshaded "v20210505"] nil, [com.wsscode/pathom "2.2.31" :exclusions [[org.clojure/data.json] [fulcrologic/fulcro] [camel-snake-kebab]]] {[com.wsscode/spec-inspec "1.0.0-alpha2"] nil, [edn-query-language/eql "0.0.9"] nil, [spec-coerce "1.0.0-alpha6"] nil}, [expound "0.8.9"] nil, [fipp "0.6.24"] {[org.clojure/core.rrb-vector "0.1.1"] nil}, [hiccup "1.0.5"] nil, [io.methvin/directory-watcher "0.15.0"] {[net.java.dev.jna/jna "5.7.0"] nil, [org.slf4j/slf4j-api "1.7.30"] nil}, [nrepl "0.8.3"] nil, [org.clojure/clojure "1.10.3"] {[org.clojure/core.specs.alpha "0.2.56"] nil, [org.clojure/spec.alpha "0.2.194"] nil}, [org.clojure/clojurescript "1.10.879" :exclusions [[com.google.javascript/closure-compiler-unshaded] 
[org.clojure/google-closure-library] [org.clojure/google-closure-library-third-party]]] nil, [org.clojure/core.async "1.3.618"] {[org.clojure/tools.analyzer.jvm "1.1.0"] {[org.clojure/core.memoize "1.0.236"] {[org.clojure/core.cache "1.0.207"] {[org.clojure/data.priority-map "1.0.0"] nil}}, [org.clojure/tools.analyzer "1.0.0"] nil, [org.ow2.asm/asm "5.2"] nil}}, [org.clojure/google-closure-library-third-party "0.0-20201211-3e6c510d"] nil, [org.clojure/google-closure-library "0.0-20201211-3e6c510d"] nil, [org.clojure/test.check "1.1.0"] nil, [org.clojure/tools.cli "1.0.206"] nil, [org.clojure/tools.reader "1.3.6"] nil, [org.graalvm.js/js-scriptengine "21.1.0"] nil, [org.graalvm.js/js "21.1.0"] {[com.ibm.icu/icu4j "68.2"] nil, [org.graalvm.regex/regex "21.1.0"] nil, [org.graalvm.sdk/graal-sdk "21.1.0"] nil, [org.graalvm.truffle/truffle-api "21.1.0"] nil}, [ring/ring-core "1.9.4" :exclusions [[clj-time]]] {[commons-fileupload "1.4"] nil, [commons-io "2.10.0"] nil, [crypto-equality "1.0.0"] nil, [crypto-random "1.2.1"] nil, [ring/ring-codec "1.1.3"] nil}, [thheller/shadow-client "1.3.3"] nil, [thheller/shadow-cljsjs "0.0.22"] nil, [thheller/shadow-undertow "0.1.0"] {[io.undertow/undertow-core "2.2.4.Final"] {[org.jboss.logging/jboss-logging "3.4.1.Final"] nil, [org.jboss.threads/jboss-threads "3.1.0.Final" :exclusions [[org.wildfly.common/wildfly-common]]] nil, [org.jboss.xnio/xnio-api "3.8.0.Final" :exclusions [[org.jboss.threads/jboss-threads]]] {[org.wildfly.client/wildfly-client-config "1.0.1.Final"] nil, [org.wildfly.common/wildfly-common "1.5.2.Final"] nil}, [org.jboss.xnio/xnio-nio "3.8.0.Final" :scope "runtime" :exclusions [[org.wildfly.common/wildfly-common]]] nil}}, [thheller/shadow-util "0.7.0"] nil}}, :deps-resolved {[fipp "0.6.24"] #{[org.clojure/core.rrb-vector "0.1.1"]}, [com.fasterxml.jackson.core/jackson-core "2.8.7"] nil, [com.bhauman/cljs-test-display "0.1.1"] nil, [org.msgpack/msgpack "0.6.12"] #{[com.googlecode.json-simple/json-simple "1.1.1" 
:exclusions [[junit]]] [org.javassist/javassist "3.18.1-GA"]}, [org.jboss.xnio/xnio-nio "3.8.0.Final" :scope "runtime" :exclusions [[org.wildfly.common/wildfly-common]]] nil, [ring/ring-core "1.9.4" :exclusions [[clj-time]]] #{[ring/ring-codec "1.1.3"] [crypto-equality "1.0.0"] [crypto-random "1.2.1"] [commons-fileupload "1.4"] [commons-io "2.10.0"]}, [io.undertow/undertow-core "2.2.4.Final"] #{[org.jboss.xnio/xnio-nio "3.8.0.Final" :scope "runtime" :exclusions [[org.wildfly.common/wildfly-common]]] [org.jboss.xnio/xnio-api "3.8.0.Final" :exclusions [[org.jboss.threads/jboss-threads]]] [org.jboss.threads/jboss-threads "3.1.0.Final" :exclusions [[org.wildfly.common/wildfly-common]]] [org.jboss.logging/jboss-logging "3.4.1.Final"]}, [edn-query-language/eql "0.0.9"] nil, [org.graalvm.truffle/truffle-api "21.1.0"] nil, [org.slf4j/slf4j-api "1.7.30"] nil, [org.clojure/data.priority-map "1.0.0"] nil, [nrepl "0.8.3"] nil, [org.jboss.xnio/xnio-api "3.8.0.Final" :exclusions [[org.jboss.threads/jboss-threads]]] #{[org.wildfly.client/wildfly-client-config "1.0.1.Final"] [org.wildfly.common/wildfly-common "1.5.2.Final"]}, [javax.xml.bind/jaxb-api "2.3.0"] nil, [com.wsscode/spec-inspec "1.0.0-alpha2"] nil, [com.wsscode/pathom "2.2.31" :exclusions [[org.clojure/data.json] [fulcrologic/fulcro] [camel-snake-kebab]]] #{[edn-query-language/eql "0.0.9"] [com.wsscode/spec-inspec "1.0.0-alpha2"] [spec-coerce "1.0.0-alpha6"]}, [org.clojure/tools.analyzer.jvm "1.1.0"] #{[org.ow2.asm/asm "5.2"] [org.clojure/tools.analyzer "1.0.0"] [org.clojure/core.memoize "1.0.236"]}, [com.cognitect/transit-js "0.8.874"] nil, [org.jboss.threads/jboss-threads "3.1.0.Final" :exclusions [[org.wildfly.common/wildfly-common]]] nil, [org.wildfly.client/wildfly-client-config "1.0.1.Final"] nil, [thheller/shadow-cljsjs "0.0.22"] nil, [org.clojure/google-closure-library "0.0-20201211-3e6c510d"] nil, [org.ow2.asm/asm "5.2"] nil, [ring/ring-codec "1.1.3"] nil, [crypto-equality "1.0.0"] nil, [thheller/shadow-cljs 
"2.15.12" :classifier "aot"] #{[fipp "0.6.24"] [com.bhauman/cljs-test-display "0.1.1"] [ring/ring-core "1.9.4" :exclusions [[clj-time]]] [nrepl "0.8.3"] [com.wsscode/pathom "2.2.31" :exclusions [[org.clojure/data.json] [fulcrologic/fulcro] [camel-snake-kebab]]] [thheller/shadow-cljsjs "0.0.22"] [org.clojure/google-closure-library "0.0-20201211-3e6c510d"] [expound "0.8.9"] [org.clojure/google-closure-library-third-party "0.0-20201211-3e6c510d"] [org.clojure/clojure "1.10.3"] [org.clojure/core.async "1.3.618"] [org.clojure/test.check "1.1.0"] [com.google.javascript/closure-compiler-unshaded "v20210505"] [hiccup "1.0.5"] [org.clojure/clojurescript "1.10.879" :exclusions [[com.google.javascript/closure-compiler-unshaded] [org.clojure/google-closure-library] [org.clojure/google-closure-library-third-party]]] [thheller/shadow-client "1.3.3"] [com.cognitect/transit-cljs "0.8.269"] [org.clojure/tools.reader "1.3.6"] [org.clojure/tools.cli "1.0.206"] [org.graalvm.js/js "21.1.0"] [io.methvin/directory-watcher "0.15.0"] [com.cognitect/transit-clj "1.0.324"] [cider/piggieback "0.5.2" :exclusions [[org.clojure/clojure] [org.clojure/clojurescript] [nrepl]]] [org.graalvm.js/js-scriptengine "21.1.0"] [thheller/shadow-undertow "0.1.0"] [thheller/shadow-util "0.7.0"]}, [org.clojure/core.rrb-vector "0.1.1"] nil, [org.clojure/tools.analyzer "1.0.0"] nil, [expound "0.8.9"] nil, [org.clojure/google-closure-library-third-party "0.0-20201211-3e6c510d"] nil, [org.clojure/clojure "1.10.3"] #{[org.clojure/spec.alpha "0.2.194"] [org.clojure/core.specs.alpha "0.2.56"]}, [org.clojure/core.async "1.3.618"] #{[org.clojure/tools.analyzer.jvm "1.1.0"]}, [org.clojure/test.check "1.1.0"] nil, [com.google.javascript/closure-compiler-unshaded "v20210505"] nil, [com.googlecode.json-simple/json-simple "1.1.1" :exclusions [[junit]]] nil, [org.clojure/core.cache "1.0.207"] #{[org.clojure/data.priority-map "1.0.0"]}, [hiccup "1.0.5"] nil, [crypto-random "1.2.1"] nil, [org.clojure/clojurescript "1.10.879" 
:exclusions [[com.google.javascript/closure-compiler-unshaded] [org.clojure/google-closure-library] [org.clojure/google-closure-library-third-party]]] nil, [org.clojure/spec.alpha "0.2.194"] nil, [org.jboss.logging/jboss-logging "3.4.1.Final"] nil, [thheller/shadow-client "1.3.3"] nil, [org.clojure/core.memoize "1.0.236"] #{[org.clojure/core.cache "1.0.207"]}, [org.graalvm.regex/regex "21.1.0"] nil, [com.cognitect/transit-cljs "0.8.269"] #{[com.cognitect/transit-js "0.8.874"]}, [org.clojure/core.specs.alpha "0.2.56"] nil, [org.clojure/tools.reader "1.3.6"] nil, [org.clojure/tools.cli "1.0.206"] nil, [org.graalvm.js/js "21.1.0"] #{[org.graalvm.truffle/truffle-api "21.1.0"] [org.graalvm.regex/regex "21.1.0"] [com.ibm.icu/icu4j "68.2"] [org.graalvm.sdk/graal-sdk "21.1.0"]}, [io.methvin/directory-watcher "0.15.0"] #{[org.slf4j/slf4j-api "1.7.30"] [net.java.dev.jna/jna "5.7.0"]}, [com.cognitect/transit-clj "1.0.324"] #{[com.cognitect/transit-java "1.0.343"]}, [com.ibm.icu/icu4j "68.2"] nil, [commons-fileupload "1.4"] nil, [net.java.dev.jna/jna "5.7.0"] nil, [org.wildfly.common/wildfly-common "1.5.2.Final"] nil, [cider/piggieback "0.5.2" :exclusions [[org.clojure/clojure] [org.clojure/clojurescript] [nrepl]]] nil, [org.clojure/data.json "2.4.0" :exclusions [[com.cognitect/transit-java] [org.clojure/clojure] [thheller/shadow-cljs] [org.clojure/clojurescript] [com.cognitect/transit-clj] [org.clojure/core.async]]] nil, [org.graalvm.js/js-scriptengine "21.1.0"] nil, [org.javassist/javassist "3.18.1-GA"] nil, [thheller/shadow-undertow "0.1.0"] #{[io.undertow/undertow-core "2.2.4.Final"]}, [commons-io "2.10.0"] nil, [thheller/shadow-util "0.7.0"] nil, [spec-coerce "1.0.0-alpha6"] nil, [com.cognitect/transit-java "1.0.343"] #{[com.fasterxml.jackson.core/jackson-core "2.8.7"] [org.msgpack/msgpack "0.6.12"] [javax.xml.bind/jaxb-api "2.3.0"] [commons-codec "1.10"]}, [org.graalvm.sdk/graal-sdk "21.1.0"] nil, [commons-codec "1.10"] nil}}
--------------------------------------------------------------------------------
/fedwiki/go.mod:
--------------------------------------------------------------------------------
1 | module github.com/fuck-capitalism/agora-bridge/fedwiki
2 |
3 | go 1.17
4 |
5 | require github.com/tidwall/gjson v1.12.1
6 |
--------------------------------------------------------------------------------
/fedwiki/go.sum:
--------------------------------------------------------------------------------
1 | github.com/tidwall/gjson v1.12.1 h1:ikuZsLdhr8Ws0IdROXUS1Gi4v9Z4pGqpX/CvJkxvfpo=
2 | github.com/tidwall/gjson v1.12.1/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
3 | github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
4 | github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
5 | github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs=
6 | github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
7 |
--------------------------------------------------------------------------------
/fedwiki/main.go:
--------------------------------------------------------------------------------
1 | // (defn host-to-json [host] (slurp (str host "/system/export.json")))
2 |
3 | package main
4 |
5 | import (
6 | "io/ioutil"
7 | "os"
8 |
9 | "github.com/fuck-capitalism/agora-bridge/fedwiki/parsing"
10 | )
11 |
12 | // (doseq [page pages]
13 | // (let [filename (str path "/" (page :slug) ".md")]
14 | // (spit filename (page :content))
15 | // (let [file (java.io.File. filename)]
16 | // (-> file (.setLastModified (long (page :created))))
17 | // (println (str "wrote " filename)))))))
18 |
19 | func CreateRecord(rec parsing.Record, path string) error {
20 | filename := path + "/" + rec.Slug + ".md"
21 | err := ioutil.WriteFile(filename, []byte(rec.Content), 0644)
22 | if err != nil {
23 | return err
24 | }
25 | err = os.Chtimes(filename, rec.Created, rec.Created)
26 | if err != nil {
27 | return err
28 | }
29 | return nil
30 | }
31 |
32 | func main() {
33 | url := os.Args[1] + "/system/export.json"
34 | path := os.Args[2]
35 | err := os.MkdirAll(path, os.ModePerm)
36 | if err != nil {
37 | panic(err)
38 | }
39 | json, err := parsing.GetJson(url)
40 | if err != nil {
41 | panic(err)
42 | }
43 | records := parsing.JsonToRecords(json)
44 | for _, rec := range records {
45 | err := CreateRecord(rec, path)
46 | if err != nil {
47 | panic(err)
48 | }
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/fedwiki/parsing/parsing.go:
--------------------------------------------------------------------------------
1 | package parsing
2 |
3 | import (
4 | "encoding/json"
5 | "fmt"
6 | "io/ioutil"
7 | "net/http"
8 | "strings"
9 | "time"
10 |
11 | "github.com/tidwall/gjson"
12 | )
13 |
// MapJson fetches url and decodes the response body as a JSON object.
//
// Fixes: the response body is now closed (the original leaked the
// underlying connection), and an unmarshal failure is wrapped into the
// returned error instead of being printed to stdout.
func MapJson(url string) (map[string]interface{}, error) {
	r, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	defer r.Body.Close()
	b, err := ioutil.ReadAll(r.Body)
	if err != nil {
		return nil, err
	}
	var data map[string]interface{}
	if err := json.Unmarshal(b, &data); err != nil {
		return nil, fmt.Errorf("unmarshalling response from %s: %w", url, err)
	}
	return data, nil
}
32 |
// GetJson fetches url and returns the raw response body.
//
// Fix: close the response body so the underlying connection is not
// leaked (the original never closed it).
func GetJson(url string) ([]byte, error) {
	r, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	defer r.Body.Close()
	return ioutil.ReadAll(r.Body)
}
44 |
45 | // (defn json-to-pages [json]
46 | // (let [body (json/read-str json :key-fn keyword)]
47 | // (for [[slug page] body]
48 | // (let [content (page-to-content page)
49 | // created (page-created page)]
50 | // {:slug (name slug) :content content :created created}))))
51 |
52 | // (defn page-to-content [page]
53 | // (let [story (page :story)
54 |
55 | // texts (for [e story] (e :text))
56 | // content (str (clojure.string/join "\n\n" texts) "\n")]
57 | // content))
58 |
59 | // (defn page-created [page]
60 | // ((first (filter (fn [x] (or (= (x :type) "create") (= (x :type) "fork"))) (page :journal))) :date))
61 |
// largest returns the maximum value in a, or 0 when a is empty.
//
// Fix: the original indexed a[0] unconditionally and panicked on an
// empty slice, which JsonToRecords can produce for a page with no
// journal dates.
func largest(a []int64) int64 {
	if len(a) == 0 {
		return 0
	}
	max := a[0]
	for _, v := range a[1:] {
		if v > max {
			max = v
		}
	}
	return max
}
71 |
// Record is one fedwiki page ready to be written to disk as
// "<Slug>.md" with Content as the body and Created as the file mtime.
type Record struct {
	Slug    string    // page slug; used as the output filename stem
	Content string    // story texts joined with blank lines
	Created time.Time // most recent journal date on the page
}
77 |
78 | func contentBody(page gjson.Result) string {
79 | texts := page.Get("story.#.text").Array()
80 | t := make([]string, len(texts))
81 | for i, v := range texts {
82 | t[i] = v.String()
83 | }
84 | content := strings.Join(t, "\n\n")
85 | return content
86 | }
87 |
88 | func JsonToRecords(input []byte) []Record {
89 | body := string(input)
90 | result := gjson.Parse(body)
91 | records := make([]Record, 0)
92 | result.ForEach(func(slug, page gjson.Result) bool {
93 | content := contentBody(page)
94 | vals := page.Get("journal.#.date").Array()
95 | ints := make([]int64, len(vals))
96 | for _, v := range vals {
97 | // ints[i] = v.Int()
98 | ints = append(ints, v.Int())
99 | }
100 | l := largest(ints)
101 | created := time.Unix(0, l*int64(time.Millisecond))
102 | records = append(records, Record{Slug: slug.String(), Content: content, Created: created})
103 | return true // keep iterating
104 | })
105 |
106 | return records
107 |
108 | }
109 |
--------------------------------------------------------------------------------
/fedwiki/parsing/parsing_test.go:
--------------------------------------------------------------------------------
1 | package parsing
2 |
3 | import (
4 | "fmt"
5 | "testing"
6 | )
7 |
// TestMapJson fetches a live fedwiki export and decodes it into a map.
// NOTE(review): integration test — requires network access to
// vera.wiki.anagora.org and will fail offline or if that host is down.
func TestMapJson(t *testing.T) {
	host := "http://vera.wiki.anagora.org"
	url := fmt.Sprintf("%s/system/export.json", host)
	data, err := MapJson(url)
	if err != nil {
		t.Error(err)
	}
	t.Log(data)

}
18 |
// TestJsonToRecords fetches a live fedwiki export and converts it into
// Records. NOTE(review): integration test — requires network access to
// vera.wiki.anagora.org and will fail offline or if that host is down.
func TestJsonToRecords(t *testing.T) {
	host := "http://vera.wiki.anagora.org"
	url := fmt.Sprintf("%s/system/export.json", host)
	data, err := GetJson(url)
	if err != nil {
		t.Error(err)
	}
	records := JsonToRecords(data)
	t.Log(records)
}
29 |
--------------------------------------------------------------------------------
/feed.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # Copyright 2021 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License. # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | import glob
16 | import itertools
17 | import os
18 | import feedparser
19 | import pprint
20 |
# When True (currently always), main() only fetches and pretty-prints feeds;
# the posting branch is never taken.
DEBUG = True

# Agora username -> Hypothes.is username whose annotation stream to pull.
HYPOTHESIS_USERS = {
    'flancian': 'flancian',
    'diegodlh': 'diegodlh',
}

# Hypothes.is tags (in Agora [[wikilink]] form) whose streams to pull.
HYPOTHESIS_TAGS = ['[[byzantine emperors]]']

# Agora username -> Mastodon username.
# NOTE(review): unused in the code visible here — confirm before relying on it.
MASTODON_USERS = {
    'flancian': 'flancian',
    'diegodlh': 'diegodlh',
}
34 |
def get_user_feeds():
    """Fetch the Hypothes.is activity stream for each configured user.

    Returns:
        dict mapping each Agora username in HYPOTHESIS_USERS to the
        parsed Atom feed of the corresponding Hypothes.is user.
    """
    return {
        agora_user: feedparser.parse(
            f'https://hypothes.is/stream.atom?user={hypothesis_user}'
        )
        for agora_user, hypothesis_user in HYPOTHESIS_USERS.items()
    }
40 |
def get_tag_feeds():
    """Fetch the Hypothes.is stream feed for each configured tag.

    Prints each stream URL as it is fetched and returns the parsed
    Atom feeds, one per entry in HYPOTHESIS_TAGS.
    """
    import urllib.parse
    parsed_feeds = []
    for raw_tag in HYPOTHESIS_TAGS:
        encoded = urllib.parse.quote_plus(raw_tag)
        stream_url = f'https://hypothes.is/stream.atom?tags={encoded}'
        print(stream_url)
        parsed_feeds.append(feedparser.parse(stream_url))
    return parsed_feeds
50 |
def main():
    """Entry point: fetch configured Hypothes.is feeds and dump them.

    In DEBUG mode (the only working mode) this fetches the user and tag
    feeds and pretty-prints each tag feed. The non-debug branch of the
    original referenced undefined names (`api`, `phrase`) and would
    always crash with a NameError, so it now fails loudly instead.
    """
    if DEBUG:
        feeds = get_user_feeds()
        #for user, feed in feeds.items():
        #    for item in feed.entries:
        #        print(f'user: {user}')
        #        pprint.pprint(item)
        #        print('***\n')

        feeds = get_tag_feeds()
        for item in feeds:
            pprint.pprint(item)
            print('***\n')
    else:
        # Bug fix: the original called api.update_status(phrase), but neither
        # `api` nor `phrase` is defined anywhere in this module.
        raise NotImplementedError(
            'non-DEBUG posting is not implemented (api/phrase were undefined)')
66 |
67 |
68 | if __name__ == "__main__":
69 | main()
70 |
--------------------------------------------------------------------------------
/pull.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # Copyright 2021 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | # an [[agora bridge]], that is, a utility that takes a .yaml file describing a set of [[personal knowledge graphs]] or [[digital gardens]] and pulls them to be consumed by other bridges or an [[agora server]]. [[flancian]]
17 |
18 | import argparse
19 | import glob
20 | import logging
21 | import os
22 | import time
23 | import yaml
24 | from multiprocessing import Pool, JoinableQueue, Process
25 | import subprocess
# Remember the launch directory: workers chdir() around while pulling repos.
this_path = os.getcwd()

# for git commands, in seconds.
# Kept as a string because it is passed verbatim as an argv element to the 'timeout' binary.
TIMEOUT="60"
30 |
def dir_path(string):
    """argparse `type=` helper: ensure a directory exists and return its absolute path.

    Creates the directory (and any missing parents) when absent.

    Note: this runs during parse_args(), *before* the logger `L` is
    configured, so it must not reference L. (The original called
    L.error() on mkdir failure, which raised NameError.) os.makedirs
    replaces the old 'mkdir -p' subprocess; an OSError on failure makes
    argparse report a proper error.
    """
    if not os.path.isdir(string):
        print(f"Trying to create {string}.")
        # exist_ok guards against a concurrent creation race.
        os.makedirs(string, exist_ok=True)
    return os.path.abspath(string)
38 |
def _parse_bool(value):
    """argparse `type=` helper that actually parses booleans.

    BUG (fixed): the flags below used type=bool, which treats *any*
    non-empty string as True -- so '--reset False' silently enabled the
    flag. This accepts the spellings the run scripts already use
    ('True'/'False', case-insensitive) plus common variants, so the CLI
    stays backward-compatible.
    """
    if isinstance(value, bool):
        return value
    lowered = str(value).strip().lower()
    if lowered in ('true', 't', '1', 'yes', 'y'):
        return True
    if lowered in ('false', 'f', '0', 'no', 'n', ''):
        return False
    raise argparse.ArgumentTypeError(f"expected a boolean, got {value!r}")

parser = argparse.ArgumentParser(description='Agora Bridge')
parser.add_argument('--config', dest='config', type=argparse.FileType('r'), required=True, help='The path to a YAML file describing the digital gardens to consume.')
parser.add_argument('--output-dir', dest='output_dir', type=dir_path, required=True, help='The path to a directory where the digital gardens will be stored (one subdirectory per user).')
parser.add_argument('--verbose', dest='verbose', type=_parse_bool, default=False, help='Whether to log more information.')
parser.add_argument('--reset', dest='reset', type=_parse_bool, default=False, help='Whether to git reset --hard whenever a pull fails.')
parser.add_argument('--reset_only', dest='reset_only', type=_parse_bool, default=False, help='Whether do reset --hard instead of pulling.')
parser.add_argument('--delay', dest='delay', type=float, default=0.1, help='Delay between pulls.')
args = parser.parse_args()

logging.basicConfig()
L = logging.getLogger('pull')
if args.verbose:
    L.setLevel(logging.DEBUG)
else:
    L.setLevel(logging.INFO)

# Work queue shared by all worker processes; holds (callable, *args) tuples.
Q = JoinableQueue()
WORKERS = 6
57 |
def git_clone(url, path):
    """Clone `url` into `path` unless `path` already exists.

    Returns 42 when skipping (path already present), None otherwise.
    """
    if os.path.exists(path):
        L.info(f"{path} exists, won't clone to it.")
        return 42

    L.info(f"Running git clone {url} to path {path}")

    try:
        output = subprocess.run(['timeout', TIMEOUT, 'git', 'clone', url, path], capture_output=True)
    except subprocess.TimeoutExpired:
        # should not happen since we now call out to 'timeout' command.
        L.warning(f"Couldn't clone repo {url}, skipping.")
        # BUG (fixed): falling through here used 'output' before assignment.
        return

    L.info(output)
    if output.stderr:
        # NOTE: git writes progress to stderr too, so this can log on success.
        L.error(f'Error while cloning {url}: {output.stderr}')
75 |
def git_reset(path):
    """Hard-reset the current working tree to its upstream branch head."""
    L.info(f'Trying to git reset --hard')
    subprocess.run(['timeout', TIMEOUT, 'git', 'fetch', 'origin'])
    # Resolve the checked-out branch name so we reset against the right remote ref.
    head = subprocess.run(['git', 'symbolic-ref', '--short', 'HEAD'], capture_output=True)
    branch = head.stdout.strip().decode("utf-8")
    result = subprocess.run(['timeout', TIMEOUT, 'git', 'reset', '--hard', f'origin/{branch}'], capture_output=True)
    L.info(f'output: {result.stdout}')
    if result.stderr:
        L.error(result.stderr)
85 |
86 |
def git_pull(path):
    """Pull (or, with --reset_only, hard-reset) the repo at `path`.

    Returns 42 if the path is missing; otherwise None.
    """
    if not os.path.exists(path):
        L.warning(f"{path} doesn't exist, couldn't pull to it.")
        return 42

    try:
        os.chdir(path)
    except FileNotFoundError:
        L.error(f"Couldn't pull in {path} due to the directory being missing, clone must be run first")
        # BUG (fixed): we used to fall through and run git in whatever the
        # previous working directory happened to be.
        return

    if args.reset_only:
        git_reset(path)
        return

    # Is there a value to trying pull first? Could we just reset --hard?
    L.info(f"Running git pull in path {path}")
    try:
        output = subprocess.run(['timeout', TIMEOUT, 'git', 'pull'], capture_output=True)
    except subprocess.TimeoutExpired:
        # should not happen since we now call out to 'timeout' command.
        L.warning(f"Error while pulling repo in path {path}, skipping.")
        # BUG (fixed): 'output' was referenced below without being assigned.
        return

    L.info(output.stdout)
    if output.stderr:
        # NOTE: git also writes benign messages to stderr; with --reset this self-heals.
        L.error(f'{path}: {output.stderr}')
        if args.reset:
            git_reset(path)
116 |
def fedwiki_import(url, path):
    # Import a Fedwiki site by shelling out to the bundled fedwiki.sh helper.
    # chdir back to the launch directory first: git_pull() may have moved us.
    os.chdir(this_path)
    output = subprocess.run([f"{this_path}/fedwiki.sh", url, path], capture_output=True)
    L.info(output.stdout)
121 |
def worker():
    """Worker-process loop: take tasks off Q and run them forever.

    A task is a tuple (callable, *args). Pull/import tasks re-enqueue
    themselves so gardens keep getting refreshed.
    """
    from queue import Empty  # multiprocessing queues raise queue.Empty on timeout

    while True:
        L.debug("Queue size: {}".format(Q.qsize()))
        try:
            task = Q.get(block=True, timeout=60)
        except Empty:
            # BUG (fixed): an idle minute used to kill the worker with an
            # unhandled queue.Empty traceback; exit cleanly instead.
            L.info("No work for 60s, worker exiting.")
            return
        task[0](*task[1:])
        Q.task_done()
        # if this is a pull, schedule the same task for another run later.
        if task[0] == git_pull or task[0] == fedwiki_import:
            Q.put(task)
        time.sleep(args.delay)
132 |
def main():
    """Parse the YAML config and fan garden clone/pull tasks out to worker processes."""
    try:
        config = yaml.safe_load(args.config)
    except yaml.YAMLError as e:
        L.error(e)
        # BUG (fixed): we used to fall through with 'config' undefined and
        # crash with NameError; bail out explicitly on a broken config.
        return

    for item in config:
        path = os.path.join(args.output_dir, item['target'])
        if item['format'] == "fedwiki":
            Q.put((fedwiki_import, item['url'], path))
            continue
        # schedule one 'clone' run for every garden, in case this is a new garden (or agora).
        Q.put((git_clone, item['url'], path))
        # pull it once, it will be queued again later from the worker.
        Q.put((git_pull, path))

    processes = []
    for i in range(WORKERS):
        worker_process = Process(target=worker, daemon=True, name='worker_process_{}'.format(i))
        processes.append(worker_process)

    L.info(f"Starting {WORKERS} workers to execute work items.")
    for process in processes:
        process.start()
    # Block until every queued task has been marked done.
    Q.join()

if __name__ == "__main__":
    main()
162 |
--------------------------------------------------------------------------------
/push-image.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker push git.coopcloud.tech/flancian/agora-bridge
4 |
--------------------------------------------------------------------------------
/push.sh:
--------------------------------------------------------------------------------
#!/bin/bash
#
# This simple script tries to autopush (uploads/updates) repositories that this bridge is responsible for updating.
# As of 2023-09-03, this means /stream -- meaning social media activity as dumped by the mastodon and matrix agora bots (twitter is broken due to elon).
#
# As with everything in this directory, if you're running an Agora in bare metal (without using containers/coop cloud) you probably want to run it as a systemd user service.
#
# Based on the equally humble https://gitlab.com/flancia/hedgedoc-export :)
#
# Keep it simple? Or maybe I'm just lazy.
# If this broke you: sorry :)

# BUG (fixed): without this guard a failed cd left us committing/pushing
# whatever repository the shell happened to start in.
cd ~/agora/stream || { echo "~/agora/stream does not exist, nothing to push."; exit 1; }

# YOLO :)
while true; do
  git add .
  git commit -a -m "stream update"
  git push
  sleep 60
done
21 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "agora-bridge"
3 | version = "0.1.0"
4 | description = "https://anagora.org/agora+bridge"
5 | authors = ["Flancian <0@flancia.org>"]
license = "Apache-2.0"
7 | readme = "README.md"
8 | package-mode = false
9 |
10 | [tool.poetry.dependencies]
11 | python = "^3.8,<3.13"
12 | feedparser = "^6.0.10"
13 | html2text = "^2020.1.16"
14 | python-slugify = "^6.1.2"
15 | PyYAML = "^6.0"
16 | sgmllib3k = "^1.0.0"
17 | text-unidecode = "^1.3"
18 | Flask = "^2.2.2"
19 | jsons = "^1.6.3"
20 | mastodon-py = "^1.5.2"
21 | atproto = "^0.0.48"
22 | flask-sqlalchemy = "^3.1.1"
23 | flask-migrate = "^4.1.0"
24 | flask-login = "^0.6.3"
25 | flask-oauthlib = "^0.9.6"
26 | python-dotenv = "^1.0.1"
27 | sqlalchemy = "^2.0.38"
28 |
29 |
30 | [build-system]
31 | requires = ["poetry-core"]
32 | build-backend = "poetry.core.masonry.api"
33 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | feedparser==6.0.2
2 | html2text==2020.1.16
3 | python-slugify==4.0.1
4 | PyYAML==5.4.1
5 | sgmllib3k==1.0.0
6 | text-unidecode==1.3
7 |
--------------------------------------------------------------------------------
/run-api-dev.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright 2020 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | echo "If this doesn't work, install poetry and run 'poetry install' as per README.md first."
17 | # This shouldn't be needed but it is when running as a systemd service for some reason.
18 | export PATH=$HOME/.local/bin:${PATH}
19 |
20 | export FLASK_APP=api
21 | export AGORA_CONFIG="DevelopmentConfig"
22 | poetry run flask run -h 0.0.0.0 -p 5018
23 |
--------------------------------------------------------------------------------
/run-dev.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright 2020 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | echo "If this doesn't work, install poetry and run 'poetry install' as per README.md first."
17 | # This shouldn't be needed but it is when running as a systemd service for some reason.
18 | export PATH=$HOME/.local/bin:${PATH}
19 | poetry run ./pull.py --config ~/agora/sources.yaml --output-dir ~/agora --delay 1 --reset_only True
20 |
--------------------------------------------------------------------------------
/run-docker.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # This assumes... many things :) See README.md, Dockerfile and ./run-*.sh for more.
3 | # This runs an Agora in an interactive container, mounting 'agora' in your home directory as the Agora root.
4 | # For a supported way to run an Agora on containers, please refer to [[agora recipe]] for [[coop cloud]] in the Agora of Flancia: https://anagora.org/agora-recipe
5 |
6 | docker run -it -p 5018:5018 -v ${HOME}/agora:/home/agora/agora -u agora git.coopcloud.tech/flancian/agora-bridge
7 |
--------------------------------------------------------------------------------
/run-prod.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright 2020 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | echo "If this doesn't work, install poetry and run 'poetry install' as per README.md first."
17 | # This shouldn't be needed but it is when running as a systemd service for some reason.
18 | export PATH=$HOME/.local/bin:${PATH}
19 |
20 | # Clean up lock files.
21 | ./clean.sh
22 |
23 | # Try to push as well as pull to update social media activity upstream if we have access :)
24 | ./push.sh &
25 |
26 | # Pull for the greater good! :)
27 | poetry run ./pull.py --config ~/agora/sources.yaml --output-dir ~/agora --reset True
28 |
--------------------------------------------------------------------------------
/run-vera.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright 2020 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | . venv/bin/activate
17 | ./pull.py --config test.yaml --output-dir ~/agora
18 |
--------------------------------------------------------------------------------
/setup.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Bootstrap a development environment: verify python3/pip3 are present, then
# create a venv and install the pinned requirements into it.

# BUG (fixed): the old '(... && exit 42)' ran exit inside a subshell, so the
# script carried on even when python3/pip3 were missing. Fail for real now.
if ! command -v python3 >/dev/null || ! command -v pip3 >/dev/null; then
    echo 'Please install Python3 and pip using your OS packaging system. In Debian: sudo apt-get install python3 python3-venv python3-pip'
    exit 42
fi

python3 -m venv venv &&
. venv/bin/activate &&
pip3 install -r requirements.txt

echo "see agora-bridge.service and https://anagora.org/systemd for pointers on how to set up a production agora as a system service."
10 |
--------------------------------------------------------------------------------
/sqlite-import/.env.sample:
--------------------------------------------------------------------------------
1 | GARDEN_DIR="/path/to/garden/folder" # Optional
2 | STREAM_DIR="/path/to/stream/folder" # Optional
3 | ROOT_DIR="/path/to/root/folder" # Optional
4 | AGORA_DB="/path/to/agora/database" # Sqlite database file; will be current folder if not set
5 |
--------------------------------------------------------------------------------
/sqlite-import/.gitignore:
--------------------------------------------------------------------------------
1 | /node_modules
2 | garden.db*
3 | agora.db*
4 |
--------------------------------------------------------------------------------
/sqlite-import/README.md:
--------------------------------------------------------------------------------
1 | # This is the project that imports a set of agora flat files and exports a sqlite database
2 |
3 | - Firstly, make sure to `cd sqlite-import` to make this your working directory.
4 | - Make sure to `cp .env.sample .env` and edit your paths.
5 | - `GARDEN_DIR` is path to your agora garden which contains user folders and files
6 | - `STREAM_DIR` is path to the repo containing stream data
  - `ROOT_DIR` is the path to your community's agora root folder
8 | - `AGORA_DB` path to your agora database sqlite output file
9 | - Run the import with `npm run import`
10 |
--------------------------------------------------------------------------------
/sqlite-import/fixtures/subnode.html:
--------------------------------------------------------------------------------
1 |
2 | #push [[What is the Agora]]?
3 |
4 | -
5 | I've been wanting to write a special node which acts as explainer to the
6 | Agora that should be accessible to the average (?) internet browser, in
7 | the sense of a person browsing the internet.
8 |
9 | -
10 | Node [[agora]] was maybe originally that but it has amassed a lot of
11 | historical content which makes it harder to offer a 'curated' primer
12 | experience.
13 |
14 | -
15 | I've also been thinking about this as a [[WTF]] button which we could
16 | render in red up top, with the milder tooltip 'I don't understand / what
17 | is this place anyway?'
18 |
19 | -
20 | Surely writing this would be an interesting challenge in the first place
21 | :) The Agora is many things, at least to me, and probably to all the
22 | people already in the Agora of Flancia; and it has accreted layers
23 | (meanings) as time goes by.
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/sqlite-import/fixtures/subnode.md:
--------------------------------------------------------------------------------
1 | - Woke up by [[Bodensee]].
2 | - Will miss [[Flancia meet]] today as I temporarily don't have internet connectivity.
3 | - Will try to catch up later with people who were/are around! Apologies for missing it.
4 | - [[Flancia meet]] topics as I expected them
5 | - [[docker]]
6 | - [[agora recipe]] is running on [[coop cloud]], which is nice (this is what is serving link.agor.ai) but it needs some improvements:
7 | - It should be easier to override Agora settings from the coop cloud recipe proper, e.g. Agora name and sources. This could take place in the form of mounting agora.yaml as a config file?
8 | - It should be able to run one or more of the Agora bots which are part of [[agora bridge]] but currently not running for any Agora in agor.ai.
9 | - [[activitypub]]
10 | - Still unsure about whether to implement first-party support in e.g. [[agora server]], or to write a separate activitypub component (where? maybe in bridge?), or to rely on an existing implementation like the canonical golang one which seems quite mature and is geared precisely towards API usage (doesn't offer
11 | - #push [[What is the Agora]]?
12 | - I've been wanting to write a special node which acts as explainer to the Agora that should be accessible to the average (?) internet browser, in the sense of a person browsing the internet.
13 | - Node [[agora]] was maybe originally that but it has amassed a lot of historical content which makes it harder to offer a 'curated' primer experience.
14 | - I've also been thinking about this as a [[WTF]] button which we could render in red up top, with the milder tooltip 'I don't understand / what is this place anyway?'
15 | - Surely writing this would be an interesting challenge in the first place :) The Agora is many things, at least to me, and probably to all the people already in the Agora of Flancia; and it has accreted layers (meanings) as time goes by.
16 | - [[Jerome]] told me about [[Beaufort]] cheese yesterday.
17 |
18 | As I sit here with my laptop (with [[vim]]) and no internet connection, I realize that I don't write here longform as much as I could. I guess the availability of the internet does make it easier for me to get distracted, which granted I see sometimes as a positive (it motivates a form of exploration), but might not be conducive to practicing the skill of writing coherently and consistently for more than a few bullet points in each journal.
19 |
20 | The thought of writing in my blog again (meaning https://flancia.org/mine) has come up a few times recently. I'm unsure; I like the process of writing in my garden, and how everything I write in it automatically shows up in the Agora moments later (at least when I have an internet connection). So maybe what I want is to embrace this space as a blog, and just try to write longer form alongside with my mainly outline-style notes, like other Agora users already do so beautifully.
21 |
22 | - [[todo]] maybe this weekend
23 | - [ ] Upload social media activity gathered by the [[agora bots]] to git repos.
24 | - This one has been in the back burner for a while and doesn't sound very hard.
25 | - It would also remove one of the main reasons to keep making full Agora backups -- which keep causing low disk space events in the Flancia servers.
26 | - All in all good bang-for-the-buck to start the weekend.
27 | - [ ] Fix hedgedoc
28 | - I think hedgedoc is not syncing to the Agora, the syncing process has some bugs at least -- while I'm dealing with 'git autopush' as per the above, it'd be a good time to take another look at this process and see if it can be made incrementally better.
29 | - [ ] Actually autopull [[etherpad]] or [[hedgedoc]] on empty nodes
30 | - I realized the other day this is quite simple; I tried this a few times in the past and ended up disabling autopull of the stoas because it can be disruptive (they tend to steal focus when pulled), but the disruption is really just because they are in the wrong position for empty nodes. Because empty nodes render on a separate template path, it should be straighforward to just embed the right stoa _right there_ in the 'nobody has noded this yet' message, making the stoa onboarding experience much more convenient.
31 | - merge PRs
32 | - [x] Aram's
33 | - [x] vera formatting
34 | - [ ] vera sqlite
35 | - [ ] update journals page
36 | - formatting of the page is all different/weird
37 | - [ ] the pull of flancia.org/mine is broken above because of the parenthesis -- how to fix that?
38 | - [ ] update [[patera]] to something non ancient?
39 | - whatever is running on [[hypatia]]?
40 |
--------------------------------------------------------------------------------
/sqlite-import/lib/files.js:
--------------------------------------------------------------------------------
1 | "use strict";
2 |
3 | import { execSync } from "node:child_process";
4 | import * as parser from "./parser.js";
5 | import fs from "fs";
6 | import path from "node:path";
7 | import { Sha, Subnode } from "./model.js";
// Garden root on disk (see .env.sample); callers pass explicit folder paths,
// so this env var is the only module-level configuration hook here.
const GARDEN_DIR = process.env.GARDEN_DIR;

// Ensure both tables exist before any import work runs.
// (Top-level await: this file is an ES module.)
await Subnode.sync();
await Sha.sync();
12 |
/**
 * Get the entries (files and directories) under every directory at a given
 * path, recursively, as paths relative to distPath. Top-level *files* are
 * excluded; the caller lists those itself.
 *
 * BUG (fixed): despite the "and its subdirectories" contract, the previous
 * implementation only descended one level, so markdown files nested deeper
 * than userDir/subdir/file.md were silently missed on full imports.
 *
 * @param {string} distPath - The path to the directory.
 * @returns {string[]} - An array of relative entry paths.
 */
function deepGetDirectories(distPath) {
  const results = [];

  // List a directory's entries as paths relative to distPath.
  const listDir = (relDir) =>
    fs
      .readdirSync(path.join(distPath, relDir))
      .map((entry) => path.join(relDir, entry));

  // Depth-first walk, pushing every entry and recursing into directories.
  const walk = (relDir) => {
    for (const relPath of listDir(relDir)) {
      results.push(relPath);
      if (fs.statSync(path.join(distPath, relPath)).isDirectory()) {
        walk(relPath);
      }
    }
  };

  for (const entry of fs.readdirSync(distPath)) {
    if (!fs.statSync(path.join(distPath, entry)).isDirectory()) continue;
    if (entry.startsWith(".")) {
      // Hidden trees (notably .git): keep the old one-level listing but do
      // not enumerate their whole contents -- walking git's object store
      // would be pathological and can never yield markdown subnodes.
      results.push(...listDir(entry));
    } else {
      walk(entry);
    }
  }

  return results;
}
39 |
/**
 * Process the files in a user's folder and upsert them as subnodes.
 *
 * Incremental: compares the repo's current commit SHA against the last SHA
 * stored for the user and only reprocesses files git reports as changed.
 *
 * @param {string} user - The name of the user.
 * @param {Object} folder - Source folder descriptor; reads folder.path and
 *   folder.type ("garden" means one git repo per user, otherwise the folder
 *   itself is the repo).
 */
export async function processFolder(user, folder) {
  // Get the path to the user's folder
  let user_path = path.join(folder.path, user);

  let current_sha = "";
  let last_sha = "";
  // Get the last stored SHA for the user
  let sha = await Sha.findOne({ where: { user } });
  if (sha) last_sha = sha.last_sha.toString().trim();

  // Initialize an empty array for the files
  let files = [];
  try {
    // try to get sha, might not be git folder i.e. streams
    // Get the SHA of the current commit
    if (folder.type == "garden") {
      current_sha = execSync(`git --git-dir=${user_path}/.git rev-parse @`);
    } else {
      current_sha = execSync(`git --git-dir=${folder.path}/.git rev-parse @`);
    }
  } catch (e) {
    console.log(e.message);
  }

  // If there is a last stored SHA
  if (last_sha) {
    let output = "";
    // Get the file names that changed between the last stored SHA and the current commit
    // NOTE(review): this branch never writes current_sha back to the Sha
    // table, so later runs keep diffing against the same old SHA -- confirm
    // whether that is intentional (idempotent but ever-growing diffs).
    if (folder.type == "garden") {
      output = execSync(
        `git --git-dir=${user_path}/.git diff --name-only ${last_sha} @`
      ).toString();
    } else {
      output = execSync(
        `git --git-dir=${folder.path}/.git diff --name-only ${last_sha} @`
      ).toString();
    }
    files = output.split("\n").filter((name) => name != "");
  } else {
    // Store the current commit SHA for the user
    if (!sha) {
      await Sha.create({ user, last_sha: current_sha });
    } else {
      // Update the last stored SHA with the current commit SHA
      await Sha.update({ last_sha: current_sha }, { where: { user } });
    }

    // Get all files and directories in the user's folder recursively
    files = fs.readdirSync(user_path);
    files = files.concat(deepGetDirectories(path.join(folder.path, user)));
  }

  // chunk file processing to save memory
  let chunkSize = 100;
  let chunks = [];
  let subnodes = [];
  for (let i = 0; i < files.length; i += chunkSize) {
    chunks.push(files.slice(i, i + chunkSize));
  }

  for (const chunk of chunks) {
    let files = chunk;
    // Process the Markdown files in the user's folder
    for (const file of files) {
      // Get the file extension
      let ext = file.split(".").pop();

      // If the file is not a Markdown file, skip it
      if (ext !== "md") continue;

      // Get the title of the file
      // (strip extension, drop directories, lowercase)
      let title = file
        .replace(/\.[^/.]+$/, "")
        .split("/")
        .pop()
        .toLowerCase();

      try {
        // Process the file
        let subnode = await processFile(
          path.join(folder.path, user, file),
          title,
          user
        );
        subnodes.push(subnode);
      } catch (e) {
        // Deleted/unreadable files surface here; log and keep going.
        console.log(e.message);
      }
    }
  }
  // Upsert on the (user, title) unique key.
  // NOTE(review): "links" is listed here and set by processFile, but the
  // Subnode model declares the column as `links_to` -- confirm which name
  // the schema/consumers actually expect.
  await Subnode.bulkCreate(subnodes, {
    updateOnDuplicate: ["title", "user", "body", "links", "pushes"],
  });
}
/**
 * Build a subnode record from a single markdown file.
 *
 * @param {string} file - The path to the file to be processed.
 * @param {string} title - The title of the subnode.
 * @param {string} user - The user associated with the subnode.
 * @returns {Promise<Object>} Subnode object ({title, user, body, links,
 *   pushes, updated}) ready for bulk insert.
 */
async function processFile(file, title, user) {
  // Load the markdown source.
  const body = fs.readFileSync(file).toString();

  // Wikilinks, serialized to JSON for storage.
  const links = JSON.stringify(parseLinks(body));

  // Push items ('#push' list entries), serialized to JSON; skip the
  // relatively expensive parse when the marker is absent.
  let pushes = "[]";
  if (body.includes("#push")) {
    pushes = JSON.stringify(parser.pushes(body));
  }

  // Record timestamp, ISO format.
  const updated = new Date().toISOString();

  // Return the record for the caller's bulk insert.
  return { title, user, body, links, pushes, updated };
}
/**
 * Parses links from content.
 *
 * @param {string} content - The content to parse links from.
 * @returns {Array} - An array of parsed links, in order of appearance.
 */
function parseLinks(content) {
  // Non-greedy match on the [[...]] wikilink syntax.
  const wikilink = /\[\[(.*?)\]\]/g;

  const links = [];
  for (const match of content.matchAll(wikilink)) {
    // match[1] is the capture group: the link target without brackets.
    links.push(match[1]);
  }
  return links;
}
197 |
--------------------------------------------------------------------------------
/sqlite-import/lib/files.test.js:
--------------------------------------------------------------------------------
import { expect, test } from 'vitest'

// BUG (fixed): the bare `test()` call here threw at collection time --
// vitest requires a name (and normally a function). Record the missing
// coverage as an explicit todo instead.
test.todo('cover lib/files.js')
--------------------------------------------------------------------------------
/sqlite-import/lib/model.js:
--------------------------------------------------------------------------------
import { Sequelize, DataTypes } from "sequelize";
// Sqlite database file; defaults to the current folder (see .env.sample).
const AGORA_DB = process.env.AGORA_DB || "./agora.db";
const seq = new Sequelize({
  dialect: "sqlite",
  storage: AGORA_DB,
  logging: console.log, // logs every SQL statement; noisy but useful during imports
});

// One markdown file in a user's garden, uniquely identified by (user, title).
export const Subnode = seq.define(
  "Subnode",
  {
    title: {
      type: DataTypes.STRING,
      unique: "user_title",
    },
    user: {
      type: DataTypes.STRING,
      unique: "user_title",
    },
    // Raw markdown body of the file.
    body: {
      type: DataTypes.TEXT,
    },
    // JSON-serialized array of [[wikilink]] targets.
    // NOTE(review): files.js writes this value under the key `links` (and
    // lists "links" in updateOnDuplicate), so `links_to` may never be
    // populated -- confirm which name downstream consumers expect before
    // renaming either side.
    links_to: {
      type: DataTypes.TEXT,
    },
    // JSON-serialized array of {title, markdown} push items.
    pushes: {
      type: DataTypes.TEXT,
    },
  },
  {
    tableName: "subnodes",
    // Composite unique key drives the upsert in files.js bulkCreate.
    uniqueKeys: {
      user_title: {
        fields: ["user", "title"],
      },
    },
  }
);

// Tracks the last git commit SHA imported per user, for incremental imports.
export const Sha = seq.define(
  "Sha",
  {
    user: {
      type: DataTypes.STRING,
      unique: true,
    },
    last_sha: {
      type: DataTypes.STRING,
    },
  },
  {
    tableName: "shas",
  }
);
55 |
--------------------------------------------------------------------------------
/sqlite-import/lib/parser.js:
--------------------------------------------------------------------------------
import $ from "jquery"
import showdown from "showdown"
import jsdom from "jsdom"
import TurndownService from "turndown"
// Deliberate prototype patch: disable turndown's markdown escaping so
// [[wikilinks]] and similar syntax survive the HTML -> markdown round trip.
TurndownService.prototype.escape = function (text) {
  return text
}
let turndownService = new TurndownService()
let converter = new showdown.Converter()
// Keep loosely-indented sublists intact and match GitHub-flavored markdown.
converter.setOption("disableForced4SpacesIndentedSublists", true)
converter.setFlavor("github")
11 | converter.setFlavor("github")
/**
 * Parses the given markdown body and returns a jQuery object representing the parsed HTML.
 *
 * @param {string} body - The markdown body to be parsed.
 * @return {object} - The jQuery object representing the parsed HTML.
 */
export function parse(body) {
  // markdown -> HTML -> DOM -> jQuery, so callers can use CSS selectors.
  const rendered = converter.makeHtml(body)
  const dom = new jsdom.JSDOM(rendered)
  return $(dom.window)
}
24 |
/**
 * Converts HTML to Markdown.
 *
 * @param {string} html - The HTML to be converted.
 * @return {string} The Markdown representation of the HTML.
 */
export function toMarkdown(html) {
  // Uses the module-level turndown instance (escaping disabled above).
  const markdown = turndownService.turndown(html)
  return markdown
}
34 |
/**
 * Retrieves a list of items from the given markdown that contain the string
 * '#push' in the list item.
 *
 * @param {string} markdown - The markdown to parse and search through.
 * @return {Array} An array of objects containing the title and markdown of
 *   each list item that matches the search string. If a matching list item
 *   has no [[wikilink]], its title is null.
 */
export function pushes(markdown) {
  /**
   * Represents a push item with a title and markdown content.
   *
   * @typedef {Object} PushItem
   * @property {?string} title - The title of the push item (from the first
   *   [[wikilink]] in the list item), or null when none is present.
   * @property {string} markdown - The markdown of the push item.
   */
  let jq = parse(markdown)

  /** @type {Array} */
  let results = jq("li:contains('#push')").toArray().map(e => {
    // BUG FIX: the previous pattern /\[\[(?.*?)\]\]/g was a regex syntax
    // error ("(?" opens an invalid group — a named group whose name was
    // lost). Use a plain capture group and guard against list items that
    // contain no wikilink, which previously threw a TypeError on [1].
    let match = /\[\[(.*?)\]\]/.exec(e.outerHTML)
    let title = match ? match[1] : null

    /** @type {PushItem} */
    let push = { title, markdown: toMarkdown(e.outerHTML) }
    return push
  })

  return results
}
63 |
--------------------------------------------------------------------------------
/sqlite-import/lib/parser.test.js:
--------------------------------------------------------------------------------
1 | import { expect, test } from 'vitest'
2 | import { parse, toMarkdown, pushes } from './parser'
3 | import fs from 'fs'
// A fixture with exactly one '#push' list item must yield one result whose
// title is taken from the [[wikilink]] inside that item.
test('parses push link', () => {
  const fixture = fs.readFileSync('fixtures/subnode.md').toString()
  const results = pushes(fixture)
  console.log({ results })
  expect(results).toHaveLength(1)
  expect(results[0].title).toBe("What is the Agora")
})
11 |
// Smoke test: we don't re-test the Turndown library's formatting in depth,
// but a test with zero assertions passes vacuously — assert at least that
// the conversion produced a non-empty markdown string.
test('convert html to markdown', () => {
  const html = fs.readFileSync('fixtures/subnode.html').toString()
  const markdown = toMarkdown(html)
  console.log(markdown)
  expect(typeof markdown).toBe('string')
  expect(markdown.length).toBeGreaterThan(0)
})
--------------------------------------------------------------------------------
/sqlite-import/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "sqlite-import",
3 | "version": "0.1.0",
4 | "description": "import folders into sqlite database",
5 | "main": "sqlite-import.js",
6 | "scripts": {
7 | "test": "vitest",
8 | "import": "node sqlite-import.js"
9 | },
10 | "author": "github.com/codegod100",
11 | "license": "MIT",
12 | "type": "module",
13 | "dependencies": {
14 | "jquery": "^3.7.1",
15 | "jsdom": "^22.1.0",
16 | "sequelize": "^6.32.1",
17 | "showdown": "^2.1.0",
18 | "sqlite3": "^5.1.6",
19 | "turndown": "^7.1.2",
20 | "vitest": "^0.34.3"
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/sqlite-import/sqlite-import.js:
--------------------------------------------------------------------------------
1 | import { processFolder } from "./lib/files.js";
2 | import fs from "fs";
3 |
4 | const GARDEN_DIR = process.env.GARDEN_DIR;
5 | const STREAM_DIR = process.env.STREAM_DIR;
6 | const ROOT_DIR = process.env.ROOT_DIR;
7 |
// One entry per import source; entries whose env var is unset are skipped.
// "garden" and "stream" folders contain one subdirectory per user, while a
// "root" folder IS a single user's folder (user name = last path segment).
const folders = [
  { type: "garden", path: GARDEN_DIR },
  { type: "stream", path: STREAM_DIR },
  { type: "root", path: ROOT_DIR },
];

for (const folder of folders) {
  if (!folder.path) {
    continue;
  }
  if (folder.type == "root") {
    // BUG FIX: the previous code split/popped folder.path inside the
    // per-user loop, so every iteration after the first stripped another
    // path segment and called processFolder with a corrupted path. A root
    // folder maps to exactly one user: derive the user name and parent
    // directory once, without mutating the shared folder object.
    const segments = folder.path.split("/");
    const user = segments.pop();
    await processFolder(user, { ...folder, path: segments.join("/") });
    continue;
  }
  // Each immediate subdirectory of a garden/stream folder is a user.
  const users = fs.readdirSync(folder.path);
  for (const user of users) {
    await processFolder(user, folder);
  }
}
28 |
--------------------------------------------------------------------------------
/test.yaml:
--------------------------------------------------------------------------------
1 |
2 | # - [[flancian]]
3 | # - this file is meant to be consumed by [[agora bridge]]: https://github.com/flancian/agora-bridge.
4 | # - for a specification of this format, please consult https://anagora.org/agora-bridge.
5 | # - for instructions to set up a fully working [[agora]] using this and other repos, please consult https://anagora.org/agora-install or the root repository: https://github.com/flancian/agora.
6 |
7 |
8 | - target: garden/vera.wiki.anagora.org
9 | url: http://vera.wiki.anagora.org
10 | format: fedwiki
11 |
12 |
--------------------------------------------------------------------------------
/update.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright 2022 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | # agora-bridge runs as a user systemd service
17 | systemctl --user restart agora-bridge
18 |
--------------------------------------------------------------------------------