├── .bumpversion.cfg
├── .gitignore
├── .travis.yml
├── Makefile
├── Pipfile
├── Pipfile.lock
├── README.md
├── config-example.json
├── notion_sync
│   ├── __init__.py
│   └── __main__.py
├── scripts
│   ├── bump-version-from-commit.sh
│   └── push-and-publish-if-changed.sh
└── setup.py
/.bumpversion.cfg:
--------------------------------------------------------------------------------
1 | [bumpversion]
2 | current_version = 0.3.2
3 | commit = True
4 | tag = True
5 |
6 | [bumpversion:file:setup.py]
7 |
8 | [bumpversion:file:notion_sync/__init__.py]
9 |
10 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | .hypothesis/
51 | .pytest_cache/
52 |
53 | # Translations
54 | *.mo
55 | *.pot
56 |
57 | # Django stuff:
58 | *.log
59 | local_settings.py
60 | db.sqlite3
61 |
62 | # Flask stuff:
63 | instance/
64 | .webassets-cache
65 |
66 | # Scrapy stuff:
67 | .scrapy
68 |
69 | # Sphinx documentation
70 | docs/_build/
71 |
72 | # PyBuilder
73 | target/
74 |
75 | # Jupyter Notebook
76 | .ipynb_checkpoints
77 |
78 | # IPython
79 | profile_default/
80 | ipython_config.py
81 |
82 | # pyenv
83 | .python-version
84 |
85 | # pipenv
86 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
87 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
88 | # having no cross-platform support, pipenv may install dependencies that don’t work, or not
89 | # install all needed dependencies.
90 | #Pipfile.lock
91 |
92 | # celery beat schedule file
93 | celerybeat-schedule
94 |
95 | # SageMath parsed files
96 | *.sage.py
97 |
98 | # Environments
99 | .env
100 | .venv
101 | env/
102 | venv/
103 | ENV/
104 | env.bak/
105 | venv.bak/
106 |
107 | # Spyder project settings
108 | .spyderproject
109 | .spyproject
110 |
111 | # Rope project settings
112 | .ropeproject
113 |
114 | # mkdocs documentation
115 | /site
116 |
117 | # mypy
118 | .mypy_cache/
119 | .dmypy.json
120 | dmypy.json
121 |
122 | # Pyre type checker
123 | .pyre/
124 |
125 | # secrets
126 | config.json
127 |
128 | # tool output
129 | out
130 |
131 | # build intermediates
132 | requirements.txt
133 | requirements-dev.txt
134 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | dist: xenial
2 | language: python
3 | python:
4 | - "3.7"
5 | env:
6 | global:
7 | - PIPENV_VENV_IN_PROJECT=1
8 | - PIPENV_IGNORE_VIRTUALENVS=1
9 | install: make setup
10 | script: make travis
11 | deploy:
12 | skip_cleanup: true
13 | provider: script
14 | script: make travis-deploy
15 | on:
16 | branch: master
17 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 |
2 | test: lint-check
3 |
4 | lint-check:
5 | pipenv run black --check *.py **/*.py
6 |
7 | lint:
8 | pipenv run black *.py **/*.py
9 |
10 | setup:
11 | pip install pipenv
12 | pipenv install --dev --three
13 |
14 | clean:
15 | rm -rf dist
16 |
17 | #---- Packaging ----
18 |
19 | package: requirements.txt clean *.py */*.py
20 | pipenv run python setup.py sdist bdist_wheel
21 |
22 | requirements.txt: Pipfile Pipfile.lock
23 | pipenv run pipenv_to_requirements
24 |
25 | #---- Publishing ----
26 |
27 | check-package: package
28 | pipenv run twine check dist/*
29 |
30 | publish-test: check-package
31 | pipenv run twine upload --repository-url https://test.pypi.org/legacy/ dist/*
32 |
33 | publish-prod: check-package
34 | pipenv run twine upload dist/*
35 |
36 | publish-travis: check-package
37 | pipenv run twine upload dist/* -u $$PYPI_USERNAME -p $$PYPI_PASSWORD
38 |
39 | travis:
40 | make test
41 |
42 | travis-deploy:
43 | git checkout master ;
44 | ./scripts/bump-version-from-commit.sh ;
45 | ./scripts/push-and-publish-if-changed.sh ;
46 |
--------------------------------------------------------------------------------
/Pipfile:
--------------------------------------------------------------------------------
1 | [[source]]
2 | name = "pypi"
3 | url = "https://pypi.org/simple"
4 | verify_ssl = true
5 |
6 | [dev-packages]
7 | black = {version = "*",editable = true}
8 | bumpversion = {version = "*",editable = true}
9 | twine = {version = "*",editable = true}
10 | pipenv-to-requirements = "*"
11 |
12 | [packages]
13 | notion = {version = "*",editable = true}
14 | urllib3 = {version = ">=1.24.2",editable = true}
15 |
16 | [requires]
17 | python_version = "3.7"
18 |
19 | [pipenv]
20 | allow_prereleases = true
21 |
--------------------------------------------------------------------------------
/Pipfile.lock:
--------------------------------------------------------------------------------
1 | {
2 | "_meta": {
3 | "hash": {
4 | "sha256": "695a58501b6fca3728d0b5b8f08560cacf7a5690c7c9245452ea2730a0c87685"
5 | },
6 | "pipfile-spec": 6,
7 | "requires": {
8 | "python_version": "3.7"
9 | },
10 | "sources": [
11 | {
12 | "name": "pypi",
13 | "url": "https://pypi.org/simple",
14 | "verify_ssl": true
15 | }
16 | ]
17 | },
18 | "default": {
19 | "beautifulsoup4": {
20 | "hashes": [
21 | "sha256:034740f6cb549b4e932ae1ab975581e6103ac8f942200a0e9759065984391858",
22 | "sha256:945065979fb8529dd2f37dbb58f00b661bdbcbebf954f93b32fdf5263ef35348",
23 | "sha256:ba6d5c59906a85ac23dadfe5c88deaf3e179ef565f4898671253e50a78680718"
24 | ],
25 | "version": "==4.7.1"
26 | },
27 | "bs4": {
28 | "hashes": [
29 | "sha256:36ecea1fd7cc5c0c6e4a1ff075df26d50da647b75376626cc186e2212886dd3a"
30 | ],
31 | "version": "==0.0.1"
32 | },
33 | "cached-property": {
34 | "hashes": [
35 | "sha256:3a026f1a54135677e7da5ce819b0c690f156f37976f3e30c5430740725203d7f",
36 | "sha256:9217a59f14a5682da7c4b8829deadbfc194ac22e9908ccf7c8820234e80a1504"
37 | ],
38 | "version": "==1.5.1"
39 | },
40 | "certifi": {
41 | "hashes": [
42 | "sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5",
43 | "sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae"
44 | ],
45 | "version": "==2019.3.9"
46 | },
47 | "chardet": {
48 | "hashes": [
49 | "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
50 | "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
51 | ],
52 | "version": "==3.0.4"
53 | },
54 | "commonmark": {
55 | "hashes": [
56 | "sha256:14c3df31e8c9c463377e287b2a1eefaa6019ab97b22dad36e2f32be59d61d68d",
57 | "sha256:867fc5db078ede373ab811e16b6789e9d033b15ccd7296f370ca52d1ee792ce0"
58 | ],
59 | "version": "==0.9.0"
60 | },
61 | "dictdiffer": {
62 | "hashes": [
63 | "sha256:97cf4ef98ebc1acf737074aed41e379cf48ab5ff528c92109dfb8e2e619e6809",
64 | "sha256:b3ad476fc9cca60302b52c50e1839342d2092aeaba586d69cbf9249f87f52463"
65 | ],
66 | "version": "==0.8.0"
67 | },
68 | "future": {
69 | "hashes": [
70 | "sha256:67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8"
71 | ],
72 | "version": "==0.17.1"
73 | },
74 | "idna": {
75 | "hashes": [
76 | "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
77 | "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
78 | ],
79 | "version": "==2.8"
80 | },
81 | "notion": {
82 | "hashes": [
83 | "sha256:22c148e02199ec0cb9cf911a4fec925394fb99a60fb2ef41a5801c470b5d40bd",
84 | "sha256:e95f7d984ff0f44e77bc3e450f6f481f7e7a267a7b68b5544c560ce971e2f313"
85 | ],
86 | "index": "pypi",
87 | "version": "==0.0.20"
88 | },
89 | "python-slugify": {
90 | "hashes": [
91 | "sha256:57163ffb345c7e26063435a27add1feae67fa821f1ef4b2f292c25847575d758"
92 | ],
93 | "version": "==3.0.2"
94 | },
95 | "pytz": {
96 | "hashes": [
97 | "sha256:303879e36b721603cc54604edcac9d20401bdbe31e1e4fdee5b9f98d5d31dfda",
98 | "sha256:d747dd3d23d77ef44c6a3526e274af6efeb0a6f1afd5a69ba4d5be4098c8e141"
99 | ],
100 | "version": "==2019.1"
101 | },
102 | "requests": {
103 | "hashes": [
104 | "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
105 | "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
106 | ],
107 | "version": "==2.21.0"
108 | },
109 | "soupsieve": {
110 | "hashes": [
111 | "sha256:6898e82ecb03772a0d82bd0d0a10c0d6dcc342f77e0701d0ec4a8271be465ece",
112 | "sha256:b20eff5e564529711544066d7dc0f7661df41232ae263619dede5059799cdfca"
113 | ],
114 | "version": "==1.9.1"
115 | },
116 | "text-unidecode": {
117 | "hashes": [
118 | "sha256:5a1375bb2ba7968740508ae38d92e1f889a0832913cb1c447d5e2046061a396d",
119 | "sha256:801e38bd550b943563660a91de8d4b6fa5df60a542be9093f7abf819f86050cc"
120 | ],
121 | "version": "==1.2"
122 | },
123 | "tzlocal": {
124 | "hashes": [
125 | "sha256:27d58a0958dc884d208cdaf45ef5892bf2a57d21d9611f2ac45e51f1973e8cab",
126 | "sha256:f124f198e5d86b3538b140883472beaa82d2c0efc0cd9694dfdbe39079e22e69"
127 | ],
128 | "version": "==2.0.0b1"
129 | },
130 | "urllib3": {
131 | "hashes": [
132 | "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4",
133 | "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb"
134 | ],
135 | "index": "pypi",
136 | "version": "==1.24.3"
137 | }
138 | },
139 | "develop": {
140 | "appdirs": {
141 | "hashes": [
142 | "sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92",
143 | "sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e"
144 | ],
145 | "version": "==1.4.3"
146 | },
147 | "attrs": {
148 | "hashes": [
149 | "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
150 | "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
151 | ],
152 | "version": "==19.1.0"
153 | },
154 | "black": {
155 | "hashes": [
156 | "sha256:09a9dcb7c46ed496a9850b76e4e825d6049ecd38b611f1224857a79bd985a8cf",
157 | "sha256:68950ffd4d9169716bcb8719a56c07a2f4485354fec061cdd5910aa07369731c"
158 | ],
159 | "index": "pypi",
160 | "version": "==19.3b0"
161 | },
162 | "bleach": {
163 | "hashes": [
164 | "sha256:213336e49e102af26d9cde77dd2d0397afabc5a6bf2fed985dc35b5d1e285a16",
165 | "sha256:3fdf7f77adcf649c9911387df51254b813185e32b2c6619f690b593a617e19fa"
166 | ],
167 | "version": "==3.1.0"
168 | },
169 | "bumpversion": {
170 | "hashes": [
171 | "sha256:6744c873dd7aafc24453d8b6a1a0d6d109faf63cd0cd19cb78fd46e74932c77e",
172 | "sha256:6753d9ff3552013e2130f7bc03c1007e24473b4835952679653fb132367bdd57"
173 | ],
174 | "index": "pypi",
175 | "version": "==0.5.3"
176 | },
177 | "certifi": {
178 | "hashes": [
179 | "sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5",
180 | "sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae"
181 | ],
182 | "version": "==2019.3.9"
183 | },
184 | "chardet": {
185 | "hashes": [
186 | "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
187 | "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
188 | ],
189 | "version": "==3.0.4"
190 | },
191 | "click": {
192 | "hashes": [
193 | "sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13",
194 | "sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"
195 | ],
196 | "version": "==7.0"
197 | },
198 | "docutils": {
199 | "hashes": [
200 | "sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6",
201 | "sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274",
202 | "sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6"
203 | ],
204 | "version": "==0.14"
205 | },
206 | "idna": {
207 | "hashes": [
208 | "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
209 | "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
210 | ],
211 | "version": "==2.8"
212 | },
213 | "pbr": {
214 | "hashes": [
215 | "sha256:6901995b9b686cb90cceba67a0f6d4d14ae003cd59bc12beb61549bdfbe3bc89",
216 | "sha256:d950c64aeea5456bbd147468382a5bb77fe692c13c9f00f0219814ce5b642755"
217 | ],
218 | "version": "==5.2.0"
219 | },
220 | "pipenv": {
221 | "hashes": [
222 | "sha256:56ad5f5cb48f1e58878e14525a6e3129d4306049cb76d2f6a3e95df0d5fc6330",
223 | "sha256:7df8e33a2387de6f537836f48ac6fcd94eda6ed9ba3d5e3fd52e35b5bc7ff49e",
224 | "sha256:a673e606e8452185e9817a987572b55360f4d28b50831ef3b42ac3cab3fee846"
225 | ],
226 | "version": "==2018.11.26"
227 | },
228 | "pipenv-to-requirements": {
229 | "hashes": [
230 | "sha256:04dc5b1fedfea18858efe3f28d62ed70ac0cd04c34bc32a8c3531384d11be460",
231 | "sha256:44bfefa3f6ded62e3bdcc5a6bb4de7a821b2658f821c1126f6abc3bffecb2c1e"
232 | ],
233 | "index": "pypi",
234 | "version": "==0.7.1"
235 | },
236 | "pkginfo": {
237 | "hashes": [
238 | "sha256:7424f2c8511c186cd5424bbf31045b77435b37a8d604990b79d4e70d741148bb",
239 | "sha256:a6d9e40ca61ad3ebd0b72fbadd4fba16e4c0e4df0428c041e01e06eb6ee71f32"
240 | ],
241 | "version": "==1.5.0.1"
242 | },
243 | "pygments": {
244 | "hashes": [
245 | "sha256:5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a",
246 | "sha256:e8218dd399a61674745138520d0d4cf2621d7e032439341bc3f647bff125818d"
247 | ],
248 | "version": "==2.3.1"
249 | },
250 | "readme-renderer": {
251 | "hashes": [
252 | "sha256:bb16f55b259f27f75f640acf5e00cf897845a8b3e4731b5c1a436e4b8529202f",
253 | "sha256:c8532b79afc0375a85f10433eca157d6b50f7d6990f337fa498c96cd4bfc203d"
254 | ],
255 | "version": "==24.0"
256 | },
257 | "requests": {
258 | "hashes": [
259 | "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
260 | "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
261 | ],
262 | "version": "==2.21.0"
263 | },
264 | "requests-toolbelt": {
265 | "hashes": [
266 | "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f",
267 | "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"
268 | ],
269 | "version": "==0.9.1"
270 | },
271 | "six": {
272 | "hashes": [
273 | "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
274 | "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
275 | ],
276 | "version": "==1.12.0"
277 | },
278 | "toml": {
279 | "hashes": [
280 | "sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c",
281 | "sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e"
282 | ],
283 | "version": "==0.10.0"
284 | },
285 | "tqdm": {
286 | "hashes": [
287 | "sha256:d385c95361699e5cf7622485d9b9eae2d4864b21cd5a2374a9c381ffed701021",
288 | "sha256:e22977e3ebe961f72362f6ddfb9197cc531c9737aaf5f607ef09740c849ecd05"
289 | ],
290 | "version": "==4.31.1"
291 | },
292 | "twine": {
293 | "hashes": [
294 | "sha256:0fb0bfa3df4f62076cab5def36b1a71a2e4acb4d1fa5c97475b048117b1a6446",
295 | "sha256:d6c29c933ecfc74e9b1d9fa13aa1f87c5d5770e119f5a4ce032092f0ff5b14dc"
296 | ],
297 | "index": "pypi",
298 | "version": "==1.13.0"
299 | },
300 | "urllib3": {
301 | "hashes": [
302 | "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4",
303 | "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb"
304 | ],
305 | "index": "pypi",
306 | "version": "==1.24.3"
307 | },
308 | "virtualenv": {
309 | "hashes": [
310 | "sha256:15ee248d13e4001a691d9583948ad3947bcb8a289775102e4c4aa98a8b7a6d73",
311 | "sha256:bfc98bb9b42a3029ee41b96dc00a34c2f254cbf7716bec824477b2c82741a5c4"
312 | ],
313 | "version": "==16.5.0"
314 | },
315 | "virtualenv-clone": {
316 | "hashes": [
317 | "sha256:532f789a5c88adf339506e3ca03326f20ee82fd08ee5586b44dc859b5b4468c5",
318 | "sha256:c88ae171a11b087ea2513f260cdac9232461d8e9369bcd1dc143fc399d220557"
319 | ],
320 | "version": "==0.5.3"
321 | },
322 | "webencodings": {
323 | "hashes": [
324 | "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78",
325 | "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"
326 | ],
327 | "version": "==0.5.1"
328 | }
329 | }
330 | }
331 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # notion-sync
2 |
3 | ```
4 | pip install notion-sync
5 | ```
6 |
7 |  
8 |
9 | A tool to sync a notion collection to markdown files
10 |
11 | ## Setting up Notion
12 |
13 | - Create a collection table view
14 | - Add columns for
15 | - Publish Date (type date)
16 | - Status (type select, with Published as an option)
17 | - Tags (type multi_select)
18 |
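With these columns in place, each published row is written out as a dated markdown file (e.g. `out/2019-05-01-My-First-Post.md`) whose front matter is built from the row's title, icon, and Tags. A sketch of the generated front matter, with made-up values:

```
---
title: 📝 My First Post
tags: python, notion
---
```
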
19 | ## Setting up your syncing directory
20 |
21 | - Copy `config-example.json` to `config.json`
22 | - Set `token_v2` to the value of the `token_v2` cookie from a logged-in Notion session
23 | - Set `sync_root` to the URL of a collection view page (the database-as-rows page)
24 |
25 | In the same directory as your config file, run:
26 |
27 | ```
28 | notion-sync
29 | ```
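
The command also accepts a few optional flags (see `parse_args` in `notion_sync/__main__.py`):

```
# one-shot sync using ./config.json (the default config path)
notion-sync

# start from a clean destination, include unpublished drafts, and keep watching for changes
notion-sync --config ./config.json --clean --draft --watch
```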
30 |
--------------------------------------------------------------------------------
/config-example.json:
--------------------------------------------------------------------------------
1 | {
2 | "token_v2": "get this from your cookies",
3 | "sync_root": "This should be a collection (e.g. a database table)",
4 | "destination": "./out"
5 | }
6 |
--------------------------------------------------------------------------------
/notion_sync/__init__.py:
--------------------------------------------------------------------------------
1 | __version__ = "0.3.2"
2 |
--------------------------------------------------------------------------------
/notion_sync/__main__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from notion.client import NotionClient
4 | from notion.markdown import notion_to_markdown
5 | import notion
6 | import os
7 | import stat
8 | import sys
9 | import errno
10 | import asyncio
11 | import json
12 | from shutil import rmtree
13 | from datetime import date
14 | from itertools import chain
15 | import argparse
16 |
17 |
18 | def is_file(filepath):
19 | if os.path.exists(filepath):
20 | filestat = os.stat(filepath)
21 | return filestat is not None and not stat.S_ISDIR(filestat.st_mode)
22 | return False
23 |
24 |
25 | def rm_file(filepath):
26 | if is_file(filepath):
27 | os.remove(filepath)
28 |
29 |
30 | def load_config_file(config_json_path):
31 | if not is_file(config_json_path):
32 | sys.exit("config file '%s' not found" % config_json_path)
33 |
34 | with open(config_json_path) as config_file:
35 | config = json.load(config_file)
36 | client = NotionClient(token_v2=config["token_v2"])
37 | return (
38 | client,
39 | client.get_collection_view(config["sync_root"]),
40 | config["destination"],
41 | )
42 |
43 |
44 | def get_post_meta(row):
45 | tags = chain(
46 | *[
47 | row.get_property(entry["id"])
48 | for entry in row.schema
49 | if (entry["name"] == "Tags" and entry["type"] == "multi_select")
50 | ]
51 | )
52 | return "---\ntitle: %s\ntags: %s\n---" % (
53 | get_decorated_row_title(row),
54 | ", ".join(tags),
55 | )
56 |
57 |
58 | def get_row_publish_date(row):
59 | publish_dates = [
60 | row.get_property(entry["id"])
61 | for entry in row.schema
62 | if (entry["name"] == "Publish Date" and entry["type"] == "date")
63 | ]
64 | dates = [
65 | publish_date.start for publish_date in publish_dates if publish_date is not None
66 | ]
67 | return None if len(dates) == 0 else max(dates)
68 |
69 |
70 | def set_row_status(row, value):
71 | for entry in row.schema:
72 | if entry["name"] == "Status":
73 | row.set_property(entry["id"], value)
74 |
75 |
76 | def set_row_published_pending(row):
77 | """
78 |     Sets a row's status to Unpublished, Pending, or Published based on
79 |     its publish date (rows marked Incomplete keep their status if they have a date)
80 | """
81 | publish_date = get_row_publish_date(row)
82 | if publish_date is None:
83 | set_row_status(row, "Unpublished")
84 | elif not is_row_status(row, "Incomplete"):
85 | if publish_date > date.today():
86 | set_row_status(row, "Pending")
87 | else:
88 | set_row_status(row, "Published")
89 |
90 |
91 | def is_row_status(row, row_status):
92 | return any(
93 | [
94 | row.get_property(entry["id"]) == row_status
95 | for entry in row.schema
96 | if entry["name"] == "Status"
97 | ]
98 | )
99 |
100 |
101 | def get_row_link_slug(row):
102 | publish_date = get_row_publish_date(row)
103 |
104 | publish_date_slug = (
105 | "0000-00-00"
106 | if publish_date is None
107 | else "%04d-%02d-%02d"
108 | % (publish_date.year, publish_date.month, publish_date.day)
109 | )
110 |
111 | return "-".join([publish_date_slug] + row.title.split(" "))
112 |
113 |
114 | class CollectionGeneratorContext:
115 | def __init__(self, collection_file_sync):
116 | self.collection_file_sync = collection_file_sync
117 |
118 | def contains_row(self, block):
119 | # Explicitly opt not to support embedded subpages
120 |         # (e.g. subpages that are _indirect_ descendants of the collection)
121 | is_block_in_root_collection = (
122 | block.collection.id == self.collection_file_sync.collection.id
123 | )
124 | return (
125 | is_block_in_root_collection
126 | and isinstance(block, notion.collection.CollectionRowBlock)
127 | and self.collection_file_sync.is_row_published(block)
128 | )
129 |
130 | def get_block_url(self, block):
131 | return "/posts/" + get_row_link_slug(block)
132 |
133 |
134 | def get_decorated_row_title(block):
135 | return block.title if block.icon is None else "%s %s" % (block.icon, block.title)
136 |
137 |
138 | class MarkdownGenerator:
139 | def __init__(self, context):
140 | self.context = context
141 |
142 | def get_markdown_from_page(self, page):
143 | if not self.context.contains_row(page):
144 | return None
145 | return self.get_markdown_from_block(page, is_page_root=True)
146 |
147 | def get_markdown_from_block(self, block, is_page_root=False):
148 | # print('traverse', type(block), block)
149 | if isinstance(block, notion.collection.CollectionRowBlock):
150 | if is_page_root:
151 | # if we are on the page root, traverse the subpage
152 | return "\n\n".join(
153 | [
154 | md
155 | for md in [
156 | self.get_markdown_from_block(child)
157 | for child in block.children
158 | ]
159 | if md is not None
160 | ]
161 | )
162 | else:
163 | # otherwise, just link to the page
164 | contains_row = self.context.contains_row(block)
165 | if not contains_row:
166 | return ""
167 |
168 | block_url = self.context.get_block_url(block)
169 |
170 | return "[%s](%s)" % (get_decorated_row_title(block), block_url)
171 |
172 | elif isinstance(block, notion.block.TextBlock):
173 | return block.title
174 | elif isinstance(block, notion.block.HeaderBlock):
175 | return "# " + block.title
176 | elif isinstance(block, notion.block.SubheaderBlock):
177 | return "## " + block.title
178 | elif block.type == "sub_sub_header":
179 | return "### " + notion_to_markdown(
180 | block._get_record_data()["properties"]["title"]
181 | )
182 | elif isinstance(block, notion.block.BulletedListBlock):
183 | row = "- " + block.title
184 | subrows = self.indent_children(block.children)
185 | return row + "\n" + subrows
186 | elif isinstance(block, notion.block.NumberedListBlock):
187 | row = "1. " + block.title
188 | subrows = self.indent_children(block.children)
189 | return row + "\n" + subrows
190 |         elif isinstance(block, notion.block.ColumnListBlock):
191 |             subsections = "\n".join(
192 |                 [self.get_markdown_from_block(child) for child in block.children]
193 |             )
194 |             return (  # wrapper markup lost from the source; a plain container div is assumed
195 |                 '<div class="column-list">\n%s\n</div>'
196 |                 % subsections
197 |             )
198 |         elif isinstance(block, notion.block.ColumnBlock):
199 |             return '<div class="column" style="flex: %s">\n%s\n</div>' % (  # assumed wrapper
200 |                 block.column_ratio,
201 |                 "\n\n".join(
202 |                     self.get_markdown_from_block(child) for child in block.children
203 |                 ),
204 |             )
205 |         elif isinstance(block, notion.block.ImageBlock):
206 |             raw_source = notion_to_markdown(
207 |                 block._get_record_data()["properties"]["source"]
208 |             )
209 |             return "![%s](%s)" % (  # assumed standard markdown image: caption, then source URL
210 |                 block.caption if block.caption is not None else "",
211 |                 block.source,
212 |             )
213 | elif isinstance(block, notion.block.CodeBlock):
214 | code_source = block.title
215 | code_language = block.language if block.language != "Plain Text" else ""
216 | return "```%s\n%s\n```" % (code_language, code_source)
217 | elif isinstance(block, notion.block.QuoteBlock):
218 | quote_body = block.title
219 | return "> " + "\n> ".join(quote_body.split("\n"))
220 | elif isinstance(block, notion.block.TodoBlock):
221 | row = "[%s] %s" % ("x" if block.checked else " ", block.title)
222 | subrows = self.indent_children(block.children)
223 | return row + "\n" + subrows
224 | elif isinstance(block, notion.block.DividerBlock):
225 | return "---\n"
226 | elif isinstance(block, notion.block.CollectionViewBlock):
227 | # TODO handle these if they are tables
228 | pass
229 | else:
230 | print("encountered unknown block type")
231 | print(type(block), block, block._get_record_data())
232 | return str(block)
233 |
234 | def indent_children(self, children):
235 | return "".join(
236 | [
237 | " " + md.replace("\n", "\n ")
238 | for md in [self.get_markdown_from_block(child) for child in children]
239 | if md is not None
240 | ]
241 | )
242 |
243 |
244 | class RowSync:
245 | """
246 |     Synchronizes a row's content to a markdown file
247 | """
248 |
249 | def __init__(self, root_dir, row, markdown_generator):
250 | self.root_dir = root_dir
251 | self.row = row
252 | self.markdown_generator = markdown_generator
253 | self.filename = self._get_sync_filename()
254 |
255 | def start_watching(self):
256 | self.callback_id = self.row.add_callback(self.update_file)
257 | self.update_file()
258 |
259 | def update_file(self):
260 | # Make sure the data on the row is consistent
261 | set_row_published_pending(self.row)
262 |
263 | if self.filename != self._get_sync_filename():
264 | rm_file(self.filename)
265 | self.filename = self._get_sync_filename()
266 |
267 | md_content = self.markdown_generator.get_markdown_from_page(self.row)
268 |
269 |         if md_content is not None:
270 | with open(self.filename, "w") as file_handle:
271 | meta = get_post_meta(self.row)
272 | file_handle.write(meta + "\n\n" + md_content)
273 | else:
274 | rm_file(self.filename)
275 |
276 | def stop_watching_and_remove(self):
277 | self.row.remove_callbacks(self.callback_id)
278 | os.remove(self.filename)
279 |
280 | def _get_sync_filename(self):
281 | # TODO format based on date of the entry
282 | return "%s/%s.md" % (self.root_dir, get_row_link_slug(self.row))
283 |
284 |
285 | class CollectionFileSync:
286 | """
287 | Synchronizes a collection's rows to individual markdown files
288 |
289 | Tracks row addition / removal
290 | """
291 |
292 | def __init__(self, collection, root_dir, watch=False, draft=False):
293 | self.collection = collection
294 | self.root_dir = root_dir
295 | self.markdown_generator = MarkdownGenerator(CollectionGeneratorContext(self))
296 | self.watch = watch
297 | self.draft = draft
298 |
299 | self.known_rows = dict()
300 |
301 | def start_watching(self):
302 | self.callback = self.collection.add_callback(self.sync_rows)
303 | self.sync_rows()
304 |
305 |     def stop_watching(self):
306 |         # undo start_watching: remove the registered callback instead of adding another one
307 |         self.collection.remove_callbacks(self.callback)
308 |
309 | def sync_rows(self):
310 | print("syncing rows!")
311 | rows = self.collection.get_rows()
312 | rows_dict = dict((row.id, row) for row in rows)
313 | new_row_ids = frozenset(row.id for row in rows)
314 | old_row_ids = self.known_rows.keys()
315 |
316 | added_row_ids = new_row_ids - old_row_ids
317 | removed_row_ids = old_row_ids - new_row_ids
318 |
319 | for added_row_id in added_row_ids:
320 | row = rows_dict[added_row_id]
321 | print("tracking (id=%s) %s" % (row.id, get_row_link_slug(row)))
322 | row_sync = RowSync(self.root_dir, row, self.markdown_generator)
323 | self.known_rows[added_row_id] = row_sync
324 |
325 | for removed_row_id in removed_row_ids:
326 | print(
327 | "removing (id=%s) %s "
328 |                 % (removed_row_id, self.known_rows[removed_row_id].filename)
329 | )
330 | self.known_rows[removed_row_id].stop_watching_and_remove()
331 | del self.known_rows[removed_row_id]
332 |
333 | # run generation after adding all rows to make sure state is sane when
334 | # trying to calculate between-page-links
335 | for added_row_id in added_row_ids:
336 | if self.watch:
337 | self.known_rows[added_row_id].start_watching()
338 | else:
339 | self.known_rows[added_row_id].update_file()
340 |
341 | def is_row_published(self, row):
342 | return self.draft or is_row_status(row, "Published")
343 |
344 |
345 | async def async_main():
346 | args = parse_args()
347 | client, root_view, destination_dir = load_config_file(args.config)
348 |
349 | # create destination
350 | if args.clean:
351 | rmtree(destination_dir, ignore_errors=True)
352 | os.makedirs(destination_dir, exist_ok=True)
353 |
354 | collectionRoot = CollectionFileSync(
355 | root_view.collection, destination_dir, watch=args.watch, draft=args.draft
356 | )
357 |
358 | if args.watch:
359 | collectionRoot.start_watching()
360 | while True:
361 | sys.stdout.flush()
362 | await asyncio.sleep(1)
363 | else:
364 | collectionRoot.sync_rows()
365 | print("Done!")
366 |
367 |
368 | def parse_args():
369 | parser = argparse.ArgumentParser(
370 | description="Synchronizes markdown documents from a notion Collection View"
371 | )
372 | parser.add_argument(
373 | "--config",
374 | "-c",
375 | metavar="config",
376 | type=str,
377 | default="./config.json",
378 | help="Path to a config file",
379 | )
380 | parser.add_argument(
381 | "--watch",
382 | dest="watch",
383 | action="store_true",
384 | default=False,
385 | help="run in polling/watch mode",
386 | )
387 | parser.add_argument(
388 | "--clean",
389 | dest="clean",
390 | action="store_true",
391 | default=False,
392 | help="Clean destination directory before running",
393 | )
394 | parser.add_argument(
395 | "--draft",
396 | dest="draft",
397 | action="store_true",
398 | default=False,
399 | help="Build all blog entries, even unpublished ones.",
400 | )
401 | return parser.parse_args(sys.argv[1:])
402 |
403 |
404 | def main():
405 | asyncio.run(async_main())
406 |
407 |
408 | if __name__ == "__main__":
409 | main()
410 |
--------------------------------------------------------------------------------
/scripts/bump-version-from-commit.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | CURRENT_VERSION=$(grep current_version .bumpversion.cfg | sed 's/.*= //')
5 | echo "compare against $CURRENT_VERSION"
6 | MESSAGE=`git log v$CURRENT_VERSION..HEAD --pretty=%B`
7 |
8 | if [[ $MESSAGE == *"major"* ]]; then
9 | echo "bumping major version"
10 | pipenv run bumpversion major
11 | elif [[ $MESSAGE == *"minor"* ]]; then
12 | echo "bumping minor version"
13 | pipenv run bumpversion minor
14 | elif [[ $MESSAGE == *"patch"* ]]; then
15 | echo "bumping patch version"
16 | pipenv run bumpversion patch
17 | else
18 | echo "commit message did not match major/patch/minor. not bumping version"
19 | fi
20 |
--------------------------------------------------------------------------------
/scripts/push-and-publish-if-changed.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -x
3 |
4 | git branch
5 | git log -1
6 | git diff --exit-code master origin/master
7 | if [[ "$?" != "0" ]]; then
8 | git push https://$GITHUB_TOKEN:x-oauth-basic@$GITHUB_REMOTE master:master
9 | git push https://$GITHUB_TOKEN:x-oauth-basic@$GITHUB_REMOTE --tags
10 | make publish-travis
11 | fi
12 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import setuptools
2 |
3 | try: # for pip >= 10
4 | from pip._internal.req import parse_requirements
5 | except ImportError: # for pip <= 9.0.3
6 | from pip.req import parse_requirements
7 |
8 | with open("README.md", "r") as fh:
9 | long_description = fh.read()
10 |
11 | reqs = parse_requirements("requirements.txt", session=False)
12 | install_requires = [str(ir.req) for ir in reqs]
13 |
14 | setuptools.setup(
15 | name="notion_sync",
16 | version="0.3.2",
17 | author="Maxwell Huang-Hobbs",
18 | author_email="mhuan13+pypy@gmail.com",
19 | description="A tool to sync a notion collection to markdown files",
20 | long_description=long_description,
21 | long_description_content_type="text/markdown",
22 | url="https://github.com/adjective-object/notion-sync",
23 | install_requires=install_requires,
24 | include_package_data=True,
25 | packages=setuptools.find_packages(),
26 | python_requires=">=3.7",
27 | classifiers=[
28 | "Programming Language :: Python :: 3",
29 | "License :: OSI Approved :: MIT License",
30 | "Operating System :: OS Independent",
31 | ],
32 | entry_points={"console_scripts": ["notion-sync=notion_sync.__main__:main"]},
33 | )
34 |
--------------------------------------------------------------------------------