├── .github │ └── workflows │ └── tag-latest.yml ├── .gitignore ├── Dockerfile ├── README.md ├── action.yml ├── notion_docs_sync │ ├── __init__.py │ └── markdown.py ├── poetry.lock └── pyproject.toml /.github/workflows/tag-latest.yml: -------------------------------------------------------------------------------- 1 | name: Add latest tag to new release 2 | on: 3 | release: 4 | types: [published] # This makes it run only when a new release is published 5 | 6 | jobs: 7 | run: 8 | name: Add/update tag to new release 9 | runs-on: ubuntu-latest 10 | 11 | steps: 12 | - name: Checkout repository 13 | uses: actions/checkout@master 14 | 15 | - name: Run latest-tag 16 | uses: EndBug/latest-tag@latest 17 | with: 18 | tag-name: latest 19 | env: 20 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 21 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.gitignore.io/api/dotenv,python,virtualenv 3 | # Edit at https://www.gitignore.io/?templates=dotenv,python,virtualenv 4 | 5 | ### dotenv ### 6 | .env 7 | 8 | ### Python ### 9 | # Byte-compiled / optimized / DLL files 10 | __pycache__/ 11 | *.py[cod] 12 | *$py.class 13 | 14 | # C extensions 15 | *.so 16 | 17 | # Distribution / packaging 18 | .Python 19 | build/ 20 | develop-eggs/ 21 | dist/ 22 | downloads/ 23 | eggs/ 24 | .eggs/ 25 | lib/ 26 | lib64/ 27 | parts/ 28 | sdist/ 29 | var/ 30 | wheels/ 31 | pip-wheel-metadata/ 32 | share/python-wheels/ 33 | *.egg-info/ 34 | .installed.cfg 35 | *.egg 36 | MANIFEST 37 | 38 | # PyInstaller 39 | # Usually these files are written by a python script from a template 40 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 41 | *.manifest 42 | *.spec 43 | 44 | # Installer logs 45 | pip-log.txt 46 | pip-delete-this-directory.txt 47 | 48 | # Unit test / coverage reports 49 | htmlcov/ 50 | .tox/ 51 | .nox/ 52 | .coverage 53 | .coverage.* 54 | .cache 55 | nosetests.xml 56 | coverage.xml 57 | *.cover 58 | .hypothesis/ 59 | .pytest_cache/ 60 | 61 | # Translations 62 | *.mo 63 | *.pot 64 | 65 | # Scrapy stuff: 66 | .scrapy 67 | 68 | # Sphinx documentation 69 | docs/_build/ 70 | 71 | # PyBuilder 72 | target/ 73 | 74 | # pyenv 75 | .python-version 76 | 77 | # pipenv 78 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 79 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 80 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 81 | # install all needed dependencies.
82 | #Pipfile.lock 83 | 84 | # celery beat schedule file 85 | celerybeat-schedule 86 | 87 | # SageMath parsed files 88 | *.sage.py 89 | 90 | # Spyder project settings 91 | .spyderproject 92 | .spyproject 93 | 94 | # Rope project settings 95 | .ropeproject 96 | 97 | # Mr Developer 98 | .mr.developer.cfg 99 | .project 100 | .pydevproject 101 | 102 | # mkdocs documentation 103 | /site 104 | 105 | # mypy 106 | .mypy_cache/ 107 | .dmypy.json 108 | dmypy.json 109 | 110 | # Pyre type checker 111 | .pyre/ 112 | 113 | ### VirtualEnv ### 114 | # Virtualenv 115 | # http://iamzed.com/2009/05/07/a-primer-on-virtualenv/ 116 | pyvenv.cfg 117 | .venv 118 | env/ 119 | venv/ 120 | ENV/ 121 | env.bak/ 122 | venv.bak/ 123 | pip-selfcheck.json 124 | 125 | # End of https://www.gitignore.io/api/dotenv,python,virtualenv 126 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.8-slim 2 | 3 | ARG ENVIRONMENT="production" 4 | 5 | ENV POETRY_VERSION="1.0.5" 6 | ENV APP_ENVIRONMENT=${ENVIRONMENT} 7 | ENV PYTHONUNBUFFERED=1 8 | 9 | RUN pip install "poetry==$POETRY_VERSION" 10 | 11 | RUN poetry config virtualenvs.create false 12 | 13 | WORKDIR /app/ 14 | 15 | COPY poetry.lock pyproject.toml ./ 16 | 17 | COPY ./notion_docs_sync/ ./notion_docs_sync/ 18 | 19 | RUN if [ "$APP_ENVIRONMENT" = "production" ]; then poetry install --no-dev; else poetry install; fi 20 | 21 | ENTRYPOINT [ "notion-docs-sync"] 22 | CMD [] 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Notion Documentation Sync 2 | 3 | Synchronizes documentation files to [Notion.so](https://notion.so). 4 | Reads in directories and Markdown files, converting them to Notion pages & blocks. 5 | 6 | ## Usage 7 | 8 | ### Command Line 9 | 10 | Run with Poetry: 11 | 12 | ```bash 13 | poetry run notion-docs-sync --notion-token $NOTION_TOKEN $DOCS_PATH $NOTION_URL 14 | ``` 15 | 16 | ### Getting a Notion Token 17 | 18 | The Notion token is stored in your browser's cookies as `token_v2` when you authenticate. 19 | This can be retrieved in Firefox with the [Storage Inspector][firefox-storage-inspector] 20 | or in Chrome with the [Chrome DevTools Application tab][chrome-devtools]. 21 | 22 | This value can be used to make changes to your account as well as 23 | any workspace you have access to. There does not seem to be an easy way to 24 | invalidate this token, so however you end up storing it, keep it secure! 25 | 26 | ### GitHub Actions 27 | 28 | **Warning:** If you include the Notion token directly in your workflow definition 29 | you will be allowing anyone with access to the repository to act on your behalf in 30 | Notion. Use the [GitHub Repository Secrets][gh-secrets] feature! 31 | 32 | #### Inputs 33 | 34 | * `docs_path` - The path that should be published to Notion. 35 | * `notion_url` - The URL to a page in Notion that serves as the root of the documentation. 36 | * `notion_token` - The Notion access token to use for publishing.
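These inputs correspond to the command-line arguments described above. For local runs, note that the CLI also falls back to the `NOTION_TOKEN` environment variable (or a local `.env` file loaded via `python-dotenv`) when `--notion-token` is not passed. A minimal sketch, using placeholder values for the token and root page URL:

```bash
# notion-docs-sync reads NOTION_TOKEN from the environment (or a .env file)
# when --notion-token is omitted, which keeps the token out of shell history.
export NOTION_TOKEN="<token_v2 value copied from your browser cookies>"
poetry run notion-docs-sync ./docs/ "https://www.notion.so/<your-root-docs-page>"
```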
37 | 38 | #### Example Workflow 39 | 40 | Create a Workflow with the following definition: 41 | 42 | ``` 43 | on: 44 | push: 45 | branches: 46 | - master 47 | 48 | jobs: 49 | release: 50 | timeout-minutes: 10 51 | runs-on: ubuntu-latest 52 | steps: 53 | - uses: actions/checkout@v2 54 | 55 | - name: Notion Documentation Sync 56 | uses: imnotjames/notion-docs-sync@latest 57 | with: 58 | docs_path: ./docs/ 59 | notion_url: ${{ secrets.NOTION_URL }} 60 | notion_token: ${{ secrets.NOTION_TOKEN }} 61 | ``` 62 | 63 | [gh-secrets]: https://help.github.com/en/actions/configuring-and-managing-workflows/creating-and-storing-encrypted-secrets 64 | [firefox-storage-inspector]: https://developer.mozilla.org/en-US/docs/Tools/Storage_Inspector 65 | [chrome-devtools]: https://developers.google.com/web/tools/chrome-devtools/storage/cookies 66 | -------------------------------------------------------------------------------- /action.yml: -------------------------------------------------------------------------------- 1 | # Github Actions YML file 2 | 3 | name: 'Notion Documentation Sync' 4 | description: 'Synchronize Documentation from GitHub to Notion' 5 | 6 | branding: 7 | icon: upload-cloud 8 | color: black 9 | 10 | inputs: 11 | docs_path: 12 | description: The path to publish. 13 | default: ./docs/ 14 | required: true 15 | notion_url: 16 | description: The Notion URL to publish to. 17 | required: true 18 | notion_token: 19 | required: true 20 | description: The Notion Access Token to publish with. 21 | runs: 22 | using: 'docker' 23 | image: 'Dockerfile' 24 | args: 25 | - --notion-token=${{ inputs.notion_token }} 26 | - ${{ inputs.docs_path }} 27 | - ${{ inputs.notion_url }} 28 | -------------------------------------------------------------------------------- /notion_docs_sync/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | from random import choice 4 | from argparse import ArgumentParser 5 | from urllib.parse import urlparse 6 | 7 | from notion.client import NotionClient 8 | from notion.block import Block, PageBlock, CollectionViewBlock 9 | from emoji import EMOJI_UNICODE 10 | import frontmatter 11 | 12 | from .markdown import convert 13 | 14 | try: 15 | from dotenv import load_dotenv 16 | load_dotenv() 17 | except: 18 | pass 19 | 20 | 21 | logger = logging.getLogger(__name__) 22 | 23 | 24 | def random_emoji(): 25 | # Don't allow people, hands, or fingers. 
26 | forbidden_emoji_patterns = ['child', 'skin_tone', 'person', 'hand', 'finger'] 27 | 28 | emoji_key = None 29 | while not emoji_key: 30 | emoji_key = choice(list(EMOJI_UNICODE.keys())) 31 | 32 | for pattern in forbidden_emoji_patterns: 33 | if pattern in emoji_key: 34 | emoji_key = None 35 | break 36 | 37 | return EMOJI_UNICODE[emoji_key] 38 | 39 | 40 | def infer_block(root_block, path) -> Block: 41 | name, ext = os.path.splitext(path) 42 | 43 | if name == 'index': 44 | return root_block 45 | 46 | if ext != '.md' and ext != '': 47 | return None 48 | 49 | title = name.replace('-', ' ').replace('_', ' ').capitalize() 50 | 51 | for block in root_block.children: 52 | if block.type != 'page': 53 | continue 54 | 55 | if block.title != title: 56 | continue 57 | 58 | return block 59 | 60 | # Create a new page block 61 | 62 | return root_block.children.add_new(PageBlock, title=title) 63 | 64 | 65 | def move_pages_to_end(block): 66 | # Move pages to the end of the document if they aren't already 67 | pages_to_move = [] 68 | pages_seen = [] 69 | 70 | for c in block.children: 71 | if c.type == 'page': 72 | pages_seen.append(c) 73 | else: 74 | pages_to_move.extend(pages_seen) 75 | pages_seen.clear() 76 | 77 | for page in pages_to_move: 78 | logger.info(f"Moving page {page.id} to end of {block.id}") 79 | page.move_to(block, 'last-child') 80 | 81 | 82 | def block_matches_markdown_block(block, markdown_block_type, **markdown_block): 83 | if markdown_block_type != type(block): 84 | return False 85 | 86 | for key, value in markdown_block.items(): 87 | if key in ['type', 'schema', 'rows']: 88 | continue 89 | 90 | block_attr = getattr(block, key) 91 | 92 | if block_attr != value: 93 | return False 94 | 95 | return True 96 | 97 | 98 | def sync_collection_schema(collection, expected_schema): 99 | existing_schema = collection.get('schema') 100 | 101 | # The schemas must match! 102 | if existing_schema == expected_schema: 103 | return 104 | 105 | logger.info(f"Updating schema of {collection.id}") 106 | 107 | # If they don't, try to make them match. 108 | collection.set('schema', expected_schema) 109 | 110 | 111 | def sync_collection_rows(block, collection_schema, collection_rows): 112 | if block.collection is None: 113 | logger.info(f"Creating a new collection for {block.id}") 114 | # We should have generated a schema and rows for this one 115 | client = block._client # Hacky internals stuff... 
116 | block.collection = client.get_collection( 117 | # Low-level use of the API 118 | # TODO: Update when notion-py provides a better interface for this 119 | client.create_record("collection", parent=block, schema={"title": {"text": "_", "type": "text"}}) 120 | ) 121 | 122 | block.views.add_new(view_type="table") 123 | 124 | collection_schema_ids = ['title'] 125 | 126 | for i in range(len(collection_schema) - 1): 127 | collection_schema_ids.append('x' + format(i, '0>4x')) 128 | 129 | sync_collection_schema(block.collection, dict(zip(collection_schema_ids, collection_schema))) 130 | 131 | existing_rows = block.collection.get_rows() 132 | 133 | for extra_row in existing_rows[len(collection_rows):]: 134 | extra_row.remove() 135 | 136 | existing_rows_iter = iter(existing_rows) 137 | 138 | for row in collection_rows: 139 | try: 140 | row_block = next(existing_rows_iter) 141 | except StopIteration: 142 | row_block = block.collection.add_row() 143 | 144 | if len(row) > len(collection_schema_ids): 145 | row = row[:len(collection_schema_ids)] 146 | 147 | row = zip(collection_schema_ids, row) 148 | 149 | for schema_id, prop_value in row: 150 | if row_block.get_property(schema_id) != prop_value: 151 | row_block.set_property(schema_id, prop_value) 152 | 153 | 154 | def sync_markdown_blocks_to_block(markdown_blocks, block): 155 | touched_blocks = set() 156 | children_iter = iter(block.children) 157 | 158 | for markdown_block in markdown_blocks: 159 | markdown_block_class = markdown_block["type"] 160 | del markdown_block["type"] 161 | 162 | markdown_contents = markdown_block.pop("title", None) 163 | collection_schema = markdown_block.pop("schema", None) 164 | collection_rows = markdown_block.pop("rows", None) 165 | block_children = markdown_block.pop("children", None) 166 | 167 | try: 168 | child_block = next(children_iter) 169 | while not block_matches_markdown_block(child_block, markdown_block_class, **markdown_block): 170 | child_block = next(children_iter) 171 | logger.info(f"Using existing markdown block {child_block.id} in {block.id}") 172 | except StopIteration: 173 | # If we've hit the end of the children create a new child. 174 | child_block = block.children.add_new(markdown_block_class, **markdown_block) 175 | logger.info(f"Creating new markdown block {child_block.id} in {block.id}") 176 | 177 | if markdown_contents is not None: 178 | # Manually set the title property to bypass the `markdown_to_notion` in `notion-py` 179 | # This is because it chokes up on URLs and really we just don't need this 'cause 180 | # we're parsing the markdown ourselves. 181 | if child_block.get(["properties", "title"]) != markdown_contents: 182 | child_block.set(["properties", "title"], markdown_contents) 183 | 184 | touched_blocks.add(child_block.id) 185 | 186 | if isinstance(child_block, CollectionViewBlock): 187 | sync_collection_rows(child_block, collection_schema, collection_rows) 188 | 189 | if block_children: 190 | sync_markdown_blocks_to_block(block_children, child_block) 191 | elif len(child_block.get(child_block.child_list_key, [])) > 0: 192 | # If no children should exist but there are children attached to this block 193 | # (a list, etc) we should remove them as they're no longer needed! 
194 | for c in child_block.children: 195 | c.remove() 196 | 197 | 198 | for c in block.children: 199 | if c.type != 'page' and c.id not in touched_blocks: 200 | logger.info(f"Removing child block {c.id} from {block.id}") 201 | c.remove() 202 | 203 | 204 | def sync_file_to_block(filename, block, links: dict = {}): 205 | logger.info(f"Syncing {filename} to block {block.id}") 206 | 207 | with open(filename) as markdown_fd: 208 | contents = markdown_fd.read() 209 | 210 | post = frontmatter.loads(contents) 211 | 212 | def resolve_link(target): 213 | try: 214 | parsed = urlparse(target) 215 | 216 | if parsed.scheme: 217 | return target 218 | except: 219 | pass 220 | 221 | target_path = os.path.realpath(os.path.join(os.path.dirname(filename), target)) 222 | 223 | block = links.get(target_path) 224 | 225 | if not block: 226 | return target 227 | 228 | return block.get_browseable_url() 229 | 230 | markdown_blocks = convert(str(post), link_resolver=resolve_link) 231 | 232 | sync_markdown_blocks_to_block(markdown_blocks, block) 233 | 234 | 235 | def create_page_structure(directory, root_block): 236 | touched_pages = set() 237 | 238 | files_to_pages = dict() 239 | 240 | index_path = os.path.realpath(os.path.join(directory, "index.md")) 241 | readme_path = os.path.realpath(os.path.join(directory, "README.md")) 242 | readme_lower_path = os.path.realpath(os.path.join(directory, "readme.md")) 243 | 244 | # Do the index/readme first to ensure the correct sort order. 245 | if os.path.isfile(index_path): 246 | files_to_pages[index_path] = root_block 247 | elif os.path.isfile(readme_path): 248 | files_to_pages[readme_path] = root_block 249 | elif os.path.isfile(readme_lower_path): 250 | files_to_pages[readme_lower_path] = root_block 251 | 252 | for path in os.listdir(directory): 253 | if path.startswith('.'): 254 | # Skip any "private" files / directories 255 | continue 256 | 257 | if path.lower() == 'index.md' or path.lower() == 'readme.md': 258 | # Skip because we had a special case for this above. 259 | continue 260 | 261 | block = infer_block(root_block, path) 262 | 263 | if not block: 264 | continue 265 | 266 | full_path = os.path.realpath(os.path.join(directory, path)) 267 | 268 | touched_pages.add(block.id) 269 | 270 | if os.path.isdir(full_path): 271 | files_to_pages.update(create_page_structure(full_path, block)) 272 | elif os.path.splitext(full_path)[1].lower() == '.md': 273 | files_to_pages[full_path] = block 274 | 275 | return files_to_pages 276 | 277 | 278 | def sync_directory_to_block(directory, root_block): 279 | # Do Two Passes: First, create blocks for all files that need them 280 | # Keep track of absolute file path -> block 281 | logger.info("Creating page structure..") 282 | files_to_pages = create_page_structure(os.path.realpath(directory), root_block) 283 | 284 | touched_pages = set(block.id for block in files_to_pages.values()) 285 | 286 | # Then, iterate through every single page block created and: 287 | for full_path, block in files_to_pages.items(): 288 | # Lock it 289 | if not block.get(['format', 'block_locked'], default=False): 290 | block.set(['format', 'block_locked'], True) 291 | 292 | if block.icon is None: 293 | block.icon = random_emoji() 294 | 295 | # Sync it. 296 | sync_file_to_block(full_path, block, links=files_to_pages) 297 | 298 | # Sort it. 299 | move_pages_to_end(block) 300 | 301 | # Clean it.
302 | for child in block.children: 303 | # Any children that are pages under block but aren't in touched_pages should be pruned 304 | if child.type == 'page' and child.id not in touched_pages: 305 | child.remove() 306 | 307 | # Technologic. 308 | 309 | def main(): 310 | import sys 311 | logger.addHandler(logging.StreamHandler(sys.stdout)) 312 | logger.setLevel(logging.INFO) 313 | 314 | parser = ArgumentParser() 315 | 316 | parser.add_argument('--notion-token', type=str, default=os.environ.get('NOTION_TOKEN')) 317 | parser.add_argument('docs_path', type=str) 318 | parser.add_argument('notion_url', type=str) 319 | 320 | args = parser.parse_args() 321 | 322 | token = args.notion_token 323 | root_url = args.notion_url 324 | docs_path = args.docs_path 325 | 326 | # add row to notion collection and add a text block with link to the new card 327 | client = NotionClient(token_v2=token) 328 | root_block = client.get_block(root_url) 329 | 330 | sync_directory_to_block(docs_path, root_block) 331 | -------------------------------------------------------------------------------- /notion_docs_sync/markdown.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | import collections 4 | from notion.block import CodeBlock, DividerBlock, HeaderBlock, SubheaderBlock, \ 5 | SubsubheaderBlock, QuoteBlock, TextBlock, NumberedListBlock, \ 6 | BulletedListBlock, ImageBlock, CollectionViewBlock 7 | from mistletoe.block_token import Document 8 | from mistletoe.base_renderer import BaseRenderer 9 | 10 | 11 | NOTION_CODE_LANGUAGES = [ 12 | "ABAP", 13 | "Arduino", 14 | "Bash", 15 | "BASIC", 16 | "C", 17 | "Clojure", 18 | "CoffeeScript", 19 | "C++", 20 | "C#", 21 | "CSS", 22 | "Dart", 23 | "Diff", 24 | "Docker", 25 | "Elixir", 26 | "Elm", 27 | "Erlang", 28 | "Flow", 29 | "Fortran", 30 | "F#", 31 | "Gherkin", 32 | "GLSL", 33 | "Go", 34 | "GraphQL", 35 | "Groovy", 36 | "Haskell", 37 | "HTML", 38 | "Java", 39 | "JavaScript", 40 | "JSON", 41 | "Kotlin", 42 | "LaTeX", 43 | "Less", 44 | "Lisp", 45 | "LiveScript", 46 | "Lua", 47 | "Makefile", 48 | "Markdown", 49 | "Markup", 50 | "MATLAB", 51 | "Nix", 52 | "Objective-C", 53 | "OCaml", 54 | "Pascal", 55 | "Perl", 56 | "PHP", 57 | "Plain Text", 58 | "PowerShell", 59 | "Prolog", 60 | "Python", 61 | "R", 62 | "Reason", 63 | "Ruby", 64 | "Rust", 65 | "Sass", 66 | "Scala", 67 | "Scheme", 68 | "Scss", 69 | "Shell", 70 | "SQL", 71 | "Swift", 72 | "TypeScript", 73 | "VB.Net", 74 | "Verilog", 75 | "VHDL", 76 | "Visual Basic", 77 | "WebAssembly", 78 | "XML", 79 | "YAML" 80 | ] 81 | NOTION_CODE_LANGUAGES_MAPPING = { 82 | lang.lower(): lang for lang in NOTION_CODE_LANGUAGES 83 | } 84 | 85 | 86 | logger = logging.getLogger(__name__) 87 | 88 | NOTION_STYLE_STRIKETHROUGH = "s" 89 | NOTION_STYLE_EMPHASIS = "i" 90 | NOTION_STYLE_STRONG = "b" 91 | NOTION_STYLE_CODE = "c" 92 | NOTION_STYLE_ANCHOR = "a" 93 | 94 | 95 | def flatten(iterable): 96 | return [item for sublist in iterable for item in sublist] 97 | 98 | 99 | def merge_adjacent_textblocks(blocks): 100 | if not blocks: 101 | return 102 | 103 | previous = blocks.pop(0) 104 | 105 | for block in blocks: 106 | if previous["type"] == TextBlock and block["type"] == TextBlock: 107 | previous["title"] += block["title"] 108 | continue 109 | 110 | yield previous 111 | 112 | previous = block 113 | 114 | yield previous 115 | 116 | 117 | def merge_adjacent_tokens(tokens): 118 | if not tokens: 119 | return 120 | 121 | previous = tokens.pop(0) 122 | 123 | for token in tokens: 124 | if 
token[1] == previous[1]: 125 | previous[0] += token[0] 126 | continue 127 | 128 | yield previous 129 | 130 | previous = token 131 | 132 | yield previous 133 | 134 | 135 | def without_notion_text(blocks): 136 | return [block for block in blocks if block['type'] != TextBlock] 137 | 138 | 139 | def only_notion_text(blocks): 140 | notion_tokens = flatten([ block['title'] or [] for block in blocks if block['type'] == TextBlock ]) 141 | 142 | # Combine similar adjacent tokens 143 | return list(merge_adjacent_tokens(notion_tokens)) 144 | 145 | 146 | def collect_notion_text(tokens, block_type, **kwargs): 147 | new_block = { 148 | 'type': block_type, 149 | 'title': only_notion_text(tokens), 150 | **kwargs 151 | } 152 | 153 | return [new_block, *without_notion_text(tokens)] 154 | 155 | 156 | def notion_as_plain_text(tokens): 157 | return "".join([t[0] for t in tokens]) 158 | 159 | 160 | def apply_style(notion_tokens, style, *style_args): 161 | return [ 162 | [literal, existing_styles + [[style, *style_args]]] 163 | for literal, existing_styles in notion_tokens 164 | ] 165 | 166 | 167 | def as_inline_block(title): 168 | return { 169 | "type": TextBlock, 170 | "title": title 171 | } 172 | 173 | 174 | def as_inline_style_block(tokens, style, *style_args): 175 | return as_inline_block(apply_style(only_notion_text(tokens), style, *style_args)) 176 | 177 | 178 | class NotionRenderer(BaseRenderer): 179 | def __init__(self, link_resolver=(lambda path: path), *extras): 180 | super().__init__(*extras) 181 | self.__link_resolver = link_resolver 182 | 183 | def __render_multiple(self, tokens): 184 | return flatten([self.render(t) for t in tokens]) 185 | 186 | def render(self, token): 187 | blocks = self.render_map[token.__class__.__name__](token) 188 | 189 | if blocks is None: 190 | blocks = [] 191 | 192 | if isinstance(blocks, collections.Iterable) and not isinstance(blocks, (str, bytes, dict)): 193 | blocks = list(blocks) 194 | 195 | if not isinstance(blocks, list): 196 | blocks = [blocks] 197 | 198 | return blocks 199 | 200 | def render_document(self, token): 201 | return self.__render_multiple(token.children) 202 | 203 | def render_block_code(self, token): 204 | match_lang = NOTION_CODE_LANGUAGES_MAPPING.get(token.language.lower(), "Plain Text") 205 | 206 | children = self.__render_multiple(token.children) 207 | 208 | code_block = { 209 | "type": CodeBlock, 210 | "language": match_lang, 211 | "title_plaintext": notion_as_plain_text(only_notion_text(children)) 212 | } 213 | 214 | return [code_block, *without_notion_text(children)] 215 | 216 | def render_thematic_break(self, token): 217 | return { 218 | 'type': DividerBlock 219 | } 220 | 221 | def render_heading(self, token): 222 | level = token.level 223 | if level > 3: 224 | logger.debug(f"h{level} not supported in notion.so, converting to h3") 225 | level = 3 226 | 227 | block_type = [HeaderBlock, SubheaderBlock, SubsubheaderBlock][level - 1] 228 | 229 | return collect_notion_text(self.__render_multiple(token.children), block_type) 230 | 231 | def render_quote(self, token): 232 | return collect_notion_text(self.__render_multiple(token.children), QuoteBlock) 233 | 234 | def render_paragraph(self, token): 235 | # Collapse adjacent text blocks 236 | return list(merge_adjacent_textblocks(self.__render_multiple(token.children))) 237 | 238 | def render_list(self, token): 239 | return self.__render_multiple(token.children) 240 | 241 | def render_list_item(self, token): 242 | leader_contains_number = re.match(r'\d', token.leader) 243 | block_type = 
NumberedListBlock if leader_contains_number else BulletedListBlock 244 | 245 | children = self.__render_multiple(token.children) 246 | 247 | title = [] 248 | 249 | if len(children) > 0: 250 | if children[0]['type'] == TextBlock: 251 | title = only_notion_text(children[0:1]) 252 | children = children[1:] 253 | 254 | return { 255 | 'type': block_type, 256 | 'title': title, 257 | 'children': children 258 | } 259 | 260 | def render_table(self, token): 261 | header_row = [notion_as_plain_text(h["title"]) for h in self.render(token.header)] 262 | rows = [ 263 | [ 264 | notion_as_plain_text(c["title"]) 265 | for c in self.render(r) 266 | ] 267 | for r in token.children 268 | ] 269 | 270 | return { 271 | 'type': CollectionViewBlock, 272 | 'rows': rows, 273 | 'schema': [{"name": h, "type": "text"} for h in header_row] 274 | } 275 | 276 | def render_table_row(self, token): 277 | return self.__render_multiple(token.children) 278 | 279 | def render_table_cell(self, token): 280 | return as_inline_block(only_notion_text(self.__render_multiple(token.children))) 281 | 282 | def render_strong(self, token): 283 | return as_inline_style_block(self.__render_multiple(token.children), NOTION_STYLE_STRONG) 284 | 285 | def render_emphasis(self, token): 286 | return as_inline_style_block(self.__render_multiple(token.children), NOTION_STYLE_EMPHASIS) 287 | 288 | def render_inline_code(self, token): 289 | return as_inline_style_block(self.__render_multiple(token.children), NOTION_STYLE_CODE) 290 | 291 | def render_raw_text(self, token): 292 | return as_inline_block([[token.content, []]]) 293 | 294 | def render_strikethrough(self, token): 295 | return as_inline_style_block(self.__render_multiple(token.children), NOTION_STYLE_STRIKETHROUGH) 296 | 297 | def render_link(self, token): 298 | return as_inline_style_block( 299 | self.__render_multiple(token.children), 300 | NOTION_STYLE_ANCHOR, 301 | self.__link_resolver(token.target) 302 | ) 303 | 304 | def render_escape_sequence(self, token): 305 | return self.__render_multiple(token.children) 306 | 307 | def render_line_break(self, token): 308 | return as_inline_block([[' ', []]]) 309 | 310 | def render_image(self, token): 311 | if token.title: 312 | alt = [[ token.title, [] ]] 313 | else: 314 | alt = notion_as_plain_text(only_notion_text(self.__render_multiple(token.children))) 315 | 316 | return { 317 | 'type': ImageBlock, 318 | 'display_source': token.src, 319 | 'source': token.src, 320 | 'caption': alt 321 | } 322 | 323 | 324 | def convert(markdown, link_resolver=(lambda path: path)): 325 | with NotionRenderer(link_resolver=link_resolver) as renderer: 326 | return renderer.render(Document(markdown)) 327 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | category = "main" 3 | description = "Screen-scraping library" 4 | name = "beautifulsoup4" 5 | optional = false 6 | python-versions = "*" 7 | version = "4.8.2" 8 | 9 | [package.dependencies] 10 | soupsieve = ">=1.2" 11 | 12 | [package.extras] 13 | html5lib = ["html5lib"] 14 | lxml = ["lxml"] 15 | 16 | [[package]] 17 | category = "main" 18 | description = "Dummy package for Beautiful Soup" 19 | name = "bs4" 20 | optional = false 21 | python-versions = "*" 22 | version = "0.0.1" 23 | 24 | [package.dependencies] 25 | beautifulsoup4 = "*" 26 | 27 | [[package]] 28 | category = "main" 29 | description = "A decorator for caching properties in classes." 
30 | name = "cached-property" 31 | optional = false 32 | python-versions = "*" 33 | version = "1.5.1" 34 | 35 | [[package]] 36 | category = "main" 37 | description = "Python package for providing Mozilla's CA Bundle." 38 | name = "certifi" 39 | optional = false 40 | python-versions = "*" 41 | version = "2019.11.28" 42 | 43 | [[package]] 44 | category = "main" 45 | description = "Universal encoding detector for Python 2 and 3" 46 | name = "chardet" 47 | optional = false 48 | python-versions = "*" 49 | version = "3.0.4" 50 | 51 | [[package]] 52 | category = "main" 53 | description = "Python parser for the CommonMark Markdown spec" 54 | name = "commonmark" 55 | optional = false 56 | python-versions = "*" 57 | version = "0.9.1" 58 | 59 | [package.extras] 60 | test = ["flake8 (3.7.8)", "hypothesis (3.55.3)"] 61 | 62 | [[package]] 63 | category = "main" 64 | description = "Dictdiffer is a library that helps you to diff and patch dictionaries." 65 | name = "dictdiffer" 66 | optional = false 67 | python-versions = "*" 68 | version = "0.8.1" 69 | 70 | [package.extras] 71 | all = ["Sphinx (>=1.4.4)", "sphinx-rtd-theme (>=0.1.9)", "check-manifest (>=0.25)", "coverage (>=4.0)", "isort (>=4.2.2)", "mock (>=1.3.0)", "pydocstyle (>=1.0.0)", "pytest-cov (>=1.8.0)", "pytest-pep8 (>=1.0.6)", "pytest (>=2.8.0)", "tox (>=3.7.0)", "numpy (>=1.11.0)"] 72 | docs = ["Sphinx (>=1.4.4)", "sphinx-rtd-theme (>=0.1.9)"] 73 | numpy = ["numpy (>=1.11.0)"] 74 | tests = ["check-manifest (>=0.25)", "coverage (>=4.0)", "isort (>=4.2.2)", "mock (>=1.3.0)", "pydocstyle (>=1.0.0)", "pytest-cov (>=1.8.0)", "pytest-pep8 (>=1.0.6)", "pytest (>=2.8.0)", "tox (>=3.7.0)"] 75 | 76 | [[package]] 77 | category = "main" 78 | description = "Emoji for Python" 79 | name = "emoji" 80 | optional = false 81 | python-versions = "*" 82 | version = "0.5.4" 83 | 84 | [package.extras] 85 | dev = ["nose", "coverage", "coveralls"] 86 | 87 | [[package]] 88 | category = "main" 89 | description = "Internationalized Domain Names in Applications (IDNA)" 90 | name = "idna" 91 | optional = false 92 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 93 | version = "2.9" 94 | 95 | [[package]] 96 | category = "main" 97 | description = "A fast, extensible Markdown parser in pure Python." 
98 | name = "mistletoe" 99 | optional = false 100 | python-versions = "~=3.3" 101 | version = "0.7.2" 102 | 103 | [[package]] 104 | category = "main" 105 | description = "Unofficial Python API client for Notion.so" 106 | name = "notion" 107 | optional = false 108 | python-versions = ">=3.5" 109 | version = "0.0.25" 110 | 111 | [package.dependencies] 112 | bs4 = "*" 113 | cached-property = "*" 114 | commonmark = "*" 115 | dictdiffer = "*" 116 | python-slugify = "*" 117 | requests = "*" 118 | tzlocal = "*" 119 | 120 | [[package]] 121 | category = "main" 122 | description = "Add .env support to your django/flask apps in development and deployments" 123 | name = "python-dotenv" 124 | optional = false 125 | python-versions = "*" 126 | version = "0.12.0" 127 | 128 | [package.extras] 129 | cli = ["click (>=5.0)"] 130 | 131 | [[package]] 132 | category = "main" 133 | description = "Parse and manage posts with YAML (or other) frontmatter" 134 | name = "python-frontmatter" 135 | optional = false 136 | python-versions = "*" 137 | version = "0.5.0" 138 | 139 | [package.dependencies] 140 | PyYAML = "*" 141 | six = "*" 142 | 143 | [[package]] 144 | category = "main" 145 | description = "A Python Slugify application that handles Unicode" 146 | name = "python-slugify" 147 | optional = false 148 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 149 | version = "4.0.0" 150 | 151 | [package.dependencies] 152 | text-unidecode = ">=1.3" 153 | 154 | [package.extras] 155 | unidecode = ["Unidecode (>=1.1.1)"] 156 | 157 | [[package]] 158 | category = "main" 159 | description = "World timezone definitions, modern and historical" 160 | name = "pytz" 161 | optional = false 162 | python-versions = "*" 163 | version = "2019.3" 164 | 165 | [[package]] 166 | category = "main" 167 | description = "YAML parser and emitter for Python" 168 | name = "pyyaml" 169 | optional = false 170 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 171 | version = "5.3.1" 172 | 173 | [[package]] 174 | category = "main" 175 | description = "Python HTTP for Humans." 176 | name = "requests" 177 | optional = false 178 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 179 | version = "2.23.0" 180 | 181 | [package.dependencies] 182 | certifi = ">=2017.4.17" 183 | chardet = ">=3.0.2,<4" 184 | idna = ">=2.5,<3" 185 | urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26" 186 | 187 | [package.extras] 188 | security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] 189 | socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"] 190 | 191 | [[package]] 192 | category = "main" 193 | description = "Python 2 and 3 compatibility utilities" 194 | name = "six" 195 | optional = false 196 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 197 | version = "1.14.0" 198 | 199 | [[package]] 200 | category = "main" 201 | description = "A modern CSS selector implementation for Beautiful Soup." 
202 | name = "soupsieve" 203 | optional = false 204 | python-versions = ">=3.5" 205 | version = "2.0" 206 | 207 | [[package]] 208 | category = "main" 209 | description = "The most basic Text::Unidecode port" 210 | name = "text-unidecode" 211 | optional = false 212 | python-versions = "*" 213 | version = "1.3" 214 | 215 | [[package]] 216 | category = "main" 217 | description = "tzinfo object for the local timezone" 218 | name = "tzlocal" 219 | optional = false 220 | python-versions = "*" 221 | version = "2.0.0" 222 | 223 | [package.dependencies] 224 | pytz = "*" 225 | 226 | [[package]] 227 | category = "main" 228 | description = "HTTP library with thread-safe connection pooling, file post, and more." 229 | name = "urllib3" 230 | optional = false 231 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" 232 | version = "1.25.8" 233 | 234 | [package.extras] 235 | brotli = ["brotlipy (>=0.6.0)"] 236 | secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] 237 | socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] 238 | 239 | [metadata] 240 | content-hash = "7be7b19a87d7fcd89927e752e53a9dc50d24832b8b358d7d5a0fc8c2e6191325" 241 | python-versions = "^3.8" 242 | 243 | [metadata.files] 244 | beautifulsoup4 = [ 245 | {file = "beautifulsoup4-4.8.2-py2-none-any.whl", hash = "sha256:e1505eeed31b0f4ce2dbb3bc8eb256c04cc2b3b72af7d551a4ab6efd5cbe5dae"}, 246 | {file = "beautifulsoup4-4.8.2-py3-none-any.whl", hash = "sha256:9fbb4d6e48ecd30bcacc5b63b94088192dcda178513b2ae3c394229f8911b887"}, 247 | {file = "beautifulsoup4-4.8.2.tar.gz", hash = "sha256:05fd825eb01c290877657a56df4c6e4c311b3965bda790c613a3d6fb01a5462a"}, 248 | ] 249 | bs4 = [ 250 | {file = "bs4-0.0.1.tar.gz", hash = "sha256:36ecea1fd7cc5c0c6e4a1ff075df26d50da647b75376626cc186e2212886dd3a"}, 251 | ] 252 | cached-property = [ 253 | {file = "cached-property-1.5.1.tar.gz", hash = "sha256:9217a59f14a5682da7c4b8829deadbfc194ac22e9908ccf7c8820234e80a1504"}, 254 | {file = "cached_property-1.5.1-py2.py3-none-any.whl", hash = "sha256:3a026f1a54135677e7da5ce819b0c690f156f37976f3e30c5430740725203d7f"}, 255 | ] 256 | certifi = [ 257 | {file = "certifi-2019.11.28-py2.py3-none-any.whl", hash = "sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3"}, 258 | {file = "certifi-2019.11.28.tar.gz", hash = "sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f"}, 259 | ] 260 | chardet = [ 261 | {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"}, 262 | {file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"}, 263 | ] 264 | commonmark = [ 265 | {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, 266 | {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, 267 | ] 268 | dictdiffer = [ 269 | {file = "dictdiffer-0.8.1-py2.py3-none-any.whl", hash = "sha256:d79d9a39e459fe33497c858470ca0d2e93cb96621751de06d631856adfd9c390"}, 270 | {file = "dictdiffer-0.8.1.tar.gz", hash = "sha256:1adec0d67cdf6166bda96ae2934ddb5e54433998ceab63c984574d187cc563d2"}, 271 | ] 272 | emoji = [ 273 | {file = "emoji-0.5.4.tar.gz", hash = "sha256:60652d3a2dcee5b8af8acb097c31776fb6d808027aeb7221830f72cdafefc174"}, 274 | ] 275 | idna = [ 276 | {file = "idna-2.9-py2.py3-none-any.whl", hash = 
"sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa"}, 277 | {file = "idna-2.9.tar.gz", hash = "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb"}, 278 | ] 279 | mistletoe = [ 280 | {file = "mistletoe-0.7.2-py3-none-any.whl", hash = "sha256:3e2d31b2fa6231ea2ee46981274ebac8d5e5736e3aad2d1cd449e2c053b7023b"}, 281 | {file = "mistletoe-0.7.2.tar.gz", hash = "sha256:24d0f18cc5f0381c2cfb8a24ef3de83eb9f7929cb7d0e71ecb164b671d86e6a3"}, 282 | ] 283 | notion = [ 284 | {file = "notion-0.0.25-py3-none-any.whl", hash = "sha256:d6ad33fab45fbf31bfe5186a3b0dd50dc88893a252f4ba45f4ddf6a8a467237f"}, 285 | {file = "notion-0.0.25.tar.gz", hash = "sha256:96b1e5ed495b6b0d6ace21fbf49c409d3c46be710d08cecaee12cb364b8d0049"}, 286 | ] 287 | python-dotenv = [ 288 | {file = "python-dotenv-0.12.0.tar.gz", hash = "sha256:92b3123fb2d58a284f76cc92bfe4ee6c502c32ded73e8b051c4f6afc8b6751ed"}, 289 | {file = "python_dotenv-0.12.0-py2.py3-none-any.whl", hash = "sha256:81822227f771e0cab235a2939f0f265954ac4763cafd806d845801c863bf372f"}, 290 | ] 291 | python-frontmatter = [ 292 | {file = "python-frontmatter-0.5.0.tar.gz", hash = "sha256:a9c2e90fc38e9f0c68d8b82299040f331ca3b8525ac7fa5f6beffef52b26c426"}, 293 | {file = "python_frontmatter-0.5.0-py3-none-any.whl", hash = "sha256:a7dcdfdaf498d488dce98bfa9452f8b70f803a923760ceab1ebd99291d98d28a"}, 294 | ] 295 | python-slugify = [ 296 | {file = "python-slugify-4.0.0.tar.gz", hash = "sha256:a8fc3433821140e8f409a9831d13ae5deccd0b033d4744d94b31fea141bdd84c"}, 297 | ] 298 | pytz = [ 299 | {file = "pytz-2019.3-py2.py3-none-any.whl", hash = "sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d"}, 300 | {file = "pytz-2019.3.tar.gz", hash = "sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be"}, 301 | ] 302 | pyyaml = [ 303 | {file = "PyYAML-5.3.1-cp27-cp27m-win32.whl", hash = "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f"}, 304 | {file = "PyYAML-5.3.1-cp27-cp27m-win_amd64.whl", hash = "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76"}, 305 | {file = "PyYAML-5.3.1-cp35-cp35m-win32.whl", hash = "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2"}, 306 | {file = "PyYAML-5.3.1-cp35-cp35m-win_amd64.whl", hash = "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c"}, 307 | {file = "PyYAML-5.3.1-cp36-cp36m-win32.whl", hash = "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2"}, 308 | {file = "PyYAML-5.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648"}, 309 | {file = "PyYAML-5.3.1-cp37-cp37m-win32.whl", hash = "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"}, 310 | {file = "PyYAML-5.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf"}, 311 | {file = "PyYAML-5.3.1-cp38-cp38-win32.whl", hash = "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97"}, 312 | {file = "PyYAML-5.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee"}, 313 | {file = "PyYAML-5.3.1.tar.gz", hash = "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"}, 314 | ] 315 | requests = [ 316 | {file = "requests-2.23.0-py2.py3-none-any.whl", hash = "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee"}, 317 | {file = "requests-2.23.0.tar.gz", hash = 
"sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6"}, 318 | ] 319 | six = [ 320 | {file = "six-1.14.0-py2.py3-none-any.whl", hash = "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"}, 321 | {file = "six-1.14.0.tar.gz", hash = "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a"}, 322 | ] 323 | soupsieve = [ 324 | {file = "soupsieve-2.0-py2.py3-none-any.whl", hash = "sha256:fcd71e08c0aee99aca1b73f45478549ee7e7fc006d51b37bec9e9def7dc22b69"}, 325 | {file = "soupsieve-2.0.tar.gz", hash = "sha256:e914534802d7ffd233242b785229d5ba0766a7f487385e3f714446a07bf540ae"}, 326 | ] 327 | text-unidecode = [ 328 | {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, 329 | {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, 330 | ] 331 | tzlocal = [ 332 | {file = "tzlocal-2.0.0-py2.py3-none-any.whl", hash = "sha256:11c9f16e0a633b4b60e1eede97d8a46340d042e67b670b290ca526576e039048"}, 333 | {file = "tzlocal-2.0.0.tar.gz", hash = "sha256:949b9dd5ba4be17190a80c0268167d7e6c92c62b30026cf9764caf3e308e5590"}, 334 | ] 335 | urllib3 = [ 336 | {file = "urllib3-1.25.8-py2.py3-none-any.whl", hash = "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc"}, 337 | {file = "urllib3-1.25.8.tar.gz", hash = "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc"}, 338 | ] 339 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "notion-docs-sync" 3 | version = "0.1.0" 4 | description = "" 5 | authors = ["James Ward "] 6 | license = "MIT" 7 | 8 | [tool.poetry.dependencies] 9 | python = "^3.8" 10 | notion = "^0.0.25" 11 | python-dotenv = "^0.12.0" 12 | mistletoe = "^0.7.2" 13 | emoji = "^0.5.4" 14 | python-frontmatter = "^0.5.0" 15 | 16 | [tool.poetry.dev-dependencies] 17 | 18 | [tool.poetry.scripts] 19 | notion-docs-sync = "notion_docs_sync:main" 20 | 21 | [build-system] 22 | requires = ["poetry>=0.12"] 23 | build-backend = "poetry.masonry.api" 24 | 25 | --------------------------------------------------------------------------------