├── .github ├── FUNDING.yml └── workflows │ └── tests.yml ├── scriv.d └── README.txt ├── src └── dinghy │ ├── __init__.py │ ├── graphql │ ├── author_frag.graphql │ ├── repo_frag.graphql │ ├── org_project_entries.graphql │ ├── reviewthread_frag.graphql │ ├── comment_frag.graphql │ ├── pr_reviews.graphql │ ├── issue_comments.graphql │ ├── pr_comments.graphql │ ├── review_frag.graphql │ ├── pr_reviewthreads.graphql │ ├── release_frag.graphql │ ├── review_comments.graphql │ ├── repo_releases.graphql │ ├── reviewthread_comments.graphql │ ├── project_entries_frag.graphql │ ├── repo_issues.graphql │ ├── search_entries.graphql │ ├── repo_pull_requests.graphql │ ├── issue_frag.graphql │ └── pull_request_frag.graphql │ ├── __main__.py │ ├── adhoc.py │ ├── jinja_helpers.py │ ├── cli.py │ ├── helpers.py │ ├── graphql_helpers.py │ ├── templates │ └── digest.html.j2 │ └── digest.py ├── .gitignore ├── dev-requirements.txt ├── .ignore ├── MANIFEST.in ├── tox.ini ├── .editorconfig ├── tests └── test_helpers.py ├── pyproject.toml ├── Makefile ├── README.rst ├── LICENSE.txt └── CHANGELOG.rst /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: nedbat 2 | -------------------------------------------------------------------------------- /scriv.d/README.txt: -------------------------------------------------------------------------------- 1 | This directory will hold the changelog entries managed by scriv. 2 | -------------------------------------------------------------------------------- /src/dinghy/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Dinghy daily digest tool. 
3 | """ 4 | 5 | __version__ = "1.4.1" 6 | -------------------------------------------------------------------------------- /src/dinghy/graphql/author_frag.graphql: -------------------------------------------------------------------------------- 1 | fragment authorData on Actor { 2 | __typename 3 | login 4 | } 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | out_*.json 2 | save_*.json 3 | 4 | *.egg-info/ 5 | __pycache__ 6 | build/ 7 | dist/ 8 | lab/ 9 | 10 | .idea 11 | -------------------------------------------------------------------------------- /src/dinghy/graphql/repo_frag.graphql: -------------------------------------------------------------------------------- 1 | fragment repoData on Repository { 2 | owner { 3 | login 4 | } 5 | name 6 | nameWithOwner 7 | url 8 | } 9 | -------------------------------------------------------------------------------- /dev-requirements.txt: -------------------------------------------------------------------------------- 1 | -e . 2 | black 3 | build 4 | check-manifest 5 | freezegun 6 | pylint 7 | pytest 8 | readme_renderer 9 | scriv 10 | tox 11 | twine 12 | -------------------------------------------------------------------------------- /src/dinghy/__main__.py: -------------------------------------------------------------------------------- 1 | """Enable 'python -m dinghy'.""" 2 | 3 | from .cli import cli 4 | 5 | # pylint: disable=unexpected-keyword-arg 6 | # pylint: disable=no-value-for-parameter 7 | cli(prog_name="dinghy") 8 | -------------------------------------------------------------------------------- /.ignore: -------------------------------------------------------------------------------- 1 | # .ignore to control what gets searched. 
2 | build 3 | dist 4 | htmlcov 5 | .tox* 6 | .coverage* 7 | _build 8 | _spell 9 | *.egg 10 | *.egg-info 11 | .mypy_cache 12 | .pytest_cache 13 | tmp 14 | out_*.json 15 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include .editorconfig 2 | include .ignore 3 | include dev-requirements.txt 4 | include Makefile 5 | include tox.ini 6 | include *.rst 7 | 8 | recursive-include tests * 9 | recursive-include scriv.d * 10 | -------------------------------------------------------------------------------- /src/dinghy/graphql/org_project_entries.graphql: -------------------------------------------------------------------------------- 1 | query getOrgProjectItems( 2 | $org: String! 3 | $projectNumber: Int! 4 | $after: String 5 | ) { 6 | organization(login: $org) { 7 | project: projectV2(number: $projectNumber) { 8 | ...projectV2Data # fragment: project_entries_frag.graphql 9 | } 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /src/dinghy/graphql/reviewthread_frag.graphql: -------------------------------------------------------------------------------- 1 | fragment reviewThreadData on PullRequestReviewThread { 2 | id 3 | isResolved 4 | comments(first: 100) { 5 | totalCount 6 | nodes { 7 | pullRequestReview { 8 | id 9 | } 10 | ...commentData # fragment: comment_frag.graphql 11 | } 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py3{9,10,11,12,13} 3 | 4 | [testenv] 5 | package = wheel 6 | wheel_build_env = .pkg 7 | deps = 8 | -r{toxinidir}/dev-requirements.txt 9 | commands = 10 | python -m pytest {posargs} tests 11 | 12 | [testenv:.pkg] 13 | # Force wheels to be built with the latest pip, wheel, and setuptools. 
14 | set_env = 15 | VIRTUALENV_DOWNLOAD=1 16 | -------------------------------------------------------------------------------- /src/dinghy/graphql/comment_frag.graphql: -------------------------------------------------------------------------------- 1 | # Comment is an interface for a number of kinds of comments, not all of which 2 | # have a url. 3 | fragment commentData on Comment { 4 | id 5 | bodyText 6 | updatedAt 7 | author { 8 | ...authorData # fragment: author_frag.graphql 9 | } 10 | ... on IssueComment { 11 | url 12 | } 13 | ... on PullRequestReviewComment { 14 | url 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /src/dinghy/graphql/pr_reviews.graphql: -------------------------------------------------------------------------------- 1 | query getPrReviews( 2 | $id: ID! 3 | $after: String 4 | ) { 5 | node(id:$id) { 6 | ... on PullRequest { 7 | reviews(first: 100, after: $after) { 8 | pageInfo { 9 | hasNextPage 10 | endCursor 11 | } 12 | nodes { 13 | ...reviewData # fragment: review_frag.graphql 14 | } 15 | } 16 | } 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /src/dinghy/graphql/issue_comments.graphql: -------------------------------------------------------------------------------- 1 | query getIssueComments( 2 | $id: ID! 3 | $after: String 4 | ) { 5 | node(id:$id) { 6 | ... on Issue { 7 | comments(first: 100, after: $after) { 8 | pageInfo { 9 | hasNextPage 10 | endCursor 11 | } 12 | nodes { 13 | ...commentData # fragment: comment_frag.graphql 14 | } 15 | } 16 | } 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /src/dinghy/graphql/pr_comments.graphql: -------------------------------------------------------------------------------- 1 | query getPrComments( 2 | $id: ID! 3 | $after: String 4 | ) { 5 | node(id:$id) { 6 | ... 
on PullRequest { 7 | comments(first: 100, after: $after) { 8 | pageInfo { 9 | hasNextPage 10 | endCursor 11 | } 12 | nodes { 13 | ...commentData # fragment: comment_frag.graphql 14 | } 15 | } 16 | } 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /src/dinghy/graphql/review_frag.graphql: -------------------------------------------------------------------------------- 1 | fragment reviewData on PullRequestReview { 2 | id 3 | url 4 | state 5 | author { 6 | ...authorData # fragment: author_frag.graphql 7 | } 8 | bodyText 9 | updatedAt 10 | comments(first: 100) { 11 | totalCount 12 | nodes { 13 | pullRequestReview { 14 | id 15 | } 16 | ...commentData # fragment: comment_frag.graphql 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/dinghy/graphql/pr_reviewthreads.graphql: -------------------------------------------------------------------------------- 1 | query getPrReviewThreads( 2 | $id: ID! 3 | $after: String 4 | ) { 5 | node(id:$id) { 6 | ... 
on PullRequest { 7 | reviewThreads(first: 100, after: $after) { 8 | pageInfo { 9 | hasNextPage 10 | endCursor 11 | } 12 | nodes { 13 | ...reviewThreadData # fragment: reviewthread_frag.graphql 14 | } 15 | } 16 | } 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /src/dinghy/graphql/release_frag.graphql: -------------------------------------------------------------------------------- 1 | # https://docs.github.com/en/graphql/reference/objects#release 2 | fragment releaseData on Release { 3 | __typename 4 | repository { 5 | ...repoData # fragment: repo_frag.graphql 6 | } 7 | name 8 | description 9 | isDraft 10 | isPrerelease 11 | url 12 | createdAt 13 | updatedAt 14 | tagCommit { abbreviatedOid } 15 | tagName 16 | author { 17 | ...authorData # fragment: author_frag.graphql 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/dinghy/graphql/review_comments.graphql: -------------------------------------------------------------------------------- 1 | query getReviewComments( 2 | $id: ID! 3 | $after: String 4 | ) { 5 | node(id:$id) { 6 | ... on PullRequestReview { 7 | comments(first: 100, after: $after) { 8 | pageInfo { 9 | hasNextPage 10 | endCursor 11 | } 12 | nodes { 13 | pullRequestReview { 14 | id 15 | } 16 | ...commentData # fragment: comment_frag.graphql 17 | } 18 | } 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/dinghy/graphql/repo_releases.graphql: -------------------------------------------------------------------------------- 1 | query getRepoReleases( 2 | $owner: String! 3 | $name: String! 
4 | $after: String 5 | ) { 6 | repository(owner: $owner, name: $name) { 7 | ...repoData # fragment: repo_frag.graphql 8 | releases( 9 | first: 100 10 | after: $after 11 | ) { 12 | pageInfo { 13 | hasNextPage 14 | endCursor 15 | } 16 | nodes { 17 | ...releaseData # fragment: release_frag.graphql 18 | } 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/dinghy/graphql/reviewthread_comments.graphql: -------------------------------------------------------------------------------- 1 | query getReviewThreadComments( 2 | $id: ID! 3 | $after: String 4 | ) { 5 | node(id:$id) { 6 | ... on PullRequestReviewThread { 7 | comments(first: 100, after: $after) { 8 | pageInfo { 9 | hasNextPage 10 | endCursor 11 | } 12 | nodes { 13 | pullRequestReview { 14 | id 15 | } 16 | ...commentData # fragment: comment_frag.graphql 17 | } 18 | } 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/dinghy/graphql/project_entries_frag.graphql: -------------------------------------------------------------------------------- 1 | # A project in an org or repo. 2 | fragment projectV2Data on ProjectV2 { 3 | title 4 | url 5 | items(first: 10, after: $after) { 6 | pageInfo { 7 | hasNextPage 8 | endCursor 9 | } 10 | nodes { 11 | content { 12 | ... on Issue { 13 | ...issueData # fragment: issue_frag.graphql 14 | } 15 | ... on PullRequest { 16 | ...pullRequestData # fragment: pull_request_frag.graphql 17 | } 18 | } 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/dinghy/graphql/repo_issues.graphql: -------------------------------------------------------------------------------- 1 | query getRepoIssues( 2 | $owner: String! 3 | $name: String! 4 | $since: DateTime! 
5 | $after: String 6 | ) { 7 | repository(owner: $owner, name: $name) { 8 | ...repoData # fragment: repo_frag.graphql 9 | issues( 10 | first: 100 11 | filterBy: { 12 | since: $since 13 | }, 14 | after: $after 15 | ) { 16 | pageInfo { 17 | hasNextPage 18 | endCursor 19 | } 20 | nodes { 21 | ...issueData # fragment: issue_frag.graphql 22 | } 23 | } 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /src/dinghy/graphql/search_entries.graphql: -------------------------------------------------------------------------------- 1 | query searchItems( 2 | $query: String! 3 | $after: String 4 | ) { 5 | # 10 at a time is slow, but more than that always bumps against node limits. 6 | search(query: $query, type: ISSUE, first: 10, after: $after) { 7 | pageInfo { 8 | hasNextPage 9 | endCursor 10 | } 11 | nodes { 12 | ... on Issue { 13 | ...issueData # fragment: issue_frag.graphql 14 | } 15 | ... on PullRequest { 16 | ...pullRequestData # fragment: pull_request_frag.graphql 17 | } 18 | } 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /src/dinghy/graphql/repo_pull_requests.graphql: -------------------------------------------------------------------------------- 1 | query getPullRequests( 2 | $owner: String! 3 | $name: String! 4 | $after: String 5 | ) { 6 | repository(owner: $owner, name: $name) { 7 | ...repoData # fragment: repo_frag.graphql 8 | pullRequests( 9 | first: 10 10 | orderBy: { 11 | field: UPDATED_AT 12 | direction: DESC 13 | } 14 | after: $after 15 | ) { 16 | pageInfo { 17 | hasNextPage 18 | endCursor 19 | } 20 | nodes { 21 | ...pullRequestData # fragment: pull_request_frag.graphql 22 | } 23 | } 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # This file is for unifying the coding style for different editors and IDEs. 
2 | # More information at http://EditorConfig.org 3 | 4 | root = true 5 | 6 | [*] 7 | charset = utf-8 8 | end_of_line = lf 9 | indent_size = 4 10 | indent_style = space 11 | insert_final_newline = true 12 | max_line_length = 80 13 | trim_trailing_whitespace = true 14 | 15 | [*.{yml,yaml}] 16 | indent_size = 2 17 | 18 | [*.graphql] 19 | indent_size = 2 20 | 21 | [*.html.j2] 22 | indent_size = 2 23 | 24 | [*.rst] 25 | max_line_length = 79 26 | 27 | [Makefile] 28 | indent_style = tab 29 | indent_size = 8 30 | 31 | [*.diff] 32 | trim_trailing_whitespace = false 33 | 34 | [.git/*] 35 | trim_trailing_whitespace = false 36 | -------------------------------------------------------------------------------- /src/dinghy/graphql/issue_frag.graphql: -------------------------------------------------------------------------------- 1 | # https://docs.github.com/en/graphql/reference/objects#issue 2 | fragment issueData on Issue { 3 | __typename 4 | id 5 | repository { 6 | ...repoData # fragment: repo_frag.graphql 7 | } 8 | number 9 | url 10 | title 11 | state 12 | stateReason 13 | createdAt 14 | updatedAt 15 | closed 16 | closedAt 17 | author { 18 | ...authorData # fragment: author_frag.graphql 19 | } 20 | bodyText 21 | comments(last: 100) { 22 | totalCount 23 | nodes { 24 | ...commentData # fragment: comment_frag.graphql 25 | } 26 | } 27 | labels(first: 30) { 28 | nodes { 29 | color 30 | name 31 | } 32 | } 33 | # Issues have timelineItems, but added or removed from projectNext isn't listed. 
34 | } 35 | -------------------------------------------------------------------------------- /src/dinghy/graphql/pull_request_frag.graphql: -------------------------------------------------------------------------------- 1 | # https://docs.github.com/en/graphql/reference/objects#pullrequest 2 | fragment pullRequestData on PullRequest { 3 | __typename 4 | id 5 | repository { 6 | ...repoData # fragment: repo_frag.graphql 7 | } 8 | author { 9 | ...authorData # fragment: author_frag.graphql 10 | } 11 | number 12 | title 13 | url 14 | bodyText 15 | createdAt 16 | updatedAt 17 | closed 18 | closedAt 19 | merged 20 | mergedAt 21 | isDraft 22 | labels(first: 30) { 23 | nodes { 24 | color 25 | name 26 | } 27 | } 28 | comments(first: 100) { 29 | totalCount 30 | nodes { 31 | ...commentData # fragment: comment_frag.graphql 32 | } 33 | } 34 | reviews(first: 100) { 35 | totalCount 36 | nodes { 37 | ...reviewData # fragment: review_frag.graphql 38 | } 39 | } 40 | reviewThreads(first: 100) { 41 | # https://docs.github.com/en/graphql/reference/objects#pullrequestreviewthread 42 | totalCount 43 | nodes { 44 | ...reviewThreadData # fragment: reviewthread_frag.graphql 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | # Run dinghy CI 2 | 3 | name: "Test Suite" 4 | 5 | on: 6 | push: 7 | pull_request: 8 | workflow_dispatch: 9 | 10 | permissions: 11 | contents: read 12 | 13 | defaults: 14 | run: 15 | shell: bash 16 | 17 | concurrency: 18 | group: "${{ github.workflow }}-${{ github.ref }}" 19 | cancel-in-progress: true 20 | 21 | env: 22 | PIP_DISABLE_PIP_VERSION_CHECK: 1 23 | 24 | jobs: 25 | tests: 26 | name: "Test on ${{ matrix.os }}" 27 | runs-on: "${{ matrix.os }}-latest" 28 | 29 | strategy: 30 | fail-fast: false 31 | matrix: 32 | os: 33 | - ubuntu 34 | - macos 35 | - windows 36 | 37 | steps: 38 | - name: "Check out 
the repo" 39 | uses: "actions/checkout@v4" 40 | with: 41 | persist-credentials: false 42 | 43 | - name: "Set up Python" 44 | id: "setup-python" 45 | uses: "actions/setup-python@v5" 46 | with: 47 | # The last listed Python version is the default. 48 | python-version: | 49 | 3.9 50 | 3.10 51 | 3.11 52 | 3.12 53 | 3.13 54 | 55 | - name: "Install dependencies" 56 | run: | 57 | python -m pip install -U setuptools tox 58 | 59 | - name: "Run tox" 60 | run: | 61 | python -m tox 62 | -------------------------------------------------------------------------------- /src/dinghy/adhoc.py: -------------------------------------------------------------------------------- 1 | """ 2 | A module-main for running ad-hoc GitHub GraphQL queries. 3 | 4 | After installing dinghy, run it like this: 5 | 6 | $ python -m dinghy.adhoc --help 7 | 8 | """ 9 | 10 | import json 11 | import os 12 | import sys 13 | 14 | import click 15 | import click_log 16 | 17 | from .cli import main_run 18 | from .graphql_helpers import GraphqlHelper 19 | 20 | 21 | logger = click_log.basic_config("dinghy") 22 | 23 | 24 | TYPES = { 25 | "int": int, 26 | "str": str, 27 | } 28 | 29 | 30 | @click.command() 31 | @click_log.simple_verbosity_option(logger) 32 | @click.option( 33 | "--nodes", 34 | is_flag=True, 35 | help="Get paginated list of nodes instead of raw result", 36 | ) 37 | @click.argument("query_file", type=click.File("r")) 38 | @click.argument("var", metavar="[VAR[:type]=VAL]...", nargs=-1) 39 | def adhoc(nodes, query_file, var): 40 | """ 41 | Run an ad-hoc GraphQL query. 
42 | """ 43 | query = query_file.read() 44 | variables = {} 45 | for v in var: 46 | name, val = v.split("=", 1) 47 | if ":" in name: 48 | name, type_name = name.split(":") 49 | val = TYPES[type_name](val) 50 | variables[name] = val 51 | 52 | token = os.environ.get("GITHUB_TOKEN", "") 53 | gql = GraphqlHelper("https://api.github.com/graphql", token) 54 | if nodes: 55 | data, _ = main_run( 56 | gql.nodes(query=query, variables=variables, clear_nodes=False) 57 | ) 58 | else: 59 | data = main_run(gql.execute(query=query, variables=variables)) 60 | json.dump(data, sys.stdout, indent=2) 61 | 62 | 63 | if __name__ == "__main__": 64 | # pylint: disable=no-value-for-parameter 65 | adhoc() 66 | -------------------------------------------------------------------------------- /src/dinghy/jinja_helpers.py: -------------------------------------------------------------------------------- 1 | """ 2 | Utilities for working with Jina2 templates. 3 | """ 4 | 5 | import colorsys 6 | import datetime 7 | from pathlib import Path 8 | 9 | import aiofiles 10 | import emoji 11 | import jinja2 12 | 13 | from dinghy.helpers import slugify 14 | 15 | 16 | def datetime_format(value, fmt="%m-%d %H:%M"): 17 | """Format a datetime or ISO datetime string, for Jinja filtering.""" 18 | if isinstance(value, str): 19 | value = datetime.datetime.fromisoformat(value.replace("Z", "+00:00")) 20 | return value.strftime(fmt) 21 | 22 | 23 | def label_color_css(bg_color): 24 | """Create CSS for a label color.""" 25 | r, g, b = [int(bg_color[i : i + 2], 16) / 255 for i in [0, 2, 4]] 26 | h, l, s = colorsys.rgb_to_hls(r, g, b) 27 | return "".join( 28 | f"--label-{ltr}:{int(val * fac)};" 29 | for ltr, val, fac in zip( 30 | "rgbhsl", [r, g, b, h, s, l], [255, 255, 255, 360, 100, 100] 31 | ) 32 | ) 33 | 34 | 35 | def render_jinja(template_filename, **variables): 36 | """Render a template file, with variables.""" 37 | jenv = jinja2.Environment( 38 | loader=jinja2.FileSystemLoader( 39 | [ 40 | Path("."), 41 | 
Path(__file__).parent / "templates", 42 | ] 43 | ), 44 | autoescape=True, 45 | ) 46 | jenv.filters["datetime"] = datetime_format 47 | jenv.filters["label_color_css"] = label_color_css 48 | jenv.filters["slugify"] = slugify 49 | template = jenv.get_template(template_filename) 50 | html = template.render(**variables) 51 | return html 52 | 53 | 54 | async def render_jinja_to_file(template_filename, output_file, **variables): 55 | """Render a template file with variables, and write it to a file.""" 56 | text = render_jinja(template_filename, **variables) 57 | text = emoji.emojize(text, language="alias") 58 | async with aiofiles.open(output_file, "w", encoding="utf-8") as out: 59 | await out.write(text) 60 | -------------------------------------------------------------------------------- /tests/test_helpers.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test dinghy.helpers 3 | """ 4 | 5 | import datetime 6 | 7 | import freezegun 8 | import pytest 9 | 10 | from dinghy.helpers import find_dict_with_key, parse_since, parse_timedelta, slugify 11 | 12 | 13 | @pytest.mark.parametrize( 14 | "tds, kwargs", 15 | [ 16 | ("1d", dict(days=1)), 17 | ("1day", dict(days=1)), 18 | ("1d2h3m", dict(days=1, hours=2, minutes=3)), 19 | ( 20 | "6 day 7.5 hours 8 min .25 s", 21 | dict(days=6, hours=7.5, minutes=8, seconds=0.25), 22 | ), 23 | ("10 weeks 2minutes", dict(weeks=10, minutes=2)), 24 | ], 25 | ) 26 | def test_parse_timedelta(tds, kwargs): 27 | assert parse_timedelta(tds) == datetime.timedelta(**kwargs) 28 | 29 | 30 | @pytest.mark.parametrize( 31 | "tds", 32 | [ 33 | "", 34 | "one", 35 | "123", 36 | "1month", 37 | "2 years", 38 | ], 39 | ) 40 | def test_bad_parse_timedelta(tds): 41 | assert parse_timedelta(tds) is None 42 | 43 | 44 | @freezegun.freeze_time("2023-06-16") 45 | @pytest.mark.parametrize( 46 | "since, dtargs", 47 | [ 48 | ("20230730", (2023, 7, 30)), 49 | ("2023-06-16T12:34:56", (2023, 6, 16, 12, 34, 56)), 50 | ("forever", 
(1980, 1, 1)), 51 | ("1day", (2023, 6, 15)), 52 | ("2 weeks", (2023, 6, 2)), 53 | ("1 week 1 day", (2023, 6, 8)), 54 | ], 55 | ) 56 | def test_parse_since(since, dtargs): 57 | assert parse_since(since) == datetime.datetime(*dtargs) 58 | 59 | 60 | @pytest.mark.parametrize( 61 | "d, k, res", 62 | [ 63 | ({"a": 1, "b": {"k": 1}, "c": "hello"}, "k", {"k": 1}), 64 | ( 65 | {"a": 1, "b": {"x": 0, "d": {"k": 1, "z": 2}}, "c": "hello"}, 66 | "k", 67 | {"k": 1, "z": 2}, 68 | ), 69 | ({"a": 1, "b": {"k": 1}, "c": "hello"}, "z", None), 70 | ], 71 | ) 72 | def test_find_dict_with_key(d, k, res): 73 | assert find_dict_with_key(d, k) == res 74 | 75 | 76 | def test_slugify(): 77 | assert slugify("Hello, World!") == "hello-world" 78 | -------------------------------------------------------------------------------- /src/dinghy/cli.py: -------------------------------------------------------------------------------- 1 | """Dinghy command-line interface.""" 2 | 3 | import asyncio 4 | import sys 5 | 6 | import click 7 | import click_log 8 | 9 | from .digest import make_digest, make_digests_from_config 10 | from .graphql_helpers import GraphqlHelper 11 | from .helpers import DinghyError 12 | 13 | # Fix for https://github.com/nedbat/dinghy/issues/9 14 | # Work around a known problem (https://github.com/python/cpython/issues/83413) 15 | # that is fixed in 3.10.6 (https://github.com/python/cpython/pull/92904). 16 | if sys.version_info < (3, 10, 6) and sys.platform.startswith("win"): 17 | asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) 18 | 19 | logger = click_log.basic_config("dinghy") 20 | 21 | 22 | def main_run(coro): 23 | """ 24 | Run a coroutine for a Dinghy command. 
25 | """ 26 | try: 27 | return asyncio.run(coro) 28 | except DinghyError as err: 29 | logger.error(f"dinghy error: {err}") 30 | sys.exit(1) 31 | finally: 32 | lrl = GraphqlHelper.last_rate_limit() 33 | if lrl is not None: 34 | resource = lrl.get("resource", "general") 35 | logger.debug( 36 | f"Remaining {resource} rate limit: " 37 | + f"{lrl['remaining']} of {lrl['limit']}, " 38 | + f"next reset at {lrl['reset_when']}" 39 | ) 40 | 41 | 42 | @click.command() 43 | @click_log.simple_verbosity_option(logger) 44 | @click.version_option() 45 | @click.option( 46 | "--since", 47 | metavar="DELTA-OR-DATE", 48 | help="Specify a since date. [default: 1 week]", 49 | ) 50 | @click.argument("_input", metavar="[INPUT]", default="dinghy.yaml") 51 | @click.argument("digests", metavar="[DIGEST ...]", nargs=-1) 52 | def cli(since, _input, digests): 53 | """ 54 | Generate HTML digests of GitHub activity. 55 | 56 | INPUT is a dinghy YAML configuration file (default: dinghy.yaml), or a 57 | GitHub repo URL. 58 | 59 | DIGEST(s) are the file names of digests from the configuration file to 60 | create. If none are specified, all of the digests are written. 
61 | 62 | """ 63 | if "://" in _input: 64 | coro = make_digest([_input], since=since) 65 | else: 66 | coro = make_digests_from_config(_input, digests or None, since=since) 67 | 68 | main_run(coro) 69 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | # Dinghy pyproject.toml 2 | 3 | [project] 4 | name = "dinghy" 5 | description = "Dinghy daily digest tool" 6 | authors = [ 7 | {name = "Ned Batchelder", email = "ned@nedbatchelder.com"}, 8 | ] 9 | license = "Apache-2.0" 10 | classifiers = [ 11 | "Development Status :: 5 - Production/Stable", 12 | "Intended Audience :: Developers", 13 | "Programming Language :: Python", 14 | "Programming Language :: Python :: 3", 15 | "Programming Language :: Python :: 3.9", 16 | "Programming Language :: Python :: 3.10", 17 | "Programming Language :: Python :: 3.11", 18 | "Programming Language :: Python :: 3.12", 19 | "Programming Language :: Python :: 3.13", 20 | ] 21 | 22 | requires-python = ">= 3.9" 23 | 24 | dependencies = [ 25 | "aiofiles>=0.8", 26 | "aiohttp>3", 27 | "backports-datetime-fromisoformat; python_version<'3.11'", 28 | "click>8", 29 | "click-log>0.3", 30 | "emoji", 31 | "glom", 32 | "jinja2>3", 33 | "pyyaml>=6", 34 | ] 35 | 36 | dynamic = ["readme", "version"] 37 | 38 | [project.urls] 39 | "Mastodon" = "https://hachyderm.io/@nedbat" 40 | "Funding" = "https://github.com/sponsors/nedbat" 41 | "Issues" = "https://github.com/nedbat/dinghy/issues" 42 | "Source" = "https://github.com/nedbat/dinghy" 43 | "Home" = "https://github.com/nedbat/dinghy" 44 | 45 | [project.scripts] 46 | dinghy = "dinghy.__main__:cli" 47 | 48 | [tool.setuptools.packages.find] 49 | where = ["src"] 50 | 51 | [tool.setuptools.package-data] 52 | dinghy = [ 53 | "graphql/*.*", 54 | "templates/*.*", 55 | ] 56 | 57 | [tool.setuptools.dynamic] 58 | version.attr = "dinghy.__version__" 59 | readme.file = ["README.rst", 
"CHANGELOG.rst"] 60 | 61 | [build-system] 62 | requires = ["setuptools"] 63 | build-backend = "setuptools.build_meta" 64 | 65 | ## Other tools 66 | 67 | [tool.scriv] 68 | fragment_directory = "scriv.d" 69 | rst_header_chars = "-." 70 | version = "literal: src/dinghy/__init__.py: __version__" 71 | 72 | [tool.pylint.REPORTS] 73 | # No need for a score. 74 | score = "no" 75 | # Regular expression which should only match function or class names that do 76 | # not require a docstring. 77 | no-docstring-rgx = "__.*__|test[A-Z_].*" 78 | 79 | [tool.pylint."MESSAGES CONTROL"] 80 | # Disable the message(s) with the given id(s). 81 | disable = [ 82 | "invalid-name", 83 | "logging-fstring-interpolation", 84 | "logging-not-lazy", 85 | "no-else-return", 86 | "use-dict-literal", 87 | ] 88 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: help clean sterile requirements 2 | 3 | .DEFAULT_GOAL := help 4 | 5 | help: ## display this help message 6 | @echo "Please use \`make ' where is one of" 7 | @awk -F ':.*?## ' '/^[a-zA-Z]/ && NF==2 {printf "\033[36m %-25s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) | sort 8 | 9 | clean: ## remove stuff we don't need 10 | find . -name '__pycache__' -exec rm -rf {} + 11 | find . -name '*.pyc' -exec rm -f {} + 12 | find . -name '*.pyo' -exec rm -f {} + 13 | find . 
-name '*~' -exec rm -f {} + 14 | rm -fr build/ dist/ src/*.egg-info 15 | rm -fr .*_cache/ 16 | rm -f out_*.json save_*.json 17 | 18 | sterile: clean ## remove all transient stuff 19 | rm -rf .tox 20 | 21 | requirements: ## install development environment requirements 22 | pip install -U -r dev-requirements.txt 23 | 24 | 25 | .PHONY: test quality black lint _check_manifest sample 26 | 27 | test: ## run tests in the current virtualenv 28 | pytest tests 29 | 30 | quality: black lint _check_manifest ## run code-checking tools 31 | 32 | black: 33 | black -q src tests 34 | 35 | lint: 36 | pylint src tests 37 | 38 | _check_manifest: 39 | python -m check_manifest 40 | 41 | 42 | .PHONY: check_release _check_version _check_scriv 43 | 44 | VERSION := $(shell python -c "import dinghy as d; print(d.__version__)") 45 | 46 | check_release: _check_manifest _check_version _check_scriv ## check that we are ready for a release 47 | @echo "Release checks passed" 48 | 49 | _check_version: 50 | @if [[ $$(git tags | grep -q -w $(VERSION) && echo "x") == "x" ]]; then \ 51 | echo 'A git tag for $(VERSION) exists! Did you forget to bump the version in src/dinghy/__init__.py?'; \ 52 | exit 1; \ 53 | fi 54 | 55 | _check_scriv: 56 | @if (( $$(ls -1 scriv.d | wc -l) != 1 )); then \ 57 | echo 'There are scriv fragments! Did you forget `scriv collect`?'; \ 58 | exit 1; \ 59 | fi 60 | 61 | 62 | .PHONY: release dist testpypi pypi tag gh_release 63 | 64 | release: clean check_release dist pypi tag gh_release ## do all the steps for a release 65 | 66 | dist: ## build the distributions 67 | python -m build --sdist --wheel 68 | python -m twine check dist/* 69 | 70 | testpypi: ## upload the distributions to PyPI's testing server. 
71 | @if [[ -z "$$TWINE_TEST_PASSWORD" ]]; then \ 72 | echo 'Missing TWINE_TEST_PASSWORD: opvars'; \ 73 | exit 1; \ 74 | fi 75 | python -m twine upload --verbose --repository testpypi --password $$TWINE_TEST_PASSWORD dist/* 76 | 77 | pypi: ## upload the built distributions to PyPI. 78 | @if [[ -z "$$TWINE_PASSWORD" ]]; then \ 79 | echo 'Missing TWINE_PASSWORD: opvars'; \ 80 | exit 1; \ 81 | fi 82 | python -m twine upload --verbose dist/* 83 | 84 | tag: ## make a git tag with the version number 85 | git tag -s -m "Version $(VERSION)" $(VERSION) 86 | git push --all 87 | 88 | gh_release: ## make a GitHub release 89 | python -m scriv github-release 90 | -------------------------------------------------------------------------------- /src/dinghy/helpers.py: -------------------------------------------------------------------------------- 1 | """ 2 | Misc helpers. 3 | """ 4 | 5 | import datetime 6 | import json 7 | import re 8 | import sys 9 | import unicodedata 10 | 11 | import aiofiles 12 | 13 | 14 | if sys.version_info < (3, 11): 15 | from backports.datetime_fromisoformat import MonkeyPatch 16 | MonkeyPatch.patch_fromisoformat() 17 | 18 | 19 | class DinghyError(Exception): 20 | """An error in how Dinghy is being used.""" 21 | 22 | 23 | async def json_save(data, filename): 24 | """Write `data` to `filename` as JSON.""" 25 | async with aiofiles.open(filename, "w", encoding="utf-8") as json_out: 26 | await json_out.write(json.dumps(data, indent=4)) 27 | 28 | 29 | def parse_timedelta(timedelta_str): 30 | """ 31 | Parse a timedelta string ("2h13m") into a timedelta object. 32 | 33 | From https://stackoverflow.com/a/51916936/14343 34 | 35 | Args: 36 | timedelta_str (str): A string identifying a duration, like "2h13m". 37 | 38 | Returns: 39 | A datetime.timedelta object, or None if it can't be parsed. 40 | 41 | """ 42 | parts = re.match( 43 | r"""(?x) 44 | ^ 45 | ((?P[.\d]+)w(eeks?)?)? 46 | ((?P[.\d]+)d(ays?)?)? 47 | ((?P[.\d]+)h(ours?)?)? 48 | ((?P[.\d]+)m(in(utes?)?)?)? 
def find_dict_with_key(d, key):
    """
    Depth-first search `d` and its nested dicts for one containing `key`.

    Returns the first dict (in insertion order) that has `key`, or None.
    """
    if key in d:
        return d
    nested_dicts = (v for v in d.values() if isinstance(v, dict))
    for sub in nested_dicts:
        found = find_dict_with_key(sub, key)
        if found is not None:
            return found
    return None
101 | 102 | from django.template.defaultfilters import slugify 103 | """ 104 | value = str(value).strip() 105 | if allow_unicode: 106 | value = unicodedata.normalize("NFKC", value) 107 | else: 108 | value = ( 109 | unicodedata.normalize("NFKD", value) 110 | .encode("ascii", "ignore") 111 | .decode("ascii") 112 | ) 113 | value = re.sub(r"[^\w\s-]", "", value.lower()) 114 | return re.sub(r"[-\s]+", "-", value).strip("-_") 115 | -------------------------------------------------------------------------------- /src/dinghy/graphql_helpers.py: -------------------------------------------------------------------------------- 1 | """ 2 | GraphQL helpers. 3 | """ 4 | 5 | import asyncio 6 | import collections 7 | import datetime 8 | import itertools 9 | import logging 10 | import os 11 | import pkgutil 12 | import re 13 | import time 14 | 15 | import aiohttp 16 | 17 | from .helpers import DinghyError, find_dict_with_key, json_save 18 | 19 | 20 | logger = logging.getLogger(__name__) 21 | 22 | 23 | def _summarize_rate_limit(response): 24 | """ 25 | Create a dict of information about the current rate limit. 26 | 27 | Reads GitHub X-RateLimit- headers. 28 | """ 29 | rate_limit_info = { 30 | k.rpartition("-")[-1].lower(): v 31 | for k, v in response.headers.items() 32 | if k.startswith("X-RateLimit-") 33 | } 34 | rate_limit_helpfully = { 35 | **rate_limit_info, 36 | "reset_when": time.strftime( 37 | "%H:%M:%S", 38 | time.localtime(int(rate_limit_info["reset"])), 39 | ), 40 | "when": datetime.datetime.now().strftime("%H:%M:%S"), 41 | } 42 | return rate_limit_helpfully 43 | 44 | 45 | # GraphQL error types that could be user mistakes. 46 | USER_FIXABLE_ERR_TYPES = { 47 | "INSUFFICIENT_SCOPES": "Insufficient GitHub token scope.", 48 | } 49 | 50 | 51 | def _raise_if_error(data): 52 | """ 53 | If `data` is an error response, raise a useful exception. 
54 | """ 55 | if "message" in data: 56 | raise RuntimeError(data["message"]) 57 | if "errors" in data: 58 | err = data["errors"][0] 59 | if user_fix_msg := USER_FIXABLE_ERR_TYPES.get(err.get("type")): 60 | raise DinghyError(f"{user_fix_msg} {err['message']}") 61 | msg = f"GraphQL error: {err['message']}" 62 | if "path" in err: 63 | msg += f" @{'.'.join(err['path'])}" 64 | if "locations" in err: 65 | loc = err["locations"][0] 66 | msg += f", line {loc['line']} column {loc['column']}" 67 | logger.debug(f"Error data: {data}") 68 | raise RuntimeError(msg) 69 | if "data" in data and data["data"] is None: 70 | # Another kind of failure response? 71 | raise ValueError("GraphQL query returned null") 72 | 73 | 74 | def _query_synopsis(query, variables): 75 | """ 76 | Create a one-line synopsis of the query, for debugging and error messages. 77 | """ 78 | args = ", ".join(f"{k}: {v!r}" for k, v in variables.items()) 79 | query_head = next(line for line in query.splitlines() if not line.startswith("#")) 80 | return query_head + args + ")" 81 | 82 | 83 | class GraphqlHelper: 84 | """ 85 | A helper for GraphQL, including error handling and pagination. 86 | """ 87 | 88 | json_names = (f"out_{i:04}.json" for i in itertools.count()) 89 | rate_limit_history = collections.deque(maxlen=50) 90 | 91 | def __init__(self, endpoint, token): 92 | self.endpoint = endpoint 93 | self.headers = {"Authorization": f"Bearer {token}"} 94 | 95 | @classmethod 96 | def save_rate_limit(cls, rate_limit): 97 | """Keep rate limit history.""" 98 | cls.rate_limit_history.append(rate_limit) 99 | 100 | @classmethod 101 | def last_rate_limit(cls): 102 | """Get the latest rate limit info.""" 103 | if not cls.rate_limit_history: 104 | return None 105 | return cls.rate_limit_history[-1] 106 | 107 | async def _raw_execute(self, query, variables=None): 108 | """ 109 | Execute one GraphQL query, and return the JSON data. 
    async def execute(self, query, variables=None):
        """
        Execute one GraphQL query, with logging, retrying, and error handling.

        Loops until the query succeeds or fails for a non-rate-limit reason:
        a RATE_LIMITED error response triggers a sleep until the reset time
        reported by GitHub, then a retry.

        Args:
            query (str): the GraphQL query text.
            variables (dict): values for the query's variables, or None.

        Returns:
            The decoded JSON response data.

        Raises:
            DinghyError or RuntimeError via `_raise_if_error` if the
            response is an error.
        """
        logger.debug(_query_synopsis(query, variables))

        while True:
            data = await self._raw_execute(query=query, variables=variables)
            if "errors" in data:
                if data["errors"][0].get("type") == "RATE_LIMITED":
                    reset_when = self.last_rate_limit()["reset_when"]
                    logger.info(f"Waiting for rate limit to reset at {reset_when}")
                    # Sleep until the reported reset time, plus a 10-second
                    # cushion in case of clock skew, then try again.
                    await asyncio.sleep(
                        int(self.last_rate_limit()["reset"]) - time.time() + 10
                    )
                    continue
            break

        # $set_env.py: DINGHY_SAVE_RESPONSES - save every query response in a JSON file.
        if int(os.environ.get("DINGHY_SAVE_RESPONSES", 0)):
            json_name = next(self.json_names)
            await json_save(data, json_name)
            logger.info(f"Wrote query data: {json_name}")

        _raise_if_error(data)
        return data
def build_query(gql_filename):
    """
    Read a GraphQL file, and complete it with requested fragments.

    Starting from `gql_filename`, each file's text is scanned for
    "# fragment: name.graphql" comments, and the named files are appended
    (transitively, each at most once).

    Args:
        gql_filename (str): the name of a .graphql file packaged with dinghy.

    Returns:
        str: the combined query text.
    """
    filenames = [gql_filename]
    query = []

    seen_filenames = set()
    while filenames:
        next_filenames = []
        for filename in filenames:
            # Bug fix: read the file named by the loop variable; the previous
            # code used a fixed (broken) resource name for every file.
            gtext = pkgutil.get_data("dinghy", f"graphql/{filename}").decode("utf-8")
            query.append(gtext)

            # Comments like "# fragment: author_frag.graphql" request inclusions.
            for match in re.finditer(r"#\s*fragment: ([.\w]+)", gtext):
                frag_name = match[1]
                if frag_name not in seen_filenames:
                    next_filenames.append(frag_name)
                seen_filenames.add(frag_name)
        filenames = next_filenames

    full_query = "\n".join(query)
    if FAKE_PAGE:
        # Shrink page sizes to force pagination during testing.
        full_query = full_query.replace("first: 100", f"first: {FAKE_PAGE}")
    return full_query
To run dinghy you will need a GitHub `personal access token`_. The scopes 33 | you need to assign to it depend on what repos you'll be accessing. If you 34 | are only accessing public repos, then you don't need any scopes. If you 35 | will be accessing any private repos, then you need the "repo" scope. Create 36 | a token and define the GITHUB_TOKEN environment variable with the value: 37 | 38 | .. code-block:: bash 39 | 40 | $ export GITHUB_TOKEN=ghp_Y2oxDn9gHJ3W2NcQeyJsrMOez 41 | 42 | .. _personal access token: https://github.com/settings/tokens 43 | 44 | 3. Then run dinghy with a GitHub URL: 45 | 46 | .. code-block:: bash 47 | 48 | $ dinghy https://github.com/Me/MyProject 49 | Wrote digest: digest.html 50 | 51 | You will have a digest of the repo's last week of activity in digest.html. 52 | It will look `something like this `_. 53 | 54 | You can also write a YAML configuration file to digest multiple sources, or 55 | with different time periods: 56 | 57 | .. code-block:: bash 58 | 59 | $ dinghy my-dinghy-config.yaml 60 | Wrote digest: proj1.html 61 | Wrote digest: proj2-daily.html 62 | Wrote digest: proj2-weekly.html 63 | 64 | Extra arguments specify which digests to write: 65 | 66 | .. code-block:: bash 67 | 68 | $ dinghy my-dinghy-config.yaml proj1.html 69 | Wrote digest: proj1.html 70 | 71 | 72 | 73 | Configuration 74 | ============= 75 | 76 | Dinghy configuration is read from a YAML file (``dinghy.yaml`` by default). 77 | Here's an example: 78 | 79 | .. 
code-block:: yaml 80 | 81 | digests: 82 | - digest: lastweek.html 83 | title: My projects last week 84 | since: 1 week 85 | items: 86 | - https://github.com/orgs/myorg/projects/17 87 | - https://github.com/orgs/anotherorg/projects/8 88 | - https://github.com/myorg/myrepo/pulls 89 | 90 | - digest: hotnews.html 91 | title: Today's news 92 | since: 1 day 93 | items: 94 | - url: https://github.com/orgs/anotherorg/projects/8 95 | home_repo: anotherorg/wg 96 | - https://github.com/myorg/churnchurn/issues 97 | 98 | - digest: all_prs.html 99 | since: 1 day 100 | items: 101 | - search: org:myorg is:pr 102 | title: MyOrg pull requests 103 | 104 | defaults: 105 | ignore_users: 106 | - app-user 107 | - fake-bot 108 | 109 | The ``digests`` clause is a list of digests to produce. The ``defaults`` 110 | clause sets defaults for the digest options in the rest of the file. Each 111 | ``digests`` clause specifies what to digest: 112 | 113 | - The ``digest`` setting is the HTML digest file to write. 114 | 115 | - The ``since`` setting indicates how far back to look for activity. It can use 116 | units of weeks, days, hours, minutes and seconds, and can also be 117 | abbreviated, like ``1d6h``. Using ``since: forever`` will include all 118 | activity regardless of when it happened. If ``since`` is omitted, it 119 | defaults to one week. You can specify ``--since=`` on the dinghy 120 | command line to provide an explicit value. 121 | 122 | - The ``items`` setting is a list of things to report on, specified in a few 123 | different ways: 124 | 125 | - The ``url`` setting is a GitHub URL, in a number of forms: 126 | 127 | - An organization project URL will report on the issues and pull requests 128 | in the project. Your GitHub token will need the "read:project" scope. 129 | 130 | - A URL to a repo will report on the issues, pull requests and releases in 131 | the repo. 132 | 133 | - A URL to a repo's issues will report on the issues in the repo. 
134 | 135 | - A URL to a repo's pull requests will report on the pull requests in the 136 | repo. 137 | 138 | - A URL to a repo's releases will report on the releases in the repo. 139 | 140 | - Any of these URLs can point to a GitHub Enterprise installation instead 141 | of https://github.com. 142 | 143 | - The ``search`` setting can specify a GitHub search query to find issues or 144 | pull requests. The query will have an ``updated:`` term added to it to 145 | account for the ``since:`` setting. 146 | 147 | - If an item only needs to specify a GitHub URL, then it can simply be the 148 | URL string. 149 | 150 | - The optional ``title`` setting will be used to construct the title 151 | and main header of the HTML page. 152 | 153 | - The ``template`` setting is the name of a Jinja2 template file to use to 154 | produce the digest. It defaults to "digest.html.j2", which is packaged with 155 | dinghy. The data passed to the template is under-specified; if you want to 156 | write a template of your own, model it on the built-in `digest.html.j2`_. 157 | 158 | .. _digest.html.j2: https://github.com/nedbat/dinghy/blob/main/src/dinghy/templates/digest.html.j2 159 | 160 | - For GitHub Enterprise, you can specify ``api_root``, which is the URL to 161 | build on for GraphQL API requests. It defaults to 162 | "https://api.github.com/graphql". 163 | 164 | Items can have additional options: 165 | 166 | - By default, no activity is reported for bot users. If you want to include 167 | them, use ``include_bots: true``. 168 | 169 | - Some applications perform actions using real user accounts, but you'd like to 170 | ignore them anyway. You can list those user names that should be ignored in 171 | the ``ignore_users`` setting. 172 | 173 | - Digests can have an explicit title set with the ``title`` setting. 174 | 175 | - Options for organization projects include: 176 | 177 | - ``home_repo`` is the owner/repo of the repo in which most issues will be 178 | created. 
Issues in other repos will have the repo indicated in the 179 | digest. 180 | 181 | 182 | Daily Publishing 183 | ================ 184 | 185 | The `sample digest `_ is published daily using a GitHub Action from 186 | its own repo: `nedbat/dinghy_sample `_. You can use it as a 187 | starting point for your own publishing. 188 | 189 | There's also a `separate project `_ which sends a daily email 190 | of GitHub activity. 191 | 192 | 193 | .. _sample: https://nedbat.github.io/dinghy_sample/3day.html 194 | .. _sample_repo: https://github.com/nedbat/dinghy_sample 195 | .. _email_project: https://github.com/iloveitaly/github-digest 196 | 197 | 198 | Contributors 199 | ============ 200 | 201 | Thanks to all who have helped: 202 | 203 | - Ned Batchelder 204 | - Andreas Motl 205 | - Bill Mill 206 | - Doug Hellmann 207 | - Henry Gessau 208 | - Lucas Taylor 209 | - Quentin Pradet 210 | - Simon de Vlieger 211 | 212 | 213 | .. |pypi-badge| image:: https://img.shields.io/pypi/v/dinghy.svg 214 | :target: https://pypi.python.org/pypi/dinghy/ 215 | :alt: PyPI 216 | .. |pyversions-badge| image:: https://img.shields.io/pypi/pyversions/dinghy.svg 217 | :target: https://pypi.python.org/pypi/dinghy/ 218 | :alt: Supported Python versions 219 | .. |license-badge| image:: https://img.shields.io/github/license/nedbat/dinghy.svg 220 | :target: https://github.com/nedbat/dinghy/blob/master/LICENSE.txt 221 | :alt: License 222 | .. |bluesky-nedbat| image:: https://img.shields.io/badge/dynamic/json?style=flat&color=96a3b0&labelColor=3686f7&logo=icloud&logoColor=white&label=@nedbat&url=https%3A%2F%2Fpublic.api.bsky.app%2Fxrpc%2Fapp.bsky.actor.getProfile%3Factor=nedbat.com&query=followersCount 223 | :target: https://bsky.app/profile/nedbat.com 224 | :alt: nedbat on Bluesky 225 | .. 
|mastodon-nedbat| image:: https://img.shields.io/badge/dynamic/json?style=flat&labelColor=450657&logo=mastodon&logoColor=ffffff&label=@nedbat&query=followers_count&url=https%3A%2F%2Fhachyderm.io%2Fapi%2Fv1%2Faccounts%2Flookup%3Facct=nedbat 226 | :target: https://hachyderm.io/@nedbat 227 | :alt: nedbat on Mastodon 228 | .. |sponsor-badge| image:: https://img.shields.io/badge/%E2%9D%A4-Sponsor%20me-brightgreen?style=flat&logo=GitHub 229 | :target: https://github.com/sponsors/nedbat 230 | :alt: Sponsor me on GitHub 231 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | 2 | 3 | Apache License 4 | Version 2.0, January 2004 5 | http://www.apache.org/licenses/ 6 | 7 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 8 | 9 | 1. Definitions. 10 | 11 | "License" shall mean the terms and conditions for use, reproduction, 12 | and distribution as defined by Sections 1 through 9 of this document. 13 | 14 | "Licensor" shall mean the copyright owner or entity authorized by 15 | the copyright owner that is granting the License. 16 | 17 | "Legal Entity" shall mean the union of the acting entity and all 18 | other entities that control, are controlled by, or are under common 19 | control with that entity. For the purposes of this definition, 20 | "control" means (i) the power, direct or indirect, to cause the 21 | direction or management of such entity, whether by contract or 22 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 23 | outstanding shares, or (iii) beneficial ownership of such entity. 24 | 25 | "You" (or "Your") shall mean an individual or Legal Entity 26 | exercising permissions granted by this License. 27 | 28 | "Source" form shall mean the preferred form for making modifications, 29 | including but not limited to software source code, documentation 30 | source, and configuration files. 
31 | 32 | "Object" form shall mean any form resulting from mechanical 33 | transformation or translation of a Source form, including but 34 | not limited to compiled object code, generated documentation, 35 | and conversions to other media types. 36 | 37 | "Work" shall mean the work of authorship, whether in Source or 38 | Object form, made available under the License, as indicated by a 39 | copyright notice that is included in or attached to the work 40 | (an example is provided in the Appendix below). 41 | 42 | "Derivative Works" shall mean any work, whether in Source or Object 43 | form, that is based on (or derived from) the Work and for which the 44 | editorial revisions, annotations, elaborations, or other modifications 45 | represent, as a whole, an original work of authorship. For the purposes 46 | of this License, Derivative Works shall not include works that remain 47 | separable from, or merely link (or bind by name) to the interfaces of, 48 | the Work and Derivative Works thereof. 49 | 50 | "Contribution" shall mean any work of authorship, including 51 | the original version of the Work and any modifications or additions 52 | to that Work or Derivative Works thereof, that is intentionally 53 | submitted to Licensor for inclusion in the Work by the copyright owner 54 | or by an individual or Legal Entity authorized to submit on behalf of 55 | the copyright owner. For the purposes of this definition, "submitted" 56 | means any form of electronic, verbal, or written communication sent 57 | to the Licensor or its representatives, including but not limited to 58 | communication on electronic mailing lists, source code control systems, 59 | and issue tracking systems that are managed by, or on behalf of, the 60 | Licensor for the purpose of discussing and improving the Work, but 61 | excluding communication that is conspicuously marked or otherwise 62 | designated in writing by the copyright owner as "Not a Contribution." 
63 | 64 | "Contributor" shall mean Licensor and any individual or Legal Entity 65 | on behalf of whom a Contribution has been received by Licensor and 66 | subsequently incorporated within the Work. 67 | 68 | 2. Grant of Copyright License. Subject to the terms and conditions of 69 | this License, each Contributor hereby grants to You a perpetual, 70 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 71 | copyright license to reproduce, prepare Derivative Works of, 72 | publicly display, publicly perform, sublicense, and distribute the 73 | Work and such Derivative Works in Source or Object form. 74 | 75 | 3. Grant of Patent License. Subject to the terms and conditions of 76 | this License, each Contributor hereby grants to You a perpetual, 77 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 78 | (except as stated in this section) patent license to make, have made, 79 | use, offer to sell, sell, import, and otherwise transfer the Work, 80 | where such license applies only to those patent claims licensable 81 | by such Contributor that are necessarily infringed by their 82 | Contribution(s) alone or by combination of their Contribution(s) 83 | with the Work to which such Contribution(s) was submitted. If You 84 | institute patent litigation against any entity (including a 85 | cross-claim or counterclaim in a lawsuit) alleging that the Work 86 | or a Contribution incorporated within the Work constitutes direct 87 | or contributory patent infringement, then any patent licenses 88 | granted to You under this License for that Work shall terminate 89 | as of the date such litigation is filed. 90 | 91 | 4. Redistribution. 
You may reproduce and distribute copies of the 92 | Work or Derivative Works thereof in any medium, with or without 93 | modifications, and in Source or Object form, provided that You 94 | meet the following conditions: 95 | 96 | (a) You must give any other recipients of the Work or 97 | Derivative Works a copy of this License; and 98 | 99 | (b) You must cause any modified files to carry prominent notices 100 | stating that You changed the files; and 101 | 102 | (c) You must retain, in the Source form of any Derivative Works 103 | that You distribute, all copyright, patent, trademark, and 104 | attribution notices from the Source form of the Work, 105 | excluding those notices that do not pertain to any part of 106 | the Derivative Works; and 107 | 108 | (d) If the Work includes a "NOTICE" text file as part of its 109 | distribution, then any Derivative Works that You distribute must 110 | include a readable copy of the attribution notices contained 111 | within such NOTICE file, excluding those notices that do not 112 | pertain to any part of the Derivative Works, in at least one 113 | of the following places: within a NOTICE text file distributed 114 | as part of the Derivative Works; within the Source form or 115 | documentation, if provided along with the Derivative Works; or, 116 | within a display generated by the Derivative Works, if and 117 | wherever such third-party notices normally appear. The contents 118 | of the NOTICE file are for informational purposes only and 119 | do not modify the License. You may add Your own attribution 120 | notices within Derivative Works that You distribute, alongside 121 | or as an addendum to the NOTICE text from the Work, provided 122 | that such additional attribution notices cannot be construed 123 | as modifying the License. 
124 | 125 | You may add Your own copyright statement to Your modifications and 126 | may provide additional or different license terms and conditions 127 | for use, reproduction, or distribution of Your modifications, or 128 | for any such Derivative Works as a whole, provided Your use, 129 | reproduction, and distribution of the Work otherwise complies with 130 | the conditions stated in this License. 131 | 132 | 5. Submission of Contributions. Unless You explicitly state otherwise, 133 | any Contribution intentionally submitted for inclusion in the Work 134 | by You to the Licensor shall be under the terms and conditions of 135 | this License, without any additional terms or conditions. 136 | Notwithstanding the above, nothing herein shall supersede or modify 137 | the terms of any separate license agreement you may have executed 138 | with Licensor regarding such Contributions. 139 | 140 | 6. Trademarks. This License does not grant permission to use the trade 141 | names, trademarks, service marks, or product names of the Licensor, 142 | except as required for reasonable and customary use in describing the 143 | origin of the Work and reproducing the content of the NOTICE file. 144 | 145 | 7. Disclaimer of Warranty. Unless required by applicable law or 146 | agreed to in writing, Licensor provides the Work (and each 147 | Contributor provides its Contributions) on an "AS IS" BASIS, 148 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 149 | implied, including, without limitation, any warranties or conditions 150 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 151 | PARTICULAR PURPOSE. You are solely responsible for determining the 152 | appropriateness of using or redistributing the Work and assume any 153 | risks associated with Your exercise of permissions under this License. 154 | 155 | 8. Limitation of Liability. 
In no event and under no legal theory, 156 | whether in tort (including negligence), contract, or otherwise, 157 | unless required by applicable law (such as deliberate and grossly 158 | negligent acts) or agreed to in writing, shall any Contributor be 159 | liable to You for damages, including any direct, indirect, special, 160 | incidental, or consequential damages of any character arising as a 161 | result of this License or out of the use or inability to use the 162 | Work (including but not limited to damages for loss of goodwill, 163 | work stoppage, computer failure or malfunction, or any and all 164 | other commercial damages or losses), even if such Contributor 165 | has been advised of the possibility of such damages. 166 | 167 | 9. Accepting Warranty or Additional Liability. While redistributing 168 | the Work or Derivative Works thereof, You may choose to offer, 169 | and charge a fee for, acceptance of support, warranty, indemnity, 170 | or other liability obligations and/or rights consistent with this 171 | License. However, in accepting such obligations, You may act only 172 | on Your own behalf and on Your sole responsibility, not on behalf 173 | of any other Contributor, and only if You agree to indemnify, 174 | defend, and hold each Contributor harmless for any liability 175 | incurred by, or claims asserted against, such Contributor by reason 176 | of your accepting any such warranty or additional liability. 177 | 178 | END OF TERMS AND CONDITIONS 179 | 180 | 181 | -------------------------------------------------------------------------------- /src/dinghy/templates/digest.html.j2: -------------------------------------------------------------------------------- 1 | 2 | {# The template for the digest.html output file. 
-#} 3 | 4 | {%- macro page_title() -%} 5 | {{ title }} 6 | {% if title and since %} — {% endif %} 7 | {% if since %}Activity since {{ since|datetime("%Y-%m-%d") }}{% endif %} 8 | {%- endmacro -%} 9 | 10 | {%- macro octicon_url(name, size=16) -%} 11 | {#- Octicons: https://github.com/primer/octicons/tree/main/icons -#} 12 | {#- also: https://primer.style/octicons/ -#} 13 | https://raw.githubusercontent.com/primer/octicons/main/icons/{{ name }}-{{ size }}.svg 14 | {%- endmacro -%} 15 | 16 | {%- macro octicon_bg(name, size=16, color="") -%} 17 | background-image: url({{ octicon_url(name, size) }}); 18 | {# sepia: #aa9a7c -#} 19 | {% if color == "red" -%} 20 | filter: invert(1) brightness(.5) sepia(1) hue-rotate(316deg) saturate(6) brightness(.8); 21 | {% elif color == "green" -%} 22 | {# open color: #1a7f37 -#} 23 | filter: invert(1) brightness(.5) sepia(1) hue-rotate(98deg) saturate(8) brightness(0.5); 24 | {% elif color == "purple" -%} 25 | {# merged color: #8250df -#} 26 | filter: invert(1) brightness(.5) sepia(1) hue-rotate(220deg) saturate(8) brightness(.65); 27 | {% elif color == "light gray" -%} 28 | filter: invert(1) brightness(.5) sepia(1) hue-rotate(172deg) saturate(0.43) brightness(1.25); 29 | {% elif color == "gray" -%} 30 | {# draft color: #57606a -#} 31 | filter: invert(1) brightness(.5) sepia(1) hue-rotate(172deg) saturate(0.43) brightness(0.65); 32 | {% endif -%} 33 | {%- endmacro -%} 34 | 35 | 36 | 37 | 38 | {{ page_title() }} 39 | 40 | 203 | 204 | 205 | 206 |

{{ page_title() }}

207 | 208 | {% if results|length > 1 %} 209 | 215 | {% endif %} 216 | 217 | 329 | 330 | 337 | 338 | 339 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | 2 | .. this will be appended to README.rst 3 | 4 | Changelog 5 | ========= 6 | 7 | .. 8 | All enhancements and patches to dinghy will be documented 9 | in this file. It adheres to the structure of http://keepachangelog.com/ , 10 | but in reStructuredText instead of Markdown (for ease of incorporation into 11 | Sphinx documentation and the PyPI description). 12 | 13 | This project adheres to Semantic Versioning (http://semver.org/). 14 | 15 | Unreleased 16 | ---------- 17 | 18 | See the fragment files in the `scriv.d directory`_. 19 | 20 | .. _scriv.d directory: https://github.com/nedbat/dinghy/tree/master/scriv.d 21 | 22 | 23 | .. scriv-insert-here 24 | 25 | .. _changelog-1.4.1: 26 | 27 | 1.4.1 — 2025-04-03 28 | ------------------ 29 | 30 | Fixed 31 | ..... 32 | 33 | - Fixed the import of backports so people can use Python newer than 3.10. 34 | Thanks, `Simon de Vlieger `_. 35 | 36 | - Fixed a link in the README and added some badges. 37 | 38 | .. _pull 46: https://github.com/nedbat/dinghy/pull/46 39 | 40 | .. _changelog-1.4.0: 41 | 42 | 1.4.0 — 2025-04-01 43 | ------------------ 44 | 45 | Added 46 | ..... 47 | 48 | - Added breadcrumbs with navigation at the top of the HTML page if there is 49 | more than one section. Specifically with long sections, it makes navigating 50 | so much easier. 51 | 52 | Changed 53 | ....... 54 | 55 | - Install backports-datetime-fromisoformat only on Python <3.11. 56 | 57 | - Dropped support for Python 3.8. 58 | 59 | .. _changelog-1.3.3: 60 | 61 | 1.3.3 — 2024-11-09 62 | ------------------ 63 | 64 | Added 65 | ..... 66 | 67 | - Declared support for Python 3.12 and 3.13. 68 | 69 | .. 
_changelog-1.3.2: 70 | 71 | 1.3.2 — 2023-09-21 72 | ------------------ 73 | 74 | Fixed 75 | ..... 76 | 77 | - The 1.3.1 fix for the ignore ``--since`` option accidentally ignored 78 | ``since`` settings in configuration files. This is now fixed, closing `issue 79 | 36`_. Thanks, `Lucas Taylor `_. 80 | 81 | .. _issue 36: https://github.com/nedbat/dinghy/issues/36 82 | .. _pull 37: https://github.com/nedbat/dinghy/issues/37 83 | 84 | 85 | .. _changelog-1.3.1: 86 | 87 | 1.3.1 — 2023-09-17 88 | ------------------ 89 | 90 | Fixed 91 | ..... 92 | 93 | - When using a URL on the command line, the ``--since`` option was ignored. 94 | This is now fixed, closing `issue 35`_. 95 | 96 | .. _issue 35: https://github.com/nedbat/dinghy/issues/35 97 | 98 | 99 | .. _changelog-1.3.0: 100 | 101 | 1.3.0 — 2023-07-31 102 | ------------------ 103 | 104 | Added 105 | ..... 106 | 107 | - The ``since`` date can now be specified on the command line with ``--since``. 108 | This will override any specification in the YAML file. 109 | 110 | - The ``since`` value can be specified as a specific ISO 8601 date or datetime, 111 | closing `issue 26`_. 112 | 113 | .. _issue 26: https://github.com/nedbat/dinghy/issues/26 114 | 115 | 116 | .. _changelog-1.2.0: 117 | 118 | 1.2.0 — 2023-01-27 119 | ------------------ 120 | 121 | Added 122 | ..... 123 | 124 | - Now you can additionally specify digests on the command line to write, which 125 | will choose just those digests from the configuration file. 126 | 127 | Fixed 128 | ..... 129 | 130 | - If the config file has no ``digests:`` clause, it could be because it's not a 131 | dinghy config file at all, so print an error message about it. 132 | 133 | .. _changelog-1.1.0: 134 | 135 | 1.1.0 — 2023-01-25 136 | ------------------ 137 | 138 | Added 139 | ..... 140 | 141 | - A digest can specify ``template``, a Jinja2 template file to produce the 142 | digest. This opens the possibility for other output formats than HTML. 143 | 144 | .. 
_changelog-1.0.0: 145 | 146 | 1.0.0 — 2022-12-03 147 | ------------------ 148 | 149 | - Nothing has changed, just decided Dinghy was stable enough to call 1.0.0. 150 | 151 | .. _changelog-0.15.0: 152 | 153 | 0.15.0 — 2022-11-09 154 | ------------------- 155 | 156 | Added 157 | ..... 158 | 159 | - Show releases in the digest. Thanks, Simon de Vlieger. 160 | 161 | - A new setting ``include_bots: true`` will include pull requests, issues, or 162 | comments created by bot users. The default remains False, to exclude them. 163 | Closes `issue 25`_. 164 | 165 | .. _issue 25: https://github.com/nedbat/dinghy/issues/25 166 | 167 | 168 | .. _changelog-0.14.0: 169 | 170 | 0.14.0 — 2022-10-25 171 | ------------------- 172 | 173 | Added 174 | ..... 175 | 176 | - Now a CLI command is registered so you can use ``dinghy`` as a command 177 | instead of ``python -m dinghy`` (though that still works). 178 | 179 | - You can now specify ``since: forever`` to include all activity regardless of 180 | when it happened. 181 | 182 | Changed 183 | ....... 184 | 185 | - Search results now always show the repo containing the item. 186 | 187 | Fixed 188 | ..... 189 | 190 | - Comments by deleted GitHub users would cause a crash. This is now fixed 191 | (`issue 23`_). 192 | 193 | .. _issue 23: https://github.com/nedbat/dinghy/issues/23 194 | 195 | .. _changelog-0.13.4: 196 | 197 | 0.13.4 — 2022-10-06 198 | ------------------- 199 | 200 | Fixed 201 | ..... 202 | 203 | - Comments on pull requests were only filtered by their age, not their authors, 204 | so bot comments, and comments by "ignored users" were still included. This 205 | is now fixed. 206 | 207 | .. _changelog-0.13.3: 208 | 209 | 0.13.3 — 2022-09-29 210 | ------------------- 211 | 212 | Fixed 213 | ..... 214 | 215 | - The hover tip for icons on pull requests and issues has text in the same 216 | order as the icons, making them easier to understand. 217 | 218 | .. 
_changelog-0.13.2: 219 | 220 | 0.13.2 — 2022-08-13 221 | ------------------- 222 | 223 | Fixed 224 | ..... 225 | 226 | - Add an HTML `` tag to ensure content is properly decoded as UTF-8. 227 | Fixes `issue 12`_. Thanks, Bill Mill. 228 | 229 | .. _issue 12: https://github.com/nedbat/dinghy/issues/12 230 | 231 | .. _changelog-0.13.1: 232 | 233 | 0.13.1 — 2022-08-03 234 | ------------------- 235 | 236 | Fixed 237 | ..... 238 | 239 | - On Windows, an alarming but harmless error would appear when finishing. 240 | This is now fixed, closing `issue 9`. Thanks, Carlton Gibson. 241 | 242 | .. _issue 9: https://github.com/nedbat/dinghy/issues/9 243 | 244 | .. _changelog-0.13.0: 245 | 246 | 0.13.0 — 2022-07-29 247 | ------------------- 248 | 249 | Removed 250 | ....... 251 | 252 | - Removed the deprecated "pull_requests" setting. 253 | 254 | Added 255 | ..... 256 | 257 | - The `api_root` setting lets GitHub Enterprise users control the GraphQL 258 | endpoint to use. 259 | 260 | Changed 261 | ....... 262 | 263 | - Adapt to the `2022-06-23 GitHub issues update`__, using the ProjectsV2 API 264 | instead of the ProjectsNext API. 265 | 266 | __ https://github.blog/changelog/2022-06-23-the-new-github-issues-june-23rd-update/ 267 | 268 | .. _changelog-0.12.0: 269 | 270 | 0.12.0 — 2022-06-12 271 | ------------------- 272 | 273 | Added 274 | ..... 275 | 276 | - The `title` option can be used on individual digests to add text to the 277 | title of the report. Thanks, Doug Hellmann. 278 | 279 | .. _changelog-0.11.5: 280 | 281 | 0.11.5 — 2022-06-07 282 | ------------------- 283 | 284 | Fixed 285 | ..... 286 | 287 | - Closed issues now distinguish between "completed" and "not planned". 288 | 289 | .. _changelog-0.11.4: 290 | 291 | 0.11.4 — 2022-05-10 292 | ------------------- 293 | 294 | Added 295 | ..... 296 | 297 | - HTML escaping is applied to the text pulled from GitHub (oops!) 298 | 299 | - Emojis are displayed as emojis rather than as text. 300 | 301 | .. 
_changelog-0.11.3: 302 | 303 | 0.11.3 — 2022-05-06 304 | ------------------- 305 | 306 | Fixed 307 | ..... 308 | 309 | - GitHub sometimes responds with "502 Bad Gateway". Pause and retry if that 310 | happens. 311 | 312 | .. _changelog-0.11.2: 313 | 314 | 0.11.2 — 2022-04-12 315 | ------------------- 316 | 317 | Added 318 | ..... 319 | 320 | - Added a ``--version`` option. 321 | 322 | Fixed 323 | ..... 324 | 325 | - Pull requests with many reviews would skip some reviews. Now all pull 326 | request data is fully retrieved. 327 | 328 | - On large digests, GitHub sometimes returns 403 as a rate limit. Retry when 329 | this happens to finish the queries. 330 | 331 | .. _changelog-0.11.1: 332 | 333 | 0.11.1 — 2022-03-29 334 | ------------------- 335 | 336 | Fixed 337 | ..... 338 | 339 | - Corrected a packaging mistake (missing Changelog entry). 340 | 341 | 342 | .. _changelog-0.11.0: 343 | 344 | 0.11.0 — 2022-03-29 345 | ------------------- 346 | 347 | Added 348 | ..... 349 | 350 | - Resolved comments are now indicated with a checkbox icon, and hover text of 351 | "resolved comment". 352 | 353 | Fixed 354 | ..... 355 | 356 | - Fixed a crash trying to get the repository for an issue in a project. 357 | 358 | .. _changelog-0.10.0: 359 | 360 | 0.10.0 — 2022-03-28 361 | ------------------- 362 | 363 | Changed 364 | ....... 365 | 366 | - Pull request data was not properly displayed: comments weren't included in 367 | the digest that should have been. 368 | 369 | - Pull request comments older than the cutoff date will be included if they are 370 | needed to show the discussion threads of newer comments. The old comments 371 | are shown in gray to help stay focused on recent activity. 372 | 373 | - Parsing of time durations was made stricter, so that "1 month" isn't 374 | mistaken for "1 minute". Fixes `issue 7`_ 375 | 376 | .. _issue 7: https://github.com/nedbat/dinghy/issues/7 377 | 378 | Removed 379 | ....... 
380 | 381 | - Oops, it turns out there's no such thing as a repo project for "Projects 382 | (beta)". That thing that wouldn't have worked has been removed. 383 | 384 | 385 | 0.9.0 — 2022-03-17 386 | ------------------ 387 | 388 | Added 389 | ..... 390 | 391 | - GitHub enterprise support: you can use URLs pointing to your own GitHub 392 | Enterprise installation. Only a single host can be used. Thanks, Henry 393 | Gessau. 394 | 395 | - A "search:" entry in the configuration file will find issues or pull requests 396 | matching the query. 397 | 398 | - Items in the configuration file can have ``title:`` to set an explicit title. 399 | 400 | Deprecated 401 | .......... 402 | 403 | - The ``pull_requests:`` configuration setting is deprecated in favor of 404 | ``search:``. ``pull_requests: org:my_org`` becomes ``search: org:my_org 405 | is:pr``. 406 | 407 | 0.8.0 — 2022-03-16 408 | ------------------ 409 | 410 | Added 411 | ..... 412 | 413 | - Repo projects are supported. 414 | 415 | Fixed 416 | ..... 417 | 418 | - Error handling failed on certain errors. This is now fixed, closing 419 | `issue 4`_. 420 | 421 | .. _issue 4: https://github.com/nedbat/dinghy/issues/4 422 | 423 | 0.7.1 — 2022-03-13 424 | ------------------ 425 | 426 | Fixed 427 | ..... 428 | 429 | - Better handling of authorization problems, with error message presented so 430 | that the user can fix them. 431 | 432 | 0.7.0 — 2022-03-12 433 | ------------------ 434 | 435 | Added 436 | ..... 437 | 438 | - The command line now accepts a GitHub URL to quickly get a week's digest of 439 | activity from a repo (or issues, pull requests, etc). 440 | 441 | - The logging level can now be specified with the ``-v``/``--verbosity`` 442 | command-line option. 443 | 444 | Fixed 445 | ..... 446 | 447 | - Dependencies now have minimum pins, fixing `issue 1`_. 448 | 449 | .. _issue 1: https://github.com/nedbat/dinghy/issues/1 450 | 451 | 0.6.0 — 2022-03-10 452 | ------------------ 453 | 454 | Added 455 | ..... 
456 | 457 | - GitHub's @ghost user shows up in GraphQL results as an "author" of None. 458 | Properly handle that case. 459 | 460 | Fixed 461 | ..... 462 | 463 | - Fixes to the color of labels. 464 | 465 | - Correct handling of HTML in bodies. 466 | 467 | 0.5.2 — 2022-03-08 468 | ------------------ 469 | 470 | Changed 471 | ....... 472 | 473 | - More HTML tweaks to indentation and information. 474 | 475 | 0.5.1 — 2022-03-07 476 | ------------------ 477 | 478 | Changed 479 | ....... 480 | 481 | - Indentation tweaks to make thread structure clearer. 482 | 483 | 0.5.0 — 2022-03-03 484 | ------------------ 485 | 486 | Changed 487 | ....... 488 | 489 | - Pull request reviews are displayed more compactly. 490 | 491 | 0.4.0 — 2022-02-28 492 | ------------------ 493 | 494 | Added 495 | ..... 496 | 497 | - A repo URL will report on both pull requests and issues in the repo. 498 | 499 | 0.3.0 — 2022-02-27 500 | ------------------ 501 | 502 | Added 503 | ..... 504 | 505 | - The configuration file can be specified as the argument on the command line. 506 | 507 | - GitHub icons decorate pull requests, issues, and comments to distinguish them 508 | and indicate their status. 509 | 510 | Changed 511 | ....... 512 | 513 | - The configuration file syntax changed. Now there is a top-level ``digests`` 514 | clause and an optional ``defaults`` clause. 515 | 516 | - The ``bots`` setting is now called ``ignore_users``. 517 | 518 | - Pull request review threads are presented hierarchically. 519 | 520 | 0.2.0 — 2022-02-21 521 | ------------------ 522 | 523 | Added 524 | ..... 525 | 526 | - Items can have options. Organization projects have a ``home_repo`` option so 527 | that issues from other repos will get an indication of the other repo. 528 | 529 | - Organizatons can be searched for pull requests. 530 | 531 | - If dinghy hits a GraphQL API rate limit, it will sleep until the limit is 532 | reset. 533 | 534 | - Don't report on activity by bot users. 
The ``bot`` setting can be used to 535 | list user accounts that should be considered bots. 536 | 537 | 0.1.0 — 2022-02-19 538 | ------------------ 539 | 540 | * First release. 541 | -------------------------------------------------------------------------------- /src/dinghy/digest.py: -------------------------------------------------------------------------------- 1 | """ 2 | Summarize issue activity in GitHub repos and projects. 3 | """ 4 | 5 | import asyncio 6 | import datetime 7 | import json 8 | import logging 9 | import operator 10 | import os 11 | import re 12 | import urllib.parse 13 | 14 | import yaml 15 | from glom import glom 16 | 17 | from . import __version__ 18 | from .graphql_helpers import build_query, GraphqlHelper 19 | from .helpers import DinghyError, json_save, parse_since 20 | from .jinja_helpers import render_jinja_to_file 21 | 22 | 23 | logger = logging.getLogger(__name__) 24 | 25 | GITHUB_URL_MAP = [] 26 | 27 | 28 | def github_route(url_pattern): 29 | """A decorator to associate a GitHub URL path regex with a Digester.get_ method. 30 | 31 | The regexes will be tried in the order the decorator is used in the class, 32 | so be careful if a path could match multiple patterns. 33 | """ 34 | 35 | def _dec(func): 36 | GITHUB_URL_MAP.append((url_pattern, func.__name__)) 37 | return func 38 | 39 | return _dec 40 | 41 | 42 | def dd(name: str) -> str: 43 | """Create an key for Dinghy-specfic data. (dd == Dinghy Data) 44 | 45 | Dinghy adds extra data to GitHub responses. All of the keys for this data 46 | are prefixed, created with this function. 47 | """ 48 | return f"dinghy_{name}" 49 | 50 | 51 | DD_children = dd("children") 52 | 53 | 54 | class Digester: 55 | """ 56 | Use GitHub GraphQL to get data about recent changes. 
57 | """ 58 | 59 | def __init__(self, since, options): 60 | self.since = since.strftime("%Y-%m-%dT%H:%M:%S") 61 | self.ignore_users = options.get("ignore_users", []) 62 | self.user_types = {"User"} 63 | if options.get("include_bots", False): 64 | self.user_types.add("Bot") 65 | self.api_root = options.get("api_root") 66 | self.github = "github.com" 67 | self.gql = None 68 | 69 | def prepare(self): 70 | """Create the network helpers we need.""" 71 | token = os.environ.get("GITHUB_TOKEN", "") 72 | api_root = self.api_root or f"https://api.{self.github}/graphql" 73 | self.gql = GraphqlHelper(api_root, token) 74 | 75 | @github_route(r"/orgs/(?P[^/]+)/projects/(?P\d+)/?") 76 | async def get_org_project_entries(self, org, number, home_repo="", title=None): 77 | """ 78 | Get entries from a organization project. 79 | 80 | Args: 81 | org (str): the organization owner of the repo. 82 | number (int|str): the project number. 83 | home_repo (str): the owner/name of a repo that most entries are in. 84 | """ 85 | project, project_data = await self.gql.nodes( 86 | query=build_query("org_project_entries.graphql"), 87 | variables=dict(org=org, projectNumber=int(number)), 88 | ) 89 | entries = [content for data in project_data if (content := data["content"])] 90 | entries = await self._process_entries(entries) 91 | for entry in entries: 92 | entry["other_repo"] = entry["repository"]["nameWithOwner"] != home_repo 93 | if DD_children not in entry: 94 | entry[DD_children] = entry["comments"]["nodes"] 95 | project = glom(project, "data.organization.project") 96 | container = { 97 | "url": project["url"], 98 | "container_kind": "project", 99 | "title": title or project["title"], 100 | "kind": "items", 101 | "entries": entries, 102 | } 103 | return container 104 | 105 | async def get_search_results(self, query, title=None): 106 | """ 107 | Get issues or pull requests returned by a search query. 
108 | """ 109 | query += f" updated:>{self.since}" 110 | _, entries = await self.gql.nodes( 111 | query=build_query("search_entries.graphql"), 112 | variables=dict(query=query), 113 | ) 114 | entries = await self._process_entries(entries) 115 | for entry in entries: 116 | entry["other_repo"] = True 117 | url_q = urllib.parse.quote_plus(query) 118 | if "is:pr" in query: 119 | kind = "pull requests" 120 | elif "is:issue" in query: 121 | kind = "issues" 122 | else: 123 | kind = "items" 124 | container = { 125 | "url": f"https://{self.github}/search?q={url_q}&type=issues", 126 | "container_kind": "search", 127 | "title": title or query, 128 | "kind": kind, 129 | "entries": entries, 130 | } 131 | return container 132 | 133 | @github_route(r"/(?P[^/]+)/(?P[^/]+)/?") 134 | async def get_repo_entries(self, owner, name, title=None): 135 | """ 136 | Get issues, pull requests, and releases from a repo. 137 | """ 138 | issue_container, pr_container, release_container = await asyncio.gather( 139 | self.get_repo_issues(owner, name, title=title), 140 | self.get_repo_pull_requests(owner, name), 141 | self.get_repo_releases(owner, name), 142 | ) 143 | entries = ( 144 | issue_container["entries"] 145 | + pr_container["entries"] 146 | + release_container["entries"] 147 | ) 148 | entries = self._trim_unwanted(entries) 149 | container = { 150 | **issue_container, 151 | "kind": "issues, pull requests, and releases", 152 | "entries": entries, 153 | } 154 | return container 155 | 156 | @github_route(r"/(?P[^/]+)/(?P[^/]+)/issues/?") 157 | async def get_repo_issues(self, owner, name, title=None): 158 | """ 159 | Get issues from a repo updated since a date, with comments since that date. 160 | 161 | Args: 162 | owner (str): the owner of the repo. 163 | name (str): the name of the repo. 
164 | """ 165 | repo, issues = await self.gql.nodes( 166 | query=build_query("repo_issues.graphql"), 167 | variables=dict(owner=owner, name=name, since=self.since), 168 | ) 169 | issues = await self._process_entries(issues) 170 | repo = glom(repo, "data.repository") 171 | container = { 172 | "url": repo["url"], 173 | "container_kind": "repo", 174 | "title": title or repo["nameWithOwner"], 175 | "kind": "issues", 176 | "entries": issues, 177 | } 178 | return container 179 | 180 | @github_route(r"/(?P[^/]+)/(?P[^/]+)/releases/?") 181 | async def get_repo_releases(self, owner, name, title=None): 182 | """ 183 | Get releases from a repo updated since a date 184 | 185 | Args: 186 | owner (str): the owner of the repo. 187 | name (str): the name of the repo. 188 | """ 189 | repo, releases = await self.gql.nodes( 190 | query=build_query("repo_releases.graphql"), 191 | variables=dict(owner=owner, name=name, since=self.since), 192 | ) 193 | releases = await self._process_entries(releases) 194 | repo = glom(repo, "data.repository") 195 | container = { 196 | "url": repo["url"], 197 | "container_kind": "repo", 198 | "title": title or repo["nameWithOwner"], 199 | "kind": "releases", 200 | "entries": releases, 201 | } 202 | return container 203 | 204 | @github_route(r"/(?P[^/]+)/(?P[^/]+)/pulls/?") 205 | async def get_repo_pull_requests(self, owner, name, title=None): 206 | """ 207 | Get pull requests from a repo updated since a date, with comments since that date. 208 | 209 | Args: 210 | owner (str): the owner of the repo. 211 | name (str): the name of the repo. 
212 | """ 213 | repo, pulls = await self.gql.nodes( 214 | query=build_query("repo_pull_requests.graphql"), 215 | variables=dict(owner=owner, name=name), 216 | donefn=(lambda nodes: nodes[-1]["updatedAt"] < self.since), 217 | ) 218 | pulls = await self._process_entries(pulls) 219 | 220 | repo = glom(repo, "data.repository") 221 | container = { 222 | "url": repo["url"], 223 | "container_kind": "repo", 224 | "title": title or repo["nameWithOwner"], 225 | "kind": "pull_requests", 226 | "entries": pulls, 227 | } 228 | return container 229 | 230 | def method_from_url(self, url): 231 | """ 232 | Dispatch to a get_* method from a GitHub URL. 233 | 234 | Args: 235 | url (str): A GitHub URL 236 | 237 | Returns: 238 | A method, and a dict of **kwargs. 239 | """ 240 | parsed = urllib.parse.urlparse(url) 241 | self.github = parsed.netloc 242 | for rx, fn_name in GITHUB_URL_MAP: 243 | if match_url := re.fullmatch(rx, parsed.path): 244 | return getattr(self, fn_name), match_url.groupdict() 245 | 246 | raise DinghyError(f"Can't understand URL {url!r}") 247 | 248 | async def get_more( 249 | self, container, graphql, id 250 | ): # pylint: disable=redefined-builtin 251 | """ 252 | If the `container` isn't full yet, get all the nodes. 253 | """ 254 | if container["totalCount"] > len(container["nodes"]): 255 | _, all_nodes = await self.gql.nodes( 256 | query=build_query(graphql), 257 | variables=dict(id=id), 258 | ) 259 | container["nodes"] = all_nodes 260 | 261 | def _node_is_interesting(self, node): 262 | """ 263 | Is a node interesting to show? It has to be new enough, by a real user, 264 | and not by someone we want to ignore. 265 | """ 266 | return ( 267 | node["updatedAt"] > self.since 268 | and node["author"]["__typename"] in self.user_types 269 | and node["author"]["login"] not in self.ignore_users 270 | ) 271 | 272 | def _trim_unwanted(self, nodes): 273 | """ 274 | Trim a list to keep only activity since `self.since`, and only by real 275 | users. 
276 | 277 | The returned list is also sorted by updatedAt date. 278 | """ 279 | nodes = (n for n in nodes if self._node_is_interesting(n)) 280 | nodes = sorted(nodes, key=operator.itemgetter("updatedAt")) 281 | return nodes 282 | 283 | def _fix_ghosts(self, obj): 284 | """ 285 | GitHub has a @ghost account for deleted users. That shows up in our 286 | data as None. Fix those to have data we can use. 287 | """ 288 | if isinstance(obj, list): 289 | for elt in obj: 290 | self._fix_ghosts(elt) 291 | elif isinstance(obj, dict): 292 | for key in obj: 293 | if key == "author": 294 | if obj["author"] is None: 295 | obj["author"] = { 296 | "__typename": "User", 297 | "login": "ghost", 298 | } 299 | else: 300 | self._fix_ghosts(obj[key]) 301 | 302 | async def _process_entries(self, entries): 303 | """ 304 | Process entries after they've been retrieved. 305 | 306 | Keep only things updated since our date, and sort them. 307 | """ 308 | # $set_env.py: DINGHY_SAVE_ENTRIES - save each entry in its own JSON file. 309 | if int(os.environ.get("DINGHY_SAVE_ENTRIES", 0)): 310 | for entry in entries: 311 | try: 312 | kind = entry["__typename"].lower() 313 | num = entry["number"] 314 | except KeyError: 315 | pass 316 | else: 317 | await json_save(entry, f"save_{kind}_{num}.json") 318 | 319 | self._fix_ghosts(entries) 320 | 321 | entries = self._trim_unwanted(entries) 322 | entries = await asyncio.gather(*map(self._process_entry, entries)) 323 | return entries 324 | 325 | async def _process_entry(self, entry): 326 | """ 327 | Apply entry-specific processing to an entry. 328 | """ 329 | if entry["__typename"] == "Issue": 330 | await self._process_issue(entry) 331 | elif entry["__typename"] == "PullRequest": 332 | await self._process_pull_request(entry) 333 | self._add_reasons(entry) 334 | return entry 335 | 336 | async def _process_issue(self, issue): 337 | """ 338 | Add more comments to an issue. 
339 | 340 | We can't paginate the comments on issues while paginating issues, so 341 | this method gets the rest of the comments. 342 | 343 | Args: 344 | issue (dict): the issue to populate. 345 | """ 346 | await self.get_more(issue["comments"], "issue_comments.graphql", issue["id"]) 347 | issue[DD_children] = self._trim_unwanted(issue["comments"]["nodes"]) 348 | 349 | async def _process_pull_request(self, pull): 350 | """ 351 | Do extra work to make a pull request right for reporting. 352 | """ 353 | # Pull requests have complex trees of data, with comments in 354 | # multiple places, and duplications. Reviews can also be finished 355 | # with no comment, but we want them to appear in the digest. 356 | # 357 | # Pull requests have: 358 | # comments: 359 | # Standalone comments that should always be included 360 | # reviews: 361 | # Each is a review by a person, who can add comments all over the 362 | # pull request, including in different threads. 363 | # reviewThreads: 364 | # Each is a sequence of comments that follow one another. 365 | # 366 | 367 | # Pull all the data from paginated components. 368 | await asyncio.gather( 369 | self.get_more(pull["comments"], "pr_comments.graphql", pull["id"]), 370 | self.get_more(pull["reviews"], "pr_reviews.graphql", pull["id"]), 371 | self.get_more( 372 | pull["reviewThreads"], "pr_reviewthreads.graphql", pull["id"] 373 | ), 374 | ) 375 | 376 | coros = [] 377 | coros.extend( 378 | self.get_more(r["comments"], "review_comments.graphql", r["id"]) 379 | for r in pull["reviews"]["nodes"] 380 | ) 381 | coros.extend( 382 | self.get_more(rt["comments"], "reviewthread_comments.graphql", rt["id"]) 383 | for rt in pull["reviewThreads"]["nodes"] 384 | ) 385 | await asyncio.gather(*coros) 386 | 387 | children = {} 388 | reviews = {} 389 | 390 | # Make a map of the reviews. 
391 | for rev in pull["reviews"]["nodes"]: 392 | rev[dd("review_state")] = rev["state"] 393 | reviews[rev["id"]] = rev 394 | 395 | # For each thread, attach the thread as a child of the review. Each 396 | # comment in the thread can be from a different review (as people 397 | # respond to each other). The whole thread will be attached to the 398 | # review for the first comment. Make comments 2-N as children of 399 | # comment 1. 400 | for thread in pull["reviewThreads"]["nodes"]: 401 | com0 = thread["comments"]["nodes"][0] 402 | com0[DD_children] = thread["comments"]["nodes"][1:] 403 | com0["isResolved"] = thread["isResolved"] 404 | if com0["pullRequestReview"]: 405 | rev_id = com0["pullRequestReview"]["id"] 406 | review_comments = reviews[rev_id].setdefault(DD_children, []) 407 | review_comments.append(com0) 408 | 409 | # For each review, show it if it has a body, or if it has children, or 410 | # if it's not just "COMMENTED". 411 | for rev in reviews.values(): 412 | if rev["bodyText"] or rev.get(DD_children) or rev["state"] != "COMMENTED": 413 | com = children.setdefault(rev["id"], dict(rev)) 414 | com[dd("review_state")] = rev["state"] 415 | 416 | if not rev["bodyText"] and len(rev.get(DD_children, ())) == 1: 417 | # A review with just one comment and no body: the comment should 418 | # go where the review would have been. 419 | com = rev[DD_children][0] 420 | com[dd("review_state")] = rev[dd("review_state")] 421 | children[rev["id"]] = com 422 | 423 | # Comments are simple: they all get shown. 424 | for com in pull["comments"]["nodes"]: 425 | children[com["id"]] = com 426 | 427 | # Examine all the resulting threads (children). Keep a thread if it has 428 | # any comments newer than our since date. Mark older comments as old. 429 | kids, _ = self._trim_unwanted_tree(children.values()) 430 | pull[DD_children] = kids 431 | 432 | def _trim_unwanted_tree(self, nodes): 433 | """ 434 | Trim a nested list to indicate activity since `self.since`. 
A thread 435 | will be kept if any of its children is newer than since. Items older 436 | than that will get ["boring"]=True, and shown grayed in the output. 437 | """ 438 | keep = [] 439 | any_interesting_total = False 440 | for node in nodes: 441 | if self._node_is_interesting(node): 442 | any_interesting = True 443 | else: 444 | any_interesting = False 445 | node[dd("boring")] = True 446 | kids, any_interesting_kids = self._trim_unwanted_tree( 447 | node.get(DD_children, ()) 448 | ) 449 | if any_interesting or any_interesting_kids: 450 | node[DD_children] = kids 451 | keep.append(node) 452 | any_interesting_total = True 453 | keep = sorted(keep, key=operator.itemgetter("updatedAt")) 454 | return keep, any_interesting_total 455 | 456 | def _add_reasons(self, entry): 457 | """ 458 | Populate an entry with the reasons it's been included. 459 | 460 | Args: 461 | entry (dict): the issue or pull request data. 462 | 463 | """ 464 | # write "reasonCreated" based on "createdAt", etc. 465 | for slug in ["Created", "Closed", "Merged"]: 466 | at = slug.lower() + "At" 467 | entry[dd(f"reason{slug}")] = bool(entry.get(at) and entry[at] > self.since) 468 | 469 | 470 | def coro_from_item(digester, item): 471 | """ 472 | Parse a single config item, and make a digester coro for it. 
473 | """ 474 | url = None 475 | more_kwargs = {} 476 | if isinstance(item, str): 477 | url = item 478 | elif "url" in item: 479 | more_kwargs = dict(item) 480 | url = more_kwargs.pop("url") 481 | 482 | if url: 483 | fn, kwargs = digester.method_from_url(url) 484 | else: 485 | if "search" in item: 486 | kwargs = dict(item) 487 | kwargs["query"] = kwargs.pop("search") 488 | fn = digester.get_search_results 489 | else: 490 | raise DinghyError(f"Don't understand item: {item!r}") 491 | 492 | try: 493 | coro = fn(**kwargs, **more_kwargs) 494 | except TypeError as type_err: 495 | raise DinghyError(f"Problem with config item: {item}: {type_err}") from None 496 | 497 | return coro 498 | 499 | 500 | async def make_digest(items, since=None, digest="digest.html", **options): 501 | """ 502 | Make a single digest. 503 | 504 | Args: 505 | since (optional str): a duration spec ("2 day", "3d6h", etc). Default: 1 week. 506 | items (list[str|dict]): a list of YAML objects or GitHub URLs to collect entries from. 507 | digest (str): the HTML file name to write. 508 | 509 | """ 510 | if since is None: 511 | since = "1 week" 512 | show_date = since != "forever" 513 | since_date = parse_since(since) 514 | digester = Digester(since=since_date, options=options) 515 | 516 | coros = [] 517 | for item in items: 518 | try: 519 | coros.append(coro_from_item(digester, item)) 520 | except: 521 | for coro in coros: 522 | coro.close() 523 | raise 524 | 525 | digester.prepare() 526 | results = await asyncio.gather(*coros) 527 | 528 | # $set_env.py: DINGHY_SAVE_RESULT - save digest data in a JSON file. 
529 | if int(os.environ.get("DINGHY_SAVE_RESULT", 0)): 530 | json_name = digest.replace(".html", ".json") 531 | await json_save(results, json_name) 532 | logger.info(f"Wrote results data: {json_name}") 533 | 534 | await render_jinja_to_file( 535 | options.get("template", "digest.html.j2"), 536 | digest, 537 | results=results, 538 | since=since_date if show_date else None, 539 | now=datetime.datetime.now(), 540 | __version__=__version__, 541 | title=options.get("title", ""), 542 | ) 543 | logger.info(f"Wrote digest: {digest}") 544 | 545 | 546 | async def make_digests_from_config(conf_file, digests=None, since=None): 547 | """ 548 | Make all the digests specified by a configuration file. 549 | 550 | Args: 551 | conf_file (str): a file path to read as a config file. 552 | digests (list of str): the digest names to make. 553 | since (str): the spec for since when. 554 | """ 555 | try: 556 | with open(conf_file, encoding="utf-8") as cf: 557 | config = yaml.safe_load(cf) 558 | except Exception as err: 559 | raise DinghyError(f"Couldn't read config file {conf_file!r}: {err}") from err 560 | 561 | if "digests" not in config: 562 | raise DinghyError(f"No 'digests:' clause in config file {conf_file!r}") 563 | 564 | defaults = config.get("defaults", {}) 565 | coros = [] 566 | for spec in config["digests"]: 567 | args = {**defaults, **spec} 568 | if digests is not None and args["digest"] not in digests: 569 | continue 570 | if since is not None: 571 | args["since"] = since 572 | coros.append(make_digest(**args)) 573 | await asyncio.gather(*coros) 574 | 575 | 576 | def just_render(result_file): 577 | """Helper function to re-render stored results. 578 | 579 | For iterating on rendering changes without using up GitHub rate limits. 
580 | 581 | $ python -c "import sys,dinghy.digest as dd; dd.just_render(sys.argv[1])" /tmp/lots.json 582 | 583 | """ 584 | with open(result_file, encoding="utf-8") as j: 585 | results = json.load(j) 586 | 587 | asyncio.run( 588 | render_jinja_to_file( 589 | "digest.html.j2", 590 | result_file.replace(".json", ".html"), 591 | results=results, 592 | since=datetime.datetime.now(), 593 | now=datetime.datetime.now(), 594 | __version__=__version__, 595 | ) 596 | ) 597 | --------------------------------------------------------------------------------