├── repos
│   └── .gitkeep
├── joint_teapot
│   ├── utils
│   │   ├── __init__.py
│   │   ├── main.py
│   │   ├── logger.py
│   │   └── joj3.py
│   ├── __init__.py
│   ├── __main__.py
│   ├── workers
│   │   ├── __init__.py
│   │   ├── joj.py
│   │   ├── git.py
│   │   ├── canvas.py
│   │   ├── mattermost.py
│   │   └── gitea.py
│   ├── config.py
│   ├── teapot.py
│   └── app.py
├── pytest.ini
├── .gitattributes
├── requirements-dev.txt
├── mkdocs.yml
├── .env.example
├── .github
│   └── workflows
│       ├── mkdocs.yml
│       ├── codeql.yml
│       └── python-package.yml
├── mypy.ini
├── requirements.txt
├── docs
│   └── index.md
├── LICENSE
├── .pre-commit-config.yaml
├── .gitea
│   └── workflows
│       └── build.yaml
├── setup.py
├── examples
│   └── ve482.py
├── README.md
└── .gitignore
/repos/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/joint_teapot/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | filterwarnings =
3 | ignore::DeprecationWarning
4 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # .env.example makes the repo not pure Python
2 | .env.example linguist-vendored
3 |
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | pre-commit>=2.10.1
2 | pytest>=6.2.2
3 | zipp>=3.19.1 # not directly required, pinned by Snyk to avoid a vulnerability
4 |
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | site_name: Joint Teapot
2 | repo_url: https://github.com/BoYanZh/joint-teapot
3 | repo_name: BoYanZh/joint-teapot
4 | theme:
5 | name: material
6 |
--------------------------------------------------------------------------------
/joint_teapot/__init__.py:
--------------------------------------------------------------------------------
1 | __version__ = "0.0.0"
2 |
3 | from joint_teapot.app import app
4 | from joint_teapot.teapot import Teapot as Teapot
5 | from joint_teapot.utils.logger import logger as logger
6 |
7 |
8 | def main() -> None:
9 | app()
10 |
--------------------------------------------------------------------------------
/joint_teapot/__main__.py:
--------------------------------------------------------------------------------
1 | from joint_teapot.app import app
2 | from joint_teapot.utils.logger import logger as logger
3 |
4 | if __name__ == "__main__":
5 | try:
6 | app()
7 | except Exception:
8 | logger.exception("Unexpected error:")
9 |
--------------------------------------------------------------------------------
/joint_teapot/workers/__init__.py:
--------------------------------------------------------------------------------
1 | from joint_teapot.workers.canvas import Canvas as Canvas
2 | from joint_teapot.workers.git import Git as Git
3 | from joint_teapot.workers.gitea import Gitea as Gitea
4 | from joint_teapot.workers.mattermost import Mattermost as Mattermost
5 |
--------------------------------------------------------------------------------
/.env.example:
--------------------------------------------------------------------------------
1 | CANVAS_ACCESS_TOKEN=CiBD8fLtEhUOkcjdKhIj18Fx13WV1e3hwffewrfN3whAcS1IKPeJ41fBKq1EzkGd
2 | CANVAS_COURSE_ID=1445
3 | GITEA_ACCESS_TOKEN=10cdf70f8fe1b7a5d3321e5a10d2d743e4818d4a
4 | GITEA_ORG_NAME=VG101
5 | MATTERMOST_TEAM=VG101
6 | MATTERMOST_ACCESS_TOKEN=qjnpxytsgpdnxfbpyhrinzdapl
7 | MATTERMOST_TEACHING_TEAM=["A", "B", "C"]
8 |
--------------------------------------------------------------------------------
/.github/workflows/mkdocs.yml:
--------------------------------------------------------------------------------
1 | name: mkdocs
2 | on:
3 | push:
4 | branches:
5 | - master
6 | jobs:
7 | deploy:
8 | if: ${{ vars.RUN_ON_GITEA != 'true' }}
9 | runs-on: ubuntu-latest
10 | steps:
11 | - uses: actions/checkout@v2
12 | - uses: actions/setup-python@v2
13 | with:
14 | python-version: 3.x
15 | - run: pip install mkdocs-material
16 | - run: mkdocs gh-deploy --force
17 |
--------------------------------------------------------------------------------
/mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 | plugins = pydantic.mypy
3 |
4 | follow_imports = silent
5 | warn_redundant_casts = True
6 | warn_unused_ignores = True
7 | disallow_any_generics = True
8 | check_untyped_defs = True
9 | no_implicit_reexport = True
10 |
11 | # for strict mypy: (this is the tricky one :-))
12 | disallow_untyped_defs = True
13 |
14 | [pydantic-mypy]
15 | init_forbid_extra = True
16 | init_typed = True
17 | warn_required_dynamic_aliases = True
18 | warn_untyped_fields = True
19 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | canvasapi>=2.2.0
2 | colorama>=0.4.6
3 | filelock>=3.14.0
4 | focs_gitea>=1.22.0
5 | GitPython>=3.1.18
6 | joj-submitter>=0.0.8
7 | loguru>=0.5.3
8 | mattermostdriver>=7.3.2
9 | mosspy>=1.0.9
10 | patool>=1.12
11 | pydantic>=2.0.2
12 | pydantic-settings>=2.0.1
13 | requests>=2.32.2 # not directly required, pinned by Snyk to avoid a vulnerability
14 | typer>=0.12.3
15 | urllib3>=2.2.2 # not directly required, pinned by Snyk to avoid a vulnerability
16 | zipp>=3.19.1 # not directly required, pinned by Snyk to avoid a vulnerability
17 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | # Joint Teapot
2 |
3 | A handy tool for TAs in JI to handle coursework through [Gitea](https://focs.ji.sjtu.edu.cn/git/), [Canvas](https://umjicanvas.com/), and [JOJ](https://joj.sjtu.edu.cn/). "Joint" refers to JI and to how this tool joins these sites together; "Teapot" because it holds Gitea, inspired by [@nichujie](https://github.com/nichujie).
4 |
5 | ## Getting Started
6 |
7 | ### Setup venv (Optional)
8 |
9 | ```bash
10 | python3 -m venv env # you only need to do this once
11 | source env/bin/activate # each time you need this venv (use env/Scripts/activate on Windows)
12 | ```
13 |
14 | ### Install
15 |
16 | ```bash
17 | pip3 install -e .
18 | cp .env.example .env && vi .env # configure environment
19 | joint-teapot --help
20 | ```
21 |
22 | ### For developers
23 |
24 | ```bash
25 | pip3 install -r requirements-dev.txt
26 | pre-commit install
27 | pytest -svv
28 | ```
29 |
--------------------------------------------------------------------------------
/.github/workflows/codeql.yml:
--------------------------------------------------------------------------------
1 | name: "CodeQL"
2 |
3 | on:
4 | push:
5 | branches: [ "master" ]
6 | pull_request:
7 | branches: [ "master" ]
8 | schedule:
9 | - cron: "26 0 * * 5"
10 |
11 | jobs:
12 | analyze:
13 | if: ${{ vars.RUN_ON_GITEA != 'true' }}
14 | name: Analyze
15 | runs-on: ubuntu-latest
16 | permissions:
17 | actions: read
18 | contents: read
19 | security-events: write
20 |
21 | strategy:
22 | fail-fast: false
23 | matrix:
24 | language: [ python ]
25 |
26 | steps:
27 | - name: Checkout
28 | uses: actions/checkout@v3
29 |
30 | - name: Initialize CodeQL
31 | uses: github/codeql-action/init@v2
32 | with:
33 | languages: ${{ matrix.language }}
34 | queries: +security-and-quality
35 |
36 | - name: Autobuild
37 | uses: github/codeql-action/autobuild@v2
38 |
39 | - name: Perform CodeQL Analysis
40 | uses: github/codeql-action/analyze@v2
41 | with:
42 | category: "/language:${{ matrix.language }}"
43 |
--------------------------------------------------------------------------------
/joint_teapot/utils/main.py:
--------------------------------------------------------------------------------
1 | import math
2 | import re
3 | from typing import Callable, Iterable, Optional, TypeVar
4 |
5 | from canvasapi.user import User
6 |
7 | _T = TypeVar("_T")
8 |
9 |
10 | def first(
11 | iterable: Iterable[_T], condition: Callable[[_T], bool] = lambda x: True
12 | ) -> Optional[_T]:
13 | return next((x for x in iterable if condition(x)), None)
14 |
15 |
16 | def percentile(
17 | N: Iterable[float], percent: float, key: Callable[[float], float] = lambda x: x
18 | ) -> Optional[float]:
19 | if not N:
20 | return None
21 | N = sorted(N)
22 | k = (len(N) - 1) * percent
23 | f = math.floor(k)
24 | c = math.ceil(k)
25 | if f == c:
26 | return key(N[int(k)])
27 | d0 = key(N[int(f)]) * (c - k)
28 | d1 = key(N[int(c)]) * (k - f)
29 | return d0 + d1
30 |
31 |
32 | def default_repo_name_convertor(user: User) -> str:
33 | sis_id, name = user.sis_id, user.name
34 | eng = re.sub("[\u4e00-\u9fa5]", "", name)
35 | eng = eng.replace(",", "")
36 | eng = eng.title().replace(" ", "").replace("\xa0", "")
37 | return f"{eng}{sis_id}"
38 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 BoYanZh
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v6.0.0
4 | hooks:
5 | - id: check-yaml
6 | - id: end-of-file-fixer
7 | - id: trailing-whitespace
8 | - id: requirements-txt-fixer
9 | - repo: https://github.com/pre-commit/mirrors-mypy
10 | rev: "v1.18.2"
11 | hooks:
12 | - id: mypy
13 | additional_dependencies:
14 | - pydantic
15 | - repo: https://github.com/asottile/pyupgrade
16 | rev: v3.20.0
17 | hooks:
18 | - id: pyupgrade
19 | - repo: https://github.com/hadialqattan/pycln
20 | rev: v2.5.0
21 | hooks:
22 | - id: pycln
23 | args: [-a]
24 | - repo: https://github.com/PyCQA/bandit
25 | rev: '1.8.6'
26 | hooks:
27 | - id: bandit
28 | - repo: https://github.com/PyCQA/isort
29 | rev: 6.0.1
30 | hooks:
31 | - id: isort
32 | args: ["--profile", "black", "--filter-files"]
33 | - repo: https://github.com/psf/black
34 | rev: 25.9.0
35 | hooks:
36 | - id: black
37 | - repo: https://github.com/Lucas-C/pre-commit-hooks
38 | rev: v1.5.5
39 | hooks:
40 | - id: remove-crlf
41 | - id: remove-tabs
42 |
--------------------------------------------------------------------------------
/.gitea/workflows/build.yaml:
--------------------------------------------------------------------------------
1 | name: build
2 | on:
3 | - push
4 | jobs:
5 | trigger-build-image:
6 | runs-on: ubuntu-latest
7 | if: gitea.ref == 'refs/heads/master'
8 | steps:
9 | - name: Set up SSH
10 | run: |
11 | mkdir -p ~/.ssh
12 | echo "${{ secrets.DEPLOY_PRIVATE_KEY }}" > ~/.ssh/id_ed25519
13 | echo "${{ secrets.DEPLOY_KEY }}" > ~/.ssh/id_ed25519.pub
14 | chmod 600 ~/.ssh/id_ed25519
15 | chmod 600 ~/.ssh/id_ed25519.pub
16 | ssh-keyscan -p 2222 focs.ji.sjtu.edu.cn >> ~/.ssh/known_hosts
17 | ssh -T git@focs.ji.sjtu.edu.cn -p 2222
18 | - name: Set up Git
19 | run: |
20 | git config --global user.name "bot-joj"
21 | git config --global user.email "bot-joj@focs.ji.sjtu.edu.cn"
22 | - name: Clone, Commit and Push
23 | shell: bash
24 | run: |
25 | git clone ssh://git@focs.ji.sjtu.edu.cn:2222/JOJ/runner-images.git runner-images
26 | cd runner-images
27 | git submodule update --init --remote --force
28 | git add -A
29 | git commit --allow-empty -m "chore: ${{ gitea.repository }}@${{ gitea.sha }} trigger force build gitea actions"
30 | git push
31 |
--------------------------------------------------------------------------------
/.github/workflows/python-package.yml:
--------------------------------------------------------------------------------
1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions
2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
3 |
4 | name: Python package
5 |
6 | on: [ push ]
7 |
8 | jobs:
9 | build:
10 | if: ${{ vars.RUN_ON_GITEA != 'true' }}
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: actions/checkout@v2
14 | - name: Set up Python 3.9
15 | uses: actions/setup-python@v2
16 | with:
17 | python-version: 3.9
18 | - name: Cache environment
19 | uses: actions/cache@v4
20 | id: cache
21 | with:
22 | path: ${{ env.pythonLocation }}
23 | key: ${{ runner.os }}-${{ env.pythonLocation }}-${{ secrets.CACHE_VERSION }}-${{ hashFiles('**/setup.py') }}-${{ hashFiles('**/requirements.txt') }}
24 | restore-keys: |
25 | ${{ runner.os }}-${{ env.pythonLocation }}
26 | - name: Install dependencies
27 | if: steps.cache.outputs.cache-hit != 'true'
28 | run: |
29 | python -m pip install --upgrade pip
30 | - name: Install itself
31 | run: if [ -f requirements.txt ]; then pip install -e .; fi
32 | - name: Run --help
33 | run: joint-teapot --help
34 |
--------------------------------------------------------------------------------
/joint_teapot/utils/logger.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import sys
3 | from sys import stderr
4 | from types import FrameType
5 | from typing import Optional
6 |
7 | from loguru import logger as logger
8 |
9 | from joint_teapot.config import settings
10 |
11 |
12 | # recipe from https://loguru.readthedocs.io/en/stable/overview.html#entirely-compatible-with-standard-logging
13 | class InterceptHandler(logging.Handler):
14 | def __init__(self) -> None:
15 | super().__init__()
16 |
17 | def emit(self, record: logging.LogRecord) -> None:
18 | try:
19 | level = logger.level(record.levelname).name
20 | except ValueError:
21 | level = record.levelno
22 |
23 | # Find caller from where originated the logged message
24 | frame: Optional[FrameType] = sys._getframe(6)
25 | depth = 6
26 | while frame and frame.f_code.co_filename == logging.__file__:
27 | frame = frame.f_back
28 | depth += 1
29 |
30 | logger.opt(depth=depth, exception=record.exc_info).log(
31 | level,
32 | record.getMessage(),
33 | )
34 |
35 |
36 | def set_logger(
37 | stderr_log_level: str = settings.stderr_log_level,
38 | ) -> None:
39 | logging.basicConfig(handlers=[InterceptHandler()], level=0, force=True)
40 | logger.remove()
41 | logger.add(stderr, level=stderr_log_level, colorize=stderr.isatty())
42 | logger.add(settings.log_file_path, level="DEBUG")
43 |
44 |
45 | set_logger()
46 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | from typing import List
4 |
5 | from setuptools import find_packages, setup
6 |
7 |
8 | def get_version(package: str) -> str:
9 | """
10 | Return package version as listed in `__version__` in `__init__.py`.
11 | """
12 | path = os.path.join(package, "__init__.py")
13 | main_py = open(path, encoding="utf8").read()
14 | match = re.search("__version__ = ['\"]([^'\"]+)['\"]", main_py)
15 | if match is None:
16 | return "0.0.0"
17 | return match.group(1)
18 |
19 |
20 | def get_long_description() -> str:
21 | """
22 | Return the README.
23 | """
24 | return open("README.md", encoding="utf8").read()
25 |
26 |
27 | def get_install_requires() -> List[str]:
28 | """
29 | Return each line of requirements.txt.
30 | """
31 | return open("requirements.txt").read().splitlines()
32 |
33 |
34 | setup(
35 | name="joint-teapot",
36 | version=get_version("joint_teapot"),
37 | url="https://github.com/BoYanZh/joint-teapot",
38 | license="MIT",
39 | description="A handy tool for TAs in JI to handle stuffs through Gitea, Canvas, JOJ and Mattermost.",
40 | long_description=get_long_description(),
41 | long_description_content_type="text/markdown",
42 | author="BoYanZh",
43 | author_email="bomingzh@sjtu.edu.cn",
44 | maintainer="BoYanZh",
45 | maintainer_email="bomingzh@sjtu.edu.cn",
46 | packages=find_packages(),
47 | python_requires=">=3.6",
48 | entry_points={"console_scripts": ["joint-teapot=joint_teapot:main"]},
49 | install_requires=get_install_requires(),
50 | )
51 |
--------------------------------------------------------------------------------
/joint_teapot/config.py:
--------------------------------------------------------------------------------
1 | from functools import lru_cache
2 | from typing import List
3 |
4 | from pydantic_settings import BaseSettings, SettingsConfigDict
5 |
6 |
7 | class Settings(BaseSettings):
8 | """
9 | Define the settings (config).
10 | """
11 |
12 | # canvas
13 | canvas_domain_name: str = "oc.sjtu.edu.cn"
14 | canvas_suffix: str = "/"
15 | canvas_access_token: str = ""
16 | canvas_course_id: int = 0
17 |
18 | # gitea
19 | gitea_domain_name: str = "focs.ji.sjtu.edu.cn"
20 | gitea_suffix: str = "/git"
21 | gitea_access_token: str = ""
22 | gitea_org_name: str = ""
23 | gitea_debug: bool = False
24 |
25 | # git
26 | git_host: str = "ssh://git@focs.ji.sjtu.edu.cn:2222"
27 | repos_dir: str = "./repos"
28 | default_branch: str = "master"
29 |
30 | # mattermost
31 | mattermost_domain_name: str = "focs.ji.sjtu.edu.cn"
32 | mattermost_suffix: str = "/mm"
33 | mattermost_access_token: str = ""
34 | mattermost_team: str = ""
35 | mattermost_teaching_team: List[str] = [
36 | "charlem",
37 | ]
38 |
39 | # joj
40 | joj_sid: str = ""
41 |
42 | # joj3
43 | joj3_lock_file_path: str = ".git/teapot-joj3-all-env.lock"
44 | joj3_lock_file_timeout: int = 30
45 |
46 | # moss
47 | moss_user_id: int = 9876543210
48 |
49 | # log file
50 | log_file_path: str = "joint-teapot.log"
51 | stderr_log_level: str = "INFO"
52 | model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8")
53 |
54 |
55 | @lru_cache()
56 | def get_settings() -> Settings:
57 | return Settings()
58 |
59 |
60 | def set_settings(new_settings: Settings) -> None:
61 | for field, value in new_settings.model_dump(exclude_unset=True).items():
62 | setattr(settings, field, value)
63 |
64 |
65 | settings: Settings = get_settings()
66 |
--------------------------------------------------------------------------------
/joint_teapot/workers/joj.py:
--------------------------------------------------------------------------------
1 | import io
2 | import os
3 | import zipfile
4 | from typing import Tuple
5 |
6 | from colorama import Fore, Style, init
7 | from joj_submitter import JOJSubmitter, Language
8 |
9 | from joint_teapot.config import settings
10 | from joint_teapot.utils.logger import logger
11 |
12 |
13 | class JOJ:
14 | def __init__(self, sid: str = ""):
15 | sid = sid or settings.joj_sid
16 | init()
17 | self.submitter = JOJSubmitter(sid, logger)
18 |
19 | def submit_dir(self, problem_url: str, path: str, lang: str) -> Tuple[int, str]:
20 | if lang not in list(Language):
21 | raise Exception(f"unsupported language '{lang}' for JOJ")
22 | exclude_paths = [".git"]
23 | zip_buffer = io.BytesIO()
24 | zip_buffer.name = f"{os.path.basename(path)}.zip"
25 | with zipfile.ZipFile(zip_buffer, "a", zipfile.ZIP_DEFLATED) as zip_file:
26 | for root, dirs, files in os.walk(path):
27 | dirs[:] = [d for d in dirs if d not in exclude_paths]
28 | for file in files:
29 | zip_file.write(
30 | os.path.join(root, file),
31 | os.path.relpath(os.path.join(root, file), path),
32 | )
33 | zip_buffer.seek(0)
34 | response = self.submitter.upload_file(problem_url, zip_buffer, lang)
35 | if response.status_code != 200:
36 | logger.error(
37 | f"{path} submit to JOJ error, status code {response.status_code}"
38 | )
39 | return -1, ""
40 | logger.info(f"{path} submit to JOJ succeed, record url {response.url}")
41 | record = self.submitter.get_status(response.url)
42 | fore_color = Fore.RED if record.status != "Accepted" else Fore.GREEN
43 | logger.info(
44 | f"status: {fore_color}{record.status}{Style.RESET_ALL}, "
45 | + f"accept number: {Fore.BLUE}{record.accepted_count}{Style.RESET_ALL}, "
46 | + f"score: {Fore.BLUE}{record.score}{Style.RESET_ALL}, "
47 | + f"total time: {Fore.BLUE}{record.total_time}{Style.RESET_ALL}, "
48 | + f"peak memory: {Fore.BLUE}{record.peak_memory}{Style.RESET_ALL}"
49 | )
50 | score_int = 0
51 | try:
52 | score_int = int(record.score)
53 | except ValueError:
54 | pass
55 | return score_int, response.url
56 |
--------------------------------------------------------------------------------
/examples/ve482.py:
--------------------------------------------------------------------------------
1 | import glob
2 | import json
3 | import ntpath
4 | import os
5 | from typing import cast
6 |
7 | from canvasapi.assignment import Assignment
8 |
9 | from joint_teapot import Teapot, logger
10 | from joint_teapot.utils.main import default_repo_name_convertor, first, percentile
11 |
12 |
13 | class VE482Teapot(Teapot):
14 | def p1_check(self) -> None:
15 | fault_repos = []
16 | for repo_name in self.gitea.get_all_repo_names():
17 | if not repo_name.endswith("p1"):
18 | continue
19 | faults = []
20 | succeed = self.checkout_to_repo_by_release_name(repo_name, "p1")
21 | if succeed:
22 | contain_c_file = False
23 | contain_readme_file = False
24 | for fn in glob.glob(f"{self.git.repos_dir}/{repo_name}/*"):
25 | basename = ntpath.basename(fn)
26 | if basename.endswith(".c"):
27 | contain_c_file = True
28 | if basename.lower().startswith("readme"):
29 | contain_readme_file = True
30 | if not contain_c_file:
31 | faults.append(
32 | "no C file found in root directory in release p1, "
33 | "can not compile on JOJ"
34 | )
35 | if not contain_readme_file:
36 | faults.append(
37 | "no README file found in root directory in release p1"
38 | )
39 | else:
40 | faults.append("no release named p1")
41 | if faults:
42 | fault_string = ""
43 | for fault in faults:
44 | fault_string += f"- {fault}\n"
45 | logger.info("\n".join(("", repo_name, "", fault_string)))
46 | self.gitea.issue_api.issue_create_issue(
47 | self.gitea.org_name,
48 | repo_name,
49 | body={
50 | "body": fault_string,
51 | "title": "p1 submission pre-check failed",
52 | },
53 | )
54 | fault_repos.append(repo_name)
55 | logger.info(f"{len(fault_repos)} fault repo(s): {fault_repos}")
56 |
57 | def p1_submit(self) -> None:
58 | res_dict = {}
59 | assignment_name = "p1.3"
60 | assignment = first(self.canvas.assignments, lambda x: x.name == assignment_name)
61 | if assignment is None:
62 | logger.info(f"Canvas assignment {assignment_name} not found")
63 | return
64 | assignment = cast(Assignment, assignment)
65 | students = self.canvas.students
66 | for submission in assignment.get_submissions():
67 | student = first(students, lambda x: x.id == submission.user_id)
68 | if student is None:
69 | continue
70 | repo_name = default_repo_name_convertor(student) + "-p1"
71 | repo_dir = os.path.join(self.git.repos_dir, repo_name)
72 | base_score, base_url = self.joj.submit_dir(
73 | "https://joj.sjtu.edu.cn/d/ve482_fall_2021/p/61c2d0b27fe7290006b27034",
74 | repo_dir,
75 | "make",
76 | )
77 | bonus_score, bonus_url = self.joj.submit_dir(
78 | "https://joj.sjtu.edu.cn/d/ve482_fall_2021/p/61c2d49e7fe7290006b2703e",
79 | repo_dir,
80 | "make",
81 | )
82 | total_score = base_score / 520 * 100 + bonus_score / 220 * 30
83 | res_dict[student.sis_login_id] = total_score
84 | data = {
85 | "submission": {"posted_grade": round(total_score, 2)},
86 | "comment": {
87 | "text_comment": (
88 | f"base score: {base_score} / 520, url: {base_url}\n"
89 | f"bonus score: {bonus_score} / 220, url: {bonus_url}\n"
90 | f"total score: {base_score} / 520 * 100 + "
91 | f"{bonus_score} / 220 * 30"
92 | )
93 | },
94 | }
95 | submission.edit(**data)
96 | float_grades = list(res_dict.values())
97 | summary = [
98 | min(float_grades),
99 | percentile(float_grades, 0.25),
100 | percentile(float_grades, 0.5),
101 | percentile(float_grades, 0.75),
102 | max(float_grades),
103 | ]
104 | average_grade = sum(float_grades) / len(float_grades)
105 | logger.info(
106 | f"Grades summary: "
107 | f"Min: {summary[0]:.2f}, "
108 | f"Q1: {summary[1]:.2f}, "
109 | f"Q2: {summary[2]:.2f}, "
110 | f"Q3: {summary[3]:.2f}, "
111 | f"Max: {summary[4]:.2f}, "
112 | f"Average: {average_grade:.2f}"
113 | )
114 | json.dump(
115 | res_dict, open("ve482_p1_grade.json", "w"), ensure_ascii=False, indent=4
116 | )
117 |
118 |
119 | if __name__ == "__main__":
120 | teapot = VE482Teapot()
121 | teapot.p1_submit()
122 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Joint Teapot
2 |
3 | [](https://app.codacy.com/gh/BoYanZh/Joint-Teapot?utm_source=github.com&utm_medium=referral&utm_content=BoYanZh/Joint-Teapot&utm_campaign=Badge_Grade_Settings)
4 |
5 | A handy tool for TAs in JI to handle coursework through [Gitea](https://focs.ji.sjtu.edu.cn/git/), [Canvas](https://umjicanvas.com/), [JOJ](https://joj.sjtu.edu.cn/) and [Mattermost](https://focs.ji.sjtu.edu.cn/mm/). "Joint" refers to JI and to how this tool joins these sites together; "Teapot" because it holds Gitea, inspired by [@nichujie](https://github.com/nichujie).
6 |
7 | This tool is still under heavy development. The docs may lag behind the code, and all features are subject to change.
8 |
9 | ## Getting Started
10 |
11 | ### Setup venv (Optional)
12 |
13 | ```bash
14 | python3 -m venv env # you only need to do this once
15 | # each time you need this venv, on Linux / macOS use
16 | source env/bin/activate
17 | # or this if on Windows
18 | source env/Scripts/activate
19 | ```
20 |
21 | ### Install
22 |
23 | ```bash
24 | pip3 install -e .
25 | cp .env.example .env && vi .env # configure environment
26 | joint-teapot --help
27 | ```
28 |
29 | ### For developers
30 |
31 | ```bash
32 | pip3 install -r requirements-dev.txt
33 | pre-commit install
34 | pytest -svv
35 | ```
36 |
37 | ## Commands & Features
38 |
39 | ### `archive-repos`
40 |
41 | archive repos in gitea organization according to regex (dry-run enabled by default)
42 |
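43 | A hypothetical example (regex argument assumed, see `joint-teapot --help` for the exact usage): `python3 -m joint_teapot archive-repos "-p1$"` would archive every repo whose name ends with `-p1`, as a dry run by default.
44 |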
43 | ### `unwatch-all-repos`
44 |
45 | unwatch all repos in gitea organization
46 |
47 | ### `check-issues`
48 |
49 | check the existence of issue by title on gitea
50 |
51 | ### `checkout-releases`
52 |
53 | checkout git repo to git tag fetched from gitea by release name, with due date
54 |
55 | ### `clone-all-repos`
56 |
57 | clone all gitea repos to local
58 |
59 | ### `close-all-issues`
60 |
61 | close all issues and pull requests in gitea organization
62 |
63 | ### `create-channels-on-mm`
64 |
65 | create channels for student groups according to group information on gitea. Optionally specify a prefix to ignore all repos whose names do not start with it. Optionally specify a suffix to add to all channels created.
66 |
67 | Example: `python3 -m joint_teapot create-channels-on-mm --prefix p1 --suffix -private --invite-teaching-team` will fetch all repos whose names start with `"p1"` and create channels on mm for these repos, such as "p1team1-private". Members of a repo will be added to the corresponding channel, and the teaching team (configured in `.env`) will be invited to the channels.
68 |
69 | ### `create-comment`
70 |
71 | create a comment for an issue on gitea.
72 |
73 | ### `create-issues`
74 |
75 | create issues on gitea. Specify a list of repos (use `--regex` to match against list of patterns), a title, and a body (use `--file` to read from file), in this order.
76 |
77 | Examples (run both with `python3 -m joint_teapot create-issues`):
78 |
79 | - `pgroup-08 pgroup-17 "Hurry up" "You are running out of time"` will create an issue in these two pgroups.
80 | - `--regex "^pgroup" "Final submission" --file "./issues/final-submission.md"` will create an issue in all pgroups, with body content read from said file.
81 |
82 | ### `create-personal-repos`
83 |
84 | create personal repos on gitea for all canvas students. You may specify an optional suffix.
85 |
86 | Example: `python3 -m joint_teapot create-personal-repos --suffix "-p1"` will create repos named `StudentNameStudentID-p1`.
87 |
88 | ### `create-teams`
89 |
90 | create teams on gitea by canvas groups. To integrate with webhooks, it's recommended to set suffix to `-gitea`.
91 |
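92 | A hypothetical example (option name assumed, see `joint-teapot --help` for the exact usage): `python3 -m joint_teapot create-teams --suffix "-gitea"` would create one gitea team per canvas group, presumably with `-gitea` appended to the team name.
93 |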
92 | ### `create-webhooks-for-mm`
93 |
94 | Create a pair of webhooks on gitea and mm for all student groups on gitea, and configure them so that updates on gitea will be pushed to the mm channel. Optionally specify a prefix to ignore all repos whose names do not start with it.
95 |
96 | Example: `python3 -m joint_teapot create-webhooks-for-mm p1 -git-suffix` will fetch all repos whose names start with `"p1"` and create two-way webhooks between these repos and the channels of the same name with a "-gitea" suffix. All repos should already have corresponding mm channels; if not, use `create-channels-on-mm` to create them.
97 |
98 | ### `get-no-collaborator-repos`
99 |
100 | list all repos with no collaborators
101 |
102 | ### `get-public-keys`
103 |
104 | list all public keys on gitea
105 |
106 | ### `get-repos-status`
107 |
108 | list status of all repos with conditions
109 |
110 | ### `invite-to-teams`
111 |
112 | invite all canvas students to gitea teams by team name
113 |
114 | ### `prepare-assignment-dir`
115 |
116 | prepare assignment dir from extracted canvas "Download Submissions" zip
117 |
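118 | A hypothetical example (argument assumed from `canvas.py`, see `joint-teapot --help` for the exact usage): `python3 -m joint_teapot prepare-assignment-dir ./submissions.zip` would extract the canvas "Download Submissions" zip into one directory per student login id, each with an empty GRADE.txt ready for grading.
119 |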
118 | ### `unsubscribe-from-repos`
119 |
120 | Unsubscribe from all repos in the organization specified in the config file where the repo name matches a given regular expression.
121 |
122 | Example: `python3 -m joint_teapot unsubscribe-from-repos '\d{12}$'` will remove all repos whose names end with a student ID number from your gitea subscription list. Refer to the Python `re` module docs for more info about regex.
123 |
124 | ### `upload-assignment-grades`
125 |
126 | upload assignment grades to canvas from grade files (GRADE.txt by default), reading the first line as the grade and the rest as comments
127 |
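128 | A hypothetical example (argument order assumed from `canvas.py`, see `joint-teapot --help` for the exact usage): `python3 -m joint_teapot upload-assignment-grades ./submissions h1` would read each student's GRADE.txt under `./submissions` and post the grade and comments to the canvas assignment named `h1`.
129 |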
128 | ## License
129 |
130 | MIT
131 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 |
3 | # Created by .ignore support plugin (hsz.mobi)
4 | ### Python template
5 | # Byte-compiled / optimized / DLL files
6 | __pycache__/
7 | *.py[cod]
8 | *$py.class
9 |
10 | # C extensions
11 | *.so
12 |
13 | # Distribution / packaging
14 | .Python
15 | build/
16 | develop-eggs/
17 | dist/
18 | downloads/
19 | eggs/
20 | .eggs/
21 | lib/
22 | lib64/
23 | parts/
24 | sdist/
25 | var/
26 | wheels/
27 | share/python-wheels/
28 | *.egg-info/
29 | .installed.cfg
30 | *.egg
31 | MANIFEST
32 |
33 | # PyInstaller
34 | # Usually these files are written by a python script from a template
35 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
36 | *.manifest
37 | *.spec
38 |
39 | # Installer logs
40 | pip-log.txt
41 | pip-delete-this-directory.txt
42 |
43 | # Unit test / coverage reports
44 | htmlcov/
45 | .tox/
46 | .nox/
47 | .coverage
48 | .coverage.*
49 | .cache
50 | nosetests.xml
51 | coverage.xml
52 | *.cover
53 | *.py,cover
54 | .hypothesis/
55 | .pytest_cache/
56 | cover/
57 |
58 | # Translations
59 | *.mo
60 | *.pot
61 |
62 | # Django stuff:
63 | *.log
64 | local_settings.py
65 | db.sqlite3
66 | db.sqlite3-journal
67 |
68 | # Flask stuff:
69 | instance/
70 | .webassets-cache
71 |
72 | # Scrapy stuff:
73 | .scrapy
74 |
75 | # Sphinx documentation
76 | docs/_build/
77 |
78 | # PyBuilder
79 | .pybuilder/
80 | target/
81 |
82 | # Jupyter Notebook
83 | .ipynb_checkpoints
84 |
85 | # IPython
86 | profile_default/
87 | ipython_config.py
88 |
89 | # pyenv
90 | # For a library or package, you might want to ignore these files since the code is
91 | # intended to run in multiple environments; otherwise, check them in:
92 | # .python-version
93 |
94 | # pipenv
95 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
96 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
97 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
98 | # install all needed dependencies.
99 | #Pipfile.lock
100 |
101 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
102 | __pypackages__/
103 |
104 | # Celery stuff
105 | celerybeat-schedule
106 | celerybeat.pid
107 |
108 | # SageMath parsed files
109 | *.sage.py
110 |
111 | # Environments
112 | .env
113 | .venv
114 | env/
115 | venv/
116 | ENV/
117 | env.bak/
118 | venv.bak/
119 |
120 | # Spyder project settings
121 | .spyderproject
122 | .spyproject
123 |
124 | # Rope project settings
125 | .ropeproject
126 |
127 | # mkdocs documentation
128 | /site
129 |
130 | # mypy
131 | .mypy_cache/
132 | .dmypy.json
133 | dmypy.json
134 |
135 | # Pyre type checker
136 | .pyre/
137 |
138 | # pytype static type analyzer
139 | .pytype/
140 |
141 | # Cython debug symbols
142 | cython_debug/
143 |
144 |
145 | # Created by https://www.toptal.com/developers/gitignore/api/vscode,python
146 | # Edit at https://www.toptal.com/developers/gitignore?templates=vscode,python
147 |
148 | ### Python ###
149 | # Byte-compiled / optimized / DLL files
150 | __pycache__/
151 | *.py[cod]
152 | *$py.class
153 |
154 | # C extensions
155 | *.so
156 |
157 | # Distribution / packaging
158 | .Python
159 | build/
160 | develop-eggs/
161 | dist/
162 | downloads/
163 | eggs/
164 | .eggs/
165 | lib/
166 | lib64/
167 | parts/
168 | sdist/
169 | var/
170 | wheels/
171 | pip-wheel-metadata/
172 | share/python-wheels/
173 | *.egg-info/
174 | .installed.cfg
175 | *.egg
176 | MANIFEST
177 |
178 | # PyInstaller
179 | # Usually these files are written by a python script from a template
180 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
181 | *.manifest
182 | *.spec
183 |
184 | # Installer logs
185 | pip-log.txt
186 | pip-delete-this-directory.txt
187 |
188 | # Unit test / coverage reports
189 | htmlcov/
190 | .tox/
191 | .nox/
192 | .coverage
193 | .coverage.*
194 | .cache
195 | nosetests.xml
196 | coverage.xml
197 | *.cover
198 | *.py,cover
199 | .hypothesis/
200 | .pytest_cache/
201 | pytestdebug.log
202 |
203 | # Translations
204 | *.mo
205 | *.pot
206 |
207 | # Django stuff:
208 | *.log
209 | local_settings.py
210 | db.sqlite3
211 | db.sqlite3-journal
212 |
213 | # Flask stuff:
214 | instance/
215 | .webassets-cache
216 |
217 | # Scrapy stuff:
218 | .scrapy
219 |
220 | # Sphinx documentation
221 | docs/_build/
222 | doc/_build/
223 |
224 | # PyBuilder
225 | target/
226 |
227 | # Jupyter Notebook
228 | .ipynb_checkpoints
229 |
230 | # IPython
231 | profile_default/
232 | ipython_config.py
233 |
234 | # pyenv
235 | .python-version
236 |
237 | # pipenv
238 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
239 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
240 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
241 | # install all needed dependencies.
242 | #Pipfile.lock
243 |
244 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
245 | __pypackages__/
246 |
247 | # Celery stuff
248 | celerybeat-schedule
249 | celerybeat.pid
250 |
251 | # SageMath parsed files
252 | *.sage.py
253 |
254 | # Environments
255 | .env
256 | .venv
257 | env/
258 | venv/
259 | ENV/
260 | env.bak/
261 | venv.bak/
262 | pythonenv*
263 |
264 | # Spyder project settings
265 | .spyderproject
266 | .spyproject
267 |
268 | # Rope project settings
269 | .ropeproject
270 |
271 | # mkdocs documentation
272 | /site
273 |
274 | # mypy
275 | .mypy_cache/
276 | .dmypy.json
277 | dmypy.json
278 |
279 | # Pyre type checker
280 | .pyre/
281 |
282 | # pytype static type analyzer
283 | .pytype/
284 |
285 | # profiling data
286 | .prof
287 |
288 | ### vscode ###
289 | .vscode/*
290 | # !.vscode/settings.json
291 | !.vscode/tasks.json
292 | !.vscode/launch.json
293 | !.vscode/extensions.json
294 | *.code-workspace
295 |
296 | # vim
297 | .vimspector.json
298 |
299 | # End of https://www.toptal.com/developers/gitignore/api/vscode,python
300 |
301 | repos/*
302 | !repos/.gitkeep
303 |
--------------------------------------------------------------------------------
/joint_teapot/workers/git.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | from time import sleep
4 | from typing import List, Optional
5 |
6 | from joint_teapot.utils.logger import logger
7 |
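8 | # drop sys.path[0] before importing GitPython: if it points at this workers/ directory,
9 | # the local git.py would shadow the "git" package; the path is restored right after the import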
8 | current_path = sys.path[0]
9 | sys.path.remove(current_path)
10 | from git import Repo
11 | from git.exc import GitCommandError
12 | from git.remote import PushInfoList
13 |
14 | sys.path.insert(0, current_path)
15 |
16 | from joint_teapot.config import settings
17 |
18 |
19 | class Git:
20 | def __init__(
21 | self,
22 | git_host: str = "",
23 | org_name: str = "",
24 | repos_dir: str = "",
25 | ):
26 | git_host = git_host or settings.git_host
27 | org_name = org_name or settings.gitea_org_name
28 | repos_dir = repos_dir or settings.repos_dir
29 | self.git_host = git_host
30 | self.org_name = org_name
31 | self.repos_dir = repos_dir
32 | if not os.path.isdir(self.repos_dir):
33 | raise Exception(f"{self.repos_dir} does not exist! Create it first.")
34 | logger.debug("Git initialized")
35 | logger.info(f"repos dir: {self.repos_dir}")
36 |
37 | def clone_repo(
38 | self,
39 | repo_name: str,
40 | branch: str = settings.default_branch,
41 | auto_retry: bool = True,
42 | ) -> Optional[Repo]:
43 | repo = None
44 | repo_dir = os.path.join(self.repos_dir, repo_name)
45 | retry_interval = 2
46 | while retry_interval and auto_retry:
47 | try:
48 | repo = Repo.clone_from(
49 | f"{self.git_host}/{self.org_name}/{repo_name}.git",
50 | repo_dir,
51 | branch=branch,
52 | )
53 | retry_interval = 0
54 | except GitCommandError as e:
55 | if "Connection refused" in e.stderr or "Connection reset" in e.stderr:
56 | logger.warning(
57 | f"{repo_name} connection refused/reset in clone. "
58 | "Probably by JI firewall."
59 | )
60 | logger.info(f"wait for {retry_interval} seconds to retry...")
61 | sleep(retry_interval)
62 | if retry_interval < 64:
63 | retry_interval *= 2
64 | elif f"Remote branch {branch} not found in upstream origin" in e.stderr:
65 | retry_interval = 0
66 | logger.error(f"{repo_name} origin/{branch} not found")
67 | else:
68 | raise
69 | return repo
70 |
71 | def get_repo(self, repo_name: str) -> Optional[Repo]:
72 | repo_dir = os.path.join(self.repos_dir, repo_name)
73 | if os.path.exists(repo_dir):
74 | return Repo(repo_dir)
75 | return self.clone_repo(repo_name)
76 |
77 | def repo_clean_and_checkout(
78 | self,
79 | repo_name: str,
80 | checkout_dest: str,
81 | *,
82 | auto_retry: bool = True,
83 | clean_git_lock: bool = False,
84 | reset_target: str = f"origin/{settings.default_branch}",
85 | ) -> str:
86 | repo_dir = os.path.join(self.repos_dir, repo_name)
87 | repo = self.get_repo(repo_name)
88 | if not repo:
89 | return repo_dir
90 | retry_interval = 2
91 | while retry_interval and auto_retry:
92 | try:
93 | if clean_git_lock:
94 | locks_removed_count = 0
95 | for root, _, files in os.walk(os.path.join(repo_dir, ".git")):
96 | for filename in files:
97 | if filename.endswith(".lock"):
98 | lock_file_path = os.path.join(root, filename)
99 | if (
100 | os.path.relpath(lock_file_path, repo_dir)
101 | == settings.joj3_lock_file_path
102 | ):
103 | continue
104 | try:
105 | os.remove(lock_file_path)
106 | locks_removed_count += 1
107 | except OSError as e:
108 | logger.warning(f"error removing lock file: {e}")
109 | logger.info(f"removed {locks_removed_count} lock files")
110 | repo.git.fetch("--tags", "--all", "-f")
111 | repo.git.reset("--hard", reset_target)
112 | repo.git.clean("-d", "-f", "-x")
113 | repo.git.checkout(checkout_dest)
114 | retry_interval = 0
115 | except GitCommandError as e:
116 | if "Connection refused" in e.stderr or "Connection reset" in e.stderr:
117 | logger.warning(
118 | f"{repo_name} connection refused/reset in fetch. "
119 | "Probably by JI firewall."
120 | )
121 | logger.info(f"wait for {retry_interval} seconds to retry...")
122 | sleep(retry_interval)
123 | if retry_interval < 64:
124 | retry_interval *= 2
125 | elif (
126 | f"Remote branch {settings.default_branch} not found in upstream origin"
127 | in e.stderr
128 | ):
129 | retry_interval = 0
130 | logger.error(
131 | f"{repo_name} origin/{settings.default_branch} not found"
132 | )
133 | else:
134 | retry_interval = 0
135 | logger.exception(e)
136 | return repo_dir
137 |
138 | def add_commit(
139 | self, repo_name: str, files_to_add: List[str], commit_message: str
140 | ) -> None:
141 | repo: Repo = self.get_repo(repo_name)
142 | for file in files_to_add:
143 | try:
144 | repo.index.add(file)
145 | except OSError:
146 | logger.warning(f'file path "{file}" does not exist, skipped')
147 | continue
148 | if repo.is_dirty(untracked_files=True) or repo.index.diff(None):
149 | repo.index.commit(commit_message)
150 |
151 | def push(self, repo_name: str) -> PushInfoList:
152 | repo: Repo = self.get_repo(repo_name)
153 | return repo.remote(name="origin").push()
154 |
--------------------------------------------------------------------------------
/joint_teapot/workers/canvas.py:
--------------------------------------------------------------------------------
1 | import csv
2 | import os
3 | import re
4 | from glob import glob
5 | from pathlib import Path
6 | from typing import cast
7 |
8 | from canvasapi import Canvas as PyCanvas
9 | from canvasapi.assignment import Assignment
10 | from canvasapi.user import User
11 | from patoolib import extract_archive
12 | from patoolib.util import PatoolError
13 |
14 | from joint_teapot.config import settings
15 | from joint_teapot.utils.logger import logger
16 | from joint_teapot.utils.main import first, percentile
17 |
18 |
19 | class Canvas:
20 | def __init__(
21 | self,
22 | domain_name: str = "",
23 | suffix: str = "",
24 | access_token: str = "", # nosec
25 | course_id: int = 0,
26 | grade_filename: str = "GRADE.txt",
27 | ):
28 | domain_name = domain_name or settings.canvas_domain_name
29 | suffix = suffix or settings.canvas_suffix
30 | access_token = access_token or settings.canvas_access_token
31 | course_id = course_id or settings.canvas_course_id
32 | self.canvas = PyCanvas(f"https://{domain_name}{suffix}", access_token)
33 | self.course = self.canvas.get_course(course_id)
34 | logger.info(f"Canvas course loaded. {self.course}")
35 | # types = ["student", "observer"]
36 | types = ["student"]
37 |
38 | def patch_user(student: User) -> User:
39 | student.name = (
40 | re.sub(r"[^\x00-\x7F]+", "", student.name).strip().title()
41 | ) # we only care about the English name
42 | student.sis_id = student.login_id
43 | student.login_id = student.email.split("@")[0]
44 | return student
45 |
46 | self.students = [
47 | patch_user(student)
48 | for student in self.course.get_users(enrollment_type=types)
49 | ]
50 | for attr in ["login_id", "name"]:
51 | if not hasattr(self.students[0], attr):
52 | raise Exception(
53 | f"Unable to gather students' {attr}, please contact the Canvas site admin"
54 | )
55 | self.users = [patch_user(student) for student in self.course.get_users()]
56 | logger.debug("Canvas students loaded")
57 | self.assignments = self.course.get_assignments()
58 | logger.debug("Canvas assignments loaded")
59 | self.groups = self.course.get_groups()
60 | logger.debug("Canvas groups loaded")
61 | self.grade_filename = grade_filename
62 | logger.debug("Canvas initialized")
63 |
64 | def export_wrong_email_users(self) -> None:
65 | SAMPLE_EMAIL_BODY = """Dear Student,
66 |
67 | We have noticed that you have changed your email address on Canvas. While this can clearly cause privacy issues, this also prevents you from joining Gitea which will be intensively used in this course. Please revert back to your SJTU email address (`jaccount@sjtu.edu.cn`) as soon as possible. Note that if your email address is still incorrect in 24 hours, we will have to apply penalties as this is slowing down the whole course progress.
68 |
69 | Best regards,
70 | Teaching Team"""
71 | emails = [
72 | user.email for user in self.users if not user.email.endswith("@sjtu.edu.cn")
73 | ]
74 | print(f"To: {','.join(emails)}")
75 | print(f"Subject: [{settings.gitea_org_name}] Important: wrong Canvas email")
76 | print(f"Body:\n{SAMPLE_EMAIL_BODY}")
77 |
78 | def export_users_to_csv(self, filename: Path) -> None:
79 | with open(filename, mode="w", newline="") as file:
80 | writer = csv.writer(file)
81 | for user in self.users:
82 | writer.writerow([user.name, user.sis_id, user.login_id])
83 | logger.info(f"Users exported to {filename}")
84 |
85 | def prepare_assignment_dir(
86 | self, dir_or_zip_file: str, create_grade_file: bool = True
87 | ) -> None:
88 | if os.path.isdir(dir_or_zip_file):
89 | assignments_dir = dir_or_zip_file
90 | else:
91 | assignments_dir = os.path.splitext(dir_or_zip_file)[0]
92 | if os.path.exists(assignments_dir):
93 | logger.error(
94 | f"{assignments_dir} exists, can not unzip submissions file"
95 | )
96 | return
97 | extract_archive(dir_or_zip_file, outdir=assignments_dir, verbosity=-1)
98 | login_ids = {stu.id: stu.login_id for stu in self.students}
99 | for v in login_ids.values():
100 | new_path = os.path.join(assignments_dir, v)
101 | if not os.path.exists(new_path):
102 | os.mkdir(new_path)
103 | if create_grade_file:
104 | grade_file_path = os.path.join(new_path, self.grade_filename)
105 | if not os.path.exists(grade_file_path):
106 | open(grade_file_path, mode="w")
107 | late_students = set()
108 | error_students = set()
109 | submitted_ids = set()
110 | for path in glob(os.path.join(assignments_dir, "*")):
111 | try:
112 | filename = os.path.basename(path)
113 | if "_" not in filename:
114 | continue
115 | segments = filename.split("_")
116 | if segments[1] == "late":
117 | file_id = int(segments[2])
118 | else:
119 | file_id = int(segments[1])
120 | login_id = login_ids[file_id]
121 | except Exception:
122 | logger.error(f"Error on parsing path: {path}")
123 | continue
124 | student = first(self.students, lambda x: x.login_id == login_id)
125 | target_dir = os.path.join(assignments_dir, login_id)
126 | if segments[1] == "late":
127 | # TODO: check the delay time of late submission
128 | if create_grade_file:
129 | grade_file_path = os.path.join(target_dir, self.grade_filename)
130 | if os.path.exists(grade_file_path):
131 | open(grade_file_path, mode="a").write("LATE SUBMISSION\n")
132 | late_students.add(student)
133 | try:
134 | extract_archive(path, outdir=target_dir, verbosity=-1)
135 | logger.info(f"Extract succeed: {student}")
136 | os.remove(path)
137 | except PatoolError as e:
138 | if not str(e).startswith("unknown archive format"):
139 | logger.exception(f"Extract failed: {student}")
140 | error_students.add(student)
141 | os.rename(path, os.path.join(target_dir, filename))
142 | submitted_ids.add(login_id)
143 | if login_ids:
144 | no_submission_students = [
145 | first(self.students, lambda x: x.login_id == login_id)
146 | for login_id in set(login_ids.values()) - submitted_ids
147 | ]
148 | if no_submission_students:
149 | tmp = ", ".join([str(student) for student in no_submission_students])
150 | logger.info(f"No submission student(s): {tmp}")
151 | if late_students:
152 | tmp = ", ".join([str(student) for student in late_students])
153 | logger.info(f"Late student(s): {tmp}")
154 | if error_students:
155 | tmp = ", ".join([str(student) for student in error_students])
156 | logger.info(f"Extract error student(s): {tmp}")
157 |
158 | def upload_assignment_grades(
159 | self, assignments_dir: str, assignment_name: str
160 | ) -> None:
161 | assignment = first(self.assignments, lambda x: x.name == assignment_name)
162 | if assignment is None:
163 | logger.info(f"Canvas assignment {assignment_name} not found")
164 | return
165 | assignment = cast(Assignment, assignment)
166 | submission_dict = {}
167 | float_grades = []
168 | is_float_grades = True
169 | for submission in assignment.get_submissions():
170 | student = first(self.students, lambda x: x.id == submission.user_id)
171 | if student is None:
172 | continue
173 | grade_file_path = os.path.join(
174 | assignments_dir, student.login_id, self.grade_filename
175 | )
176 | try:
177 | grade, *comments = list(open(grade_file_path))
178 | grade = grade.strip()
179 | try:
180 | float_grades.append(float(grade))
181 | except ValueError:
182 | is_float_grades = False
183 | data = {
184 | "submission": {"posted_grade": grade},
185 | "comment": {"text_comment": "".join(comments)},
186 | }
187 | submission_dict[(student, submission)] = data
188 | comment_no_newline = (
189 | data["comment"]["text_comment"].strip().replace("\n", " ")
190 | )
191 | logger.info(
192 | f"Grade file parsed for {assignment} {student}: "
193 | f"grade: {data['submission']['posted_grade']}, "
194 | f'comment: "{comment_no_newline}"'
195 | )
196 | except Exception:
197 | logger.error(f"Can not parse grade file {grade_file_path}")
198 | return
199 | for (student, submission), data in submission_dict.items():
200 | logger.info(
201 | f"Uploading grade for {assignment} {student}: {data.__repr__()}"
202 | )
203 | submission.edit(**data)
204 | if is_float_grades and float_grades:
205 | summary = [
206 | min(float_grades),
207 | percentile(float_grades, 0.25),
208 | percentile(float_grades, 0.5),
209 | percentile(float_grades, 0.75),
210 | max(float_grades),
211 | ]
212 | average_grade = sum(float_grades) / len(float_grades)
213 | logger.info(
214 | f"Grades summary: "
215 | f"Min: {summary[0]:.2f}, "
216 | f"Q1: {summary[1]:.2f}, "
217 | f"Q2: {summary[2]:.2f}, "
218 | f"Q3: {summary[3]:.2f}, "
219 | f"Max: {summary[4]:.2f}, "
220 | f"Average: {average_grade:.2f}"
221 | )
222 | logger.info(f"Canvas assginemnt {assignment} grades upload succeed")
223 |
224 |
225 | if __name__ == "__main__":
226 | canvas = Canvas()
227 |
--------------------------------------------------------------------------------
/joint_teapot/workers/mattermost.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, List
2 |
3 | import focs_gitea
4 | from canvasapi.paginated_list import PaginatedList
5 | from mattermostdriver import Driver
6 |
7 | from joint_teapot.config import settings
8 | from joint_teapot.utils.logger import logger
9 | from joint_teapot.workers.gitea import Gitea
10 |
11 |
12 | class Mattermost:
13 | def __init__(
14 | self,
15 | access_token: str = "", # nosec
16 | team_name: str = "",
17 | domain_name: str = "",
18 | suffix: str = "",
19 | ):
20 | access_token = access_token or settings.mattermost_access_token
21 | team_name = team_name or settings.mattermost_team
22 | domain_name = domain_name or settings.mattermost_domain_name
23 | suffix = suffix or settings.mattermost_suffix
24 | self.url = domain_name
25 | self.url_suffix = suffix
26 | self.endpoint = Driver(
27 | {
28 | "url": domain_name,
29 | "port": 443,
30 | "basepath": suffix + "/api/v4",
31 | "token": access_token,
32 | }
33 | )
34 | try:
35 | operator = self.endpoint.login()
36 | except Exception:
37 | logger.error("Cannot login to Mattermost")
38 | return
39 | if "admin" not in operator["roles"] and "system_user" not in operator["roles"]:
40 | logger.error("Please make sure you have enough permission")
41 | try:
42 | self.team = self.endpoint.teams.get_team_by_name(team_name)
43 | except Exception as e:
44 | logger.error(f"Cannot get team {team_name}: {e}")
45 | return
46 |
47 | def create_channels_for_groups(
48 | self,
49 | groups: Dict[str, List[str]],
50 | suffix: str = "",
51 | invite_teaching_team: bool = True,
52 | ) -> None:
53 | for group_name, members in groups.items():
54 | channel_name = group_name + suffix
55 | try:
56 | channel = self.endpoint.channels.create_channel(
57 | {
58 | "team_id": self.team["id"],
59 | "name": channel_name,
60 | "display_name": channel_name,
61 | "type": "P", # create private channels
62 | }
63 | )
64 | logger.info(f"Added group {channel_name} to Mattermost")
65 | except Exception as e:
66 | logger.warning(
67 | f"Error when creating channel {channel_name}: {e} Perhaps channel already exists?"
68 | )
69 | continue
70 | if invite_teaching_team:
71 | members.extend(settings.mattermost_teaching_team)
72 | for member in members:
73 | try:
74 | mmuser = self.endpoint.users.get_user_by_username(member)
75 | except Exception:
76 | logger.warning(
77 | f"User {member} is not found on the Mattermost server"
78 | )
79 | self.endpoint.posts.create_post(
80 | {
81 | "channel_id": channel["id"],
82 | "message": f"User {member} is not found on the Mattermost server",
83 | }
84 | )
85 | continue
86 | # code for adding student to mm, disabled since there is no need to do that
87 | # try:
88 | # mmuser = self.endpoint.users.create_user({'email':f"{member}@sjtu.edu.cn", 'username':member, auth_service:"gitlab"})
89 | # except e:
90 | # logger.error(f"Error creating user {member}")
91 | # continue
92 | try:
93 | self.endpoint.channels.add_user(
94 | channel["id"], {"user_id": mmuser["id"]}
95 | )
96 | except Exception:
97 | logger.warning(f"User {member} is not in the team")
98 | self.endpoint.posts.create_post(
99 | {
100 | "channel_id": channel["id"],
101 | "message": f"User {member} is not in the team",
102 | }
103 | )
104 | logger.info(f"Added member {member} to channel {channel_name}")
105 |
106 | def create_channels_for_individuals(
107 | self,
108 | students: PaginatedList,
109 | invite_teaching_team: bool = True,
110 | ) -> None:
111 | for student in students:
112 | display_name = student.name
113 | channel_name = student.sis_id
114 | try:
115 | channel = self.endpoint.channels.create_channel(
116 | {
117 | "team_id": self.team["id"],
118 | "name": channel_name,
119 | "display_name": display_name,
120 | "type": "P", # create private channels
121 | }
122 | )
123 | logger.info(
124 | f"Added channel {display_name} ({channel_name}) to Mattermost"
125 | )
126 | except Exception as e:
127 | logger.warning(
128 | f"Error when creating channel {channel_name}: {e} Perhaps channel already exists?"
129 | )
130 | continue
131 | members = [student.login_id]
132 | if invite_teaching_team:
133 | members.extend(settings.mattermost_teaching_team)
134 | for member in members:
135 | try:
136 | mmuser = self.endpoint.users.get_user_by_username(member)
137 | except Exception:
138 | logger.warning(
139 | f"User {member} is not found on the Mattermost server"
140 | )
141 | self.endpoint.posts.create_post(
142 | {
143 | "channel_id": channel["id"],
144 | "message": f"User {member} is not found on the Mattermost server",
145 | }
146 | )
147 | continue
148 | # code for adding student to mm, disabled since there is no need to do that
149 | # try:
150 | # mmuser = self.endpoint.users.create_user({'email':f"{member}@sjtu.edu.cn", 'username':member, auth_service:"gitlab"})
151 | # except e:
152 | # logger.error(f"Error creating user {member}")
153 | # continue
154 | try:
155 | self.endpoint.channels.add_user(
156 | channel["id"], {"user_id": mmuser["id"]}
157 | )
158 | except Exception:
159 | logger.warning(f"User {member} is not in the team")
160 | self.endpoint.posts.create_post(
161 | {
162 | "channel_id": channel["id"],
163 | "message": f"User {member} is not in the team",
164 | }
165 | )
166 |
167 | logger.info(f"Added member {member} to channel {channel_name}")
168 |
169 | def create_webhooks_for_repos(
170 | self, repos: List[str], gitea: Gitea, gitea_suffix: bool
171 | ) -> None:
172 | # one group corresponds to one repo so these concepts can be used interchangeably
173 | for repo in repos:
174 | channel_name = f"{repo}-gitea" if gitea_suffix else repo
175 | logger.info(
176 | f"Creating webhooks for repo {gitea.org_name}/{repo} and channel {channel_name}"
177 | )
178 | try:
179 | mm_channel = self.endpoint.channels.get_channel_by_name(
180 | self.team["id"], channel_name
181 | )
182 | except Exception as e:
183 | logger.warning(
184 | f"Error when getting channel {channel_name} from Mattermost team {self.team['name']}: {e}"
185 | )
186 | continue
187 | try:
188 | mm_webhook = self.endpoint.webhooks.create_incoming_hook(
189 | {
190 | "channel_id": mm_channel["id"],
191 | "display_name": f"Gitea integration for {self.team['name']}/{repo}",
192 | "channel_locked": True,
193 | }
194 | )
195 | except Exception as e:
196 | logger.error(f"Error when creating incoming webhook at Mattermost: {e}")
197 | continue
198 | try:
199 | gitea.repository_api.repo_create_hook(
200 | gitea.org_name,
201 | repo,
202 | body=focs_gitea.CreateHookOption(
203 | active=True,
204 | type="slack",
205 | events=[
206 | "issues_only",
207 | "issue_comment",
208 | "issue_assign",
209 | "pull_request_only",
210 | "pull_request_comment",
211 | "pull_request_review",
212 | "pull_request_review_request",
213 | "push",
214 | "create",
215 | "delete",
216 | "release",
217 | "wiki",
218 | ],
219 | config={
220 | "url": f"https://{self.url}{self.url_suffix}/hooks/{mm_webhook['id']}",
221 | "username": "FOCS Gitea",
222 | "icon_url": f"https://{self.url}{self.url_suffix}/api/v4/brand/image",
223 | "content_type": "json",
224 | "channel": channel_name,
225 | },
226 | ),
227 | )
228 | except Exception as e:
229 | logger.warning(f"Error when creating outgoing webhook at Gitea: {e}")
230 |
231 | # unused since we can give students invitation links instead
232 | def invite_students_to_team(self, students: List[str]) -> None:
233 | for student in students:
234 | try:
235 | mmuser = self.endpoint.users.get_user_by_username(student)
236 | except Exception:
237 | logger.warning(f"User {student} is not found on the Mattermost server")
238 | continue
239 | self.endpoint.teams.add_user_to_team(
240 | self.team["id"], {"user_id": mmuser["id"], "team_id": self.team["id"]}
241 | )
242 | logger.info(f"Added user {student} to team {self.team['name']}")
243 |
--------------------------------------------------------------------------------
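
The Mattermost worker above is normally driven through the Teapot facade and the Typer CLI in joint_teapot/app.py further down in this dump (create-group-channels-on-mm, create-personal-channels-on-mm, create-webhooks-for-mm). A minimal sketch of calling it directly, assuming the Mattermost and Gitea settings in .env are already filled in:

    from joint_teapot.workers import Gitea, Mattermost

    gitea = Gitea()
    mm = Mattermost()
    # mirror every Gitea team into a "<team>-gitea" channel, then wire a
    # Gitea -> Mattermost webhook pair for each of them
    teams = gitea.get_all_teams()  # {team_name: [student usernames]}
    mm.create_channels_for_groups(teams, "-gitea", True)
    mm.create_webhooks_for_repos(list(teams), gitea, gitea_suffix=True)
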
/joint_teapot/utils/joj3.py:
--------------------------------------------------------------------------------
1 | import bisect
2 | import csv
3 | import json
4 | import os
5 | from datetime import datetime, timedelta
6 | from typing import Any, Dict, List, Optional, Tuple
7 |
8 | from pydantic_settings import BaseSettings
9 |
10 | from joint_teapot.config import settings
11 | from joint_teapot.utils.logger import logger
12 |
13 |
14 | class Env(BaseSettings):
15 | github_actor: str = ""
16 | github_repository: str = ""
17 | github_sha: str = ""
18 | github_ref: str = ""
19 | github_workflow: str = ""
20 | github_run_number: str = "0"
21 | joj3_conf_name: str = ""
22 | joj3_groups: str = ""
23 | joj3_run_id: str = ""
24 | joj3_commit_msg: str = ""
25 | joj3_force_quit_stage_name: str = ""
26 | joj3_output_path: str = ""
27 |
28 |
29 | def get_total_score(score_file_path: str) -> int:
30 | with open(score_file_path) as json_file:
31 | stages: List[Dict[str, Any]] = json.load(json_file)
32 | total_score = 0
33 | for stage in stages:
34 | for result in stage["results"]:
35 | total_score += result["score"]
36 | return total_score
37 |
38 |
39 | def generate_scoreboard(
40 | score_file_path: str,
41 | submitter: str,
42 | scoreboard_file_path: str,
43 | exercise_name: str,
44 | submitter_repo_name: str,
45 | exercise_total_score: int,
46 | ) -> None:
47 | if not scoreboard_file_path.endswith(".csv"):
48 | logger.error(
49 | f"Scoreboard file should be a .csv file, but now it is {scoreboard_file_path}"
50 | )
51 | return
52 | os.makedirs(os.path.dirname(scoreboard_file_path), exist_ok=True)
53 | # Load the csv file if it already exists
54 | fixed_headers = ["", "repo_name", "last_edit", "total"]
55 | fixed_defaults = [submitter, submitter_repo_name, "", "0"]
56 | if os.path.exists(scoreboard_file_path):
57 | with open(scoreboard_file_path, newline="") as file:
58 | reader = csv.reader(file)
59 | rows = list(reader)
60 | columns = rows[0]
61 | data = rows[1:]
62 |
63 | def migrate_scoreboard() -> None:
64 | if "repo_name" in columns:
65 | return
66 | columns.insert(1, "repo_name")
67 | for row in data:
68 | row.insert(1, "")
69 |
70 | migrate_scoreboard()
71 | else:
72 | columns = fixed_headers
73 | data = []
74 |
75 | submitter_found = False
76 | for row in data:
77 | if row[0] == submitter:
 78 |             submitter_row = row  # a reference to the original row in data, so updates below modify data in place
79 | submitter_found = True
80 | break
81 | if not submitter_found:
82 | submitter_row = fixed_defaults + [""] * (len(columns) - len(fixed_headers))
83 | data.append(submitter_row)
84 |
85 | # Update data
86 | with open(score_file_path) as json_file:
87 | stages: List[Dict[str, Any]] = json.load(json_file)
88 |
89 | if exercise_name == "unknown":
90 | for stage in stages:
91 | if stage["name"] != "metadata":
92 | continue
93 | comment = stage["results"][0]["comment"]
94 | exercise_name = comment.split("-")[0]
95 | # Find if exercise in table:
96 | if exercise_name not in columns:
97 | column_tail = columns[len(fixed_headers) :]
98 | bisect.insort(column_tail, exercise_name)
99 | columns[len(fixed_headers) :] = column_tail
100 | index = columns.index(exercise_name)
101 | for row in data:
102 | row.insert(index, "")
103 |
104 | submitter_row[columns.index(exercise_name)] = str(exercise_total_score)
105 |
106 | total = 0
107 | for col in columns:
108 | if col in fixed_headers:
109 | continue
110 | idx = columns.index(col)
111 | if (submitter_row[idx] is not None) and (submitter_row[idx] != ""):
112 | try:
113 | total += int(submitter_row[idx])
114 | except ValueError:
115 | pass
116 |
117 | submitter_row[columns.index("total")] = str(total)
118 |
119 | now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
120 | submitter_row[columns.index("last_edit")] = now
121 |
122 | submitter_row[columns.index("repo_name")] = submitter_repo_name
123 |
124 | # Sort data by total, from low to high
125 | data.sort(key=lambda x: int(x[columns.index("total")]))
126 |
127 | # Write back to the csv file:
128 | with open(scoreboard_file_path, mode="w", newline="") as file:
129 | writer = csv.writer(file)
130 | writer.writerow(columns)
131 | writer.writerows(data)
132 |
133 |
134 | def get_failed_table_from_file(table_file_path: str) -> List[List[str]]:
135 | data: List[List[str]] = []
136 | if os.path.exists(table_file_path):
137 | with open(table_file_path) as table_file:
138 | for i, line in enumerate(table_file):
139 | if i < 2:
140 | continue
141 | stripped_line = line.strip().strip("|").split("|")
142 | data.append(stripped_line)
143 | return data
144 |
145 |
146 | def get_failed_stage_from_file(score_file_path: str) -> str:
147 | with open(score_file_path) as json_file:
148 | stages: List[Dict[str, Any]] = json.load(json_file)
149 |
150 | failed_stage = ""
151 | for stage in stages:
152 |         if stage["force_quit"]:
153 | failed_stage = stage["name"]
154 | break
155 | return failed_stage
156 |
157 |
158 | def update_failed_table_from_score_file(
159 | data: List[List[str]],
160 | score_file_path: str,
161 | repo_name: str,
162 | repo_link: str,
163 | action_link: str,
164 | ) -> None:
165 | failed_stage = get_failed_stage_from_file(score_file_path)
166 |
167 | # append to failed table
168 | now = datetime.now().strftime("%Y-%m-%d %H:%M")
169 | repo = f"[{repo_name}]({repo_link})"
170 | failure = f"[{failed_stage}]({action_link})"
171 | row_found = False
172 | for i, row in enumerate(data[:]):
173 | if row[1] == repo:
174 | row_found = True
175 | if failed_stage == "":
176 | data.remove(row)
177 | else:
178 | data[i][0] = now
179 | data[i][2] = failure
180 | break
181 | if not row_found and failed_stage != "":
182 | data.append([now, repo, failure])
183 |
184 |
185 | def write_failed_table_into_file(data: List[List[str]], table_file_path: str) -> None:
186 | data.sort(key=lambda x: datetime.strptime(x[0], "%Y-%m-%d %H:%M"), reverse=True)
187 | text = "|date|repository|failure|\n"
188 | text += "|----|----|----|\n"
189 | for row in data:
190 | text += f"|{row[0]}|{row[1]}|{row[2]}|\n"
191 |
192 | os.makedirs(os.path.dirname(table_file_path), exist_ok=True)
193 | with open(table_file_path, "w") as table_file:
194 | table_file.write(text)
195 |
196 |
197 | def generate_failed_table(
198 | score_file_path: str,
199 | repo_name: str,
200 | repo_link: str,
201 | table_file_path: str,
202 | action_link: str,
203 | ) -> None:
204 | if not table_file_path.endswith(".md"):
205 | logger.error(
206 | f"Failed table file should be a .md file, but now it is {table_file_path}"
207 | )
208 | return
209 |
210 | data = get_failed_table_from_file(table_file_path)
211 | update_failed_table_from_score_file(
212 | data,
213 | score_file_path,
214 | repo_name,
215 | repo_link,
216 | action_link,
217 | )
218 | write_failed_table_into_file(data, table_file_path)
219 |
220 |
221 | def generate_title_and_comment(
222 | score_file_path: str,
223 | action_link: str,
224 | run_number: str,
225 | exercise_name: str,
226 | submitter: str,
227 | commit_hash: str,
228 | submitter_in_title: bool = True,
229 | run_id: str = "unknown",
230 | max_total_score: int = -1,
231 | penalty_factor: float = 1.0,
232 | ) -> Tuple[str, str]:
233 | with open(score_file_path) as json_file:
234 | stages: List[Dict[str, Any]] = json.load(json_file)
235 | if exercise_name == "unknown":
236 | for stage in stages:
237 | if stage["name"] != "metadata":
238 | continue
239 | comment = stage["results"][0]["comment"]
240 | exercise_name = comment.split("-")[0]
241 | total_score = 0
242 | now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
243 | comment = (
244 | f"Generated at {now} from [Gitea Actions #{run_number}]({action_link}), "
245 | f"commit {commit_hash}, "
246 | f"triggered by @{submitter}, "
247 | f"run ID [`{run_id}`](https://focs.ji.sjtu.edu.cn/joj-mon/d/{settings.gitea_org_name}?var-Filters=RunID%7C%3D%7C{run_id}).\n"
248 | "Powered by [JOJ3](https://github.com/joint-online-judge/JOJ3) and "
249 | "[Joint-Teapot](https://github.com/BoYanZh/Joint-Teapot) with ❤️.\n"
250 | )
251 | if penalty_factor != 1.0:
252 | comment += f"## ⚠️Total Score Penalty Warning⚠️\n**The total score is multiplied by {penalty_factor}.**\n"
253 | for stage in stages:
254 | if all(
255 | result["score"] == 0 and result["comment"].strip() == ""
256 | for result in stage["results"]
257 | ):
258 | continue
259 | stage_score = sum(result["score"] for result in stage["results"])
260 | comment += f"## {stage['name']} - Score: {stage_score}"
261 | force_quit = stage["force_quit"]
262 | if force_quit:
263 | comment += " - Fatal Error"
264 | comment += "\n"
265 | for i, result in enumerate(stage["results"]):
266 | comment += "\n"
267 |             comment += f"Case {i} - Score: {result['score']}\n"
268 | if result["comment"].strip() != "":
269 | comment += f"\n{result['comment']}\n\n"
270 | comment += " \n\n"
271 | total_score += result["score"]
272 | comment += "\n"
273 | if penalty_factor != 1.0:
274 | total_score = round(total_score - abs(total_score) * (1 - penalty_factor))
275 | title = get_title_prefix(exercise_name, submitter, submitter_in_title)
276 | if max_total_score >= 0:
277 | title += f"{total_score} / {max_total_score}"
278 | else:
279 | title += f"{total_score}"
280 | return title, comment
281 |
282 |
283 | def check_skipped(score_file_path: str, keyword: str) -> bool:
284 | with open(score_file_path) as json_file:
285 | stages: List[Dict[str, Any]] = json.load(json_file)
286 | for stage in stages:
287 | if stage["name"] != "metadata":
288 | continue
289 | comment = stage["results"][0]["comment"]
290 | if keyword in comment or "skip-teapot" in comment:
291 | return True
292 | return False
293 |
294 |
295 | def get_title_prefix(
296 | exercise_name: str, submitter: str, submitter_in_title: bool
297 | ) -> str:
298 | title = f"JOJ3 Result for {exercise_name} by @{submitter} - Score: "
299 | if not submitter_in_title:
300 | title = f"JOJ3 Result for {exercise_name} - Score: "
301 | return title
302 |
303 |
304 | def parse_penalty_config(penalty_config: str) -> List[Tuple[float, float]]:
305 | res = []
306 | for penalty in penalty_config.split(","):
307 | if "=" not in penalty:
308 | continue
309 | hour, factor = map(float, penalty.split("="))
310 | res.append((hour, factor))
311 | res.sort(key=lambda x: x[0])
312 | return res
313 |
314 |
315 | def get_penalty_factor(
316 | end_time: Optional[datetime],
317 | penalty_config: str,
318 | ) -> float:
319 | if not end_time or not penalty_config:
320 | return 1.0
321 | now = datetime.now()
322 | if now < end_time:
323 | return 1.0
324 | penalties = parse_penalty_config(penalty_config)
325 | res = 0.0
326 | for hour, factor in penalties[::-1]:
327 | if now < end_time + timedelta(hours=hour):
328 | res = factor
329 | else:
330 | break
331 | return res
332 |
--------------------------------------------------------------------------------
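
A small worked example of the late-penalty helpers above (the hours and factors are illustrative):

    from datetime import datetime, timedelta

    from joint_teapot.utils.joj3 import get_penalty_factor, parse_penalty_config

    parse_penalty_config("24=0.75,48=0.5")  # -> [(24.0, 0.75), (48.0, 0.5)]

    # a submission 30 hours after the deadline falls in the 24-48 hour window,
    # so the factor is 0.5; past 48 hours no window matches and the factor is 0.0
    deadline = datetime.now() - timedelta(hours=30)
    get_penalty_factor(deadline, "24=0.75,48=0.5")  # -> 0.5

joj3-all-env in app.py then applies the factor as round(total_score - abs(total_score) * (1 - factor)).
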
/joint_teapot/teapot.py:
--------------------------------------------------------------------------------
1 | import functools
2 | import glob
3 | import os
4 | import re
5 | from datetime import datetime, timedelta, timezone
6 | from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, TypeVar
7 |
8 | import mosspy
9 | from git import Repo
10 |
11 | from joint_teapot.config import settings
12 | from joint_teapot.utils import joj3
13 | from joint_teapot.utils.logger import logger
14 | from joint_teapot.utils.main import default_repo_name_convertor, first
15 | from joint_teapot.workers import Canvas, Git, Gitea, Mattermost
16 | from joint_teapot.workers.joj import JOJ
17 |
18 | if TYPE_CHECKING:
19 | import focs_gitea
20 |
21 | _T = TypeVar("_T")
22 |
23 |
24 | def for_all_methods(
25 | decorator: Callable[[Callable[[_T], _T]], Any],
26 | ) -> Callable[[_T], _T]:
27 | @functools.wraps(decorator)
28 | def decorate(cls: Any) -> Any:
29 | for attr in cls.__dict__: # there's probably a better way to do this
30 | if callable(getattr(cls, attr)):
31 | setattr(cls, attr, decorator(getattr(cls, attr)))
32 | return cls
33 |
34 | return decorate
35 |
36 |
37 | def log_exception_in_loguru(func: Callable[..., Any]) -> Callable[..., Any]:
38 | @functools.wraps(func)
39 | def decorator(*args: Any, **kwargs: Any) -> Any:
40 | try:
41 | return func(*args, **kwargs)
42 | except Exception as e:
43 | logger.exception(e)
44 |
45 | return decorator
46 |
47 |
48 | @for_all_methods(log_exception_in_loguru)
49 | class Teapot:
50 | _canvas = None
51 | _gitea = None
52 | _git = None
53 | _joj = None
54 | _mattermost = None
55 |
56 | @property
57 | def canvas(self) -> Canvas:
58 | if not self._canvas:
59 | self._canvas = Canvas()
60 | return self._canvas
61 |
62 | @property
63 | def gitea(self) -> Gitea:
64 | if not self._gitea:
65 | self._gitea = Gitea()
66 | return self._gitea
67 |
68 | @property
69 | def git(self) -> Git:
70 | if not self._git:
71 | self._git = Git()
72 | return self._git
73 |
74 | @property
75 | def joj(self) -> JOJ:
76 | if not self._joj:
77 | self._joj = JOJ()
78 | return self._joj
79 |
80 | @property
81 | def mattermost(self) -> Mattermost:
82 | if not self._mattermost:
83 | self._mattermost = Mattermost()
84 | return self._mattermost
85 |
86 | def __init__(self) -> None:
87 | logger.info(
88 | "Settings loaded. "
89 | f"Canvas Course ID: {settings.canvas_course_id}, "
90 | f"Gitea Organization name: {settings.gitea_org_name}, "
91 | f"Mattermost Team name: {settings.mattermost_team}@{settings.mattermost_domain_name}{settings.mattermost_suffix}"
92 | )
93 | logger.debug("Teapot initialized.")
94 |
95 | def add_all_canvas_students_to_teams(self, team_names: List[str]) -> None:
96 | return self.gitea.add_canvas_students_to_teams(self.canvas.students, team_names)
97 |
98 | def create_personal_repos_for_all_canvas_students(
99 | self, suffix: str = "", template: str = ""
100 | ) -> List[str]:
101 | return self.gitea.create_personal_repos_for_canvas_students(
102 | self.canvas.students,
103 | lambda user: default_repo_name_convertor(user) + suffix,
104 | template,
105 | )
106 |
107 | def create_teams_and_repos_by_canvas_groups(
108 | self, group_prefix: str = "", template: str = ""
109 | ) -> List[str]:
110 | def convertor(name: str) -> Optional[str]:
111 | if group_prefix and not name.startswith(group_prefix):
112 | return None
113 | team_name, number_str = name.split(" ")
114 | number = int(number_str)
115 | return f"{team_name}{number:02}"
116 |
117 | return self.gitea.create_teams_and_repos_by_canvas_groups(
118 | self.canvas.students, self.canvas.groups, convertor, convertor, template
119 | )
120 |
121 | def get_public_key_of_all_canvas_students(self) -> Dict[str, List[str]]:
122 | return self.gitea.get_public_key_of_canvas_students(self.canvas.students)
123 |
124 | def clone_all_repos(self) -> None:
125 | for i, repo_name in enumerate(self.gitea.get_all_repo_names()):
126 | logger.info(f"{i}, {self.gitea.org_name}/{repo_name} cloning...")
127 | self.git.repo_clean_and_checkout(repo_name, settings.default_branch)
128 |
129 | def moss_all_repos(self, language: str, wildcards: List[str]) -> str:
130 | m = mosspy.Moss(settings.moss_user_id, language)
131 | for repo_name in self.gitea.get_all_repo_names():
132 | base_dir = os.path.join(settings.repos_dir, repo_name)
133 | for wildcard in wildcards:
134 | full_wildcard = os.path.join(base_dir, wildcard)
135 | for file in glob.glob(full_wildcard, recursive=True):
136 | if not os.path.isfile(file):
137 | continue
138 | logger.info(f"Adding file {file}")
139 | m.files.append((file, os.path.relpath(file, settings.repos_dir)))
140 | logger.info("Sending files")
141 | return m.send()
142 |
143 | def create_issue_for_repos(
144 | self,
145 | repo_names: List[str],
146 | title: str,
147 | body: str,
148 | from_file: bool = False,
149 | use_regex: bool = False,
150 | milestone: str = "",
151 | labels: List[str] = [],
152 | ) -> None:
153 | if from_file:
154 | try:
155 | f = open(body)
156 | content = f.read()
157 | f.close()
158 | except FileNotFoundError:
159 | logger.error(f"file {body} not found")
160 | return
161 | except Exception as e:
162 |                 logger.exception(f"Error occurred when opening file {body}:")
163 | logger.error(e)
164 | return
165 | else:
166 | content = body
167 |
168 | affected_repos = []
169 | if use_regex:
170 | all_repos = self.gitea.get_all_repo_names()
171 | for pattern in repo_names:
172 | affected_repos.extend(
173 | [repo for repo in all_repos if re.search(pattern, repo) is not None]
174 | )
175 | else:
176 | affected_repos = repo_names
177 |
178 | for repo_name in affected_repos:
179 | self.gitea.create_issue(repo_name, title, content, True, milestone, labels)
180 |
181 | def create_comment(
182 | self,
183 | repo_name: str,
184 | index: int,
185 | body: str,
186 | ) -> None:
187 | self.gitea.create_comment(repo_name, index, body)
188 |
189 | def check_exist_issue_by_title(
190 | self, repo_names: List[str], title: str
191 | ) -> List[str]:
192 | res = []
193 | for repo_name in repo_names:
194 | if not self.gitea.check_exist_issue_by_title(repo_name, title):
195 | res.append(repo_name)
196 | return res
197 |
198 | def checkout_to_repo_by_release_name(
199 | self, repo_name: str, release_name: str, due: datetime = datetime(3000, 1, 1)
200 | ) -> bool:
201 | repo_releases = self.gitea.get_repo_releases(repo_name)
202 | release = first(repo_releases, lambda item: item.name == release_name)
203 | if release is None or release.created_at.replace(tzinfo=None) >= due:
204 | logger.warning(
205 | f"{self.gitea.org_name}/{repo_name} checkout to "
206 | f"release by name {release_name} fail"
207 | )
208 | return False
209 | self.git.repo_clean_and_checkout(repo_name, f"tags/{release.tag_name}")
210 | logger.info(
211 | f"{self.gitea.org_name}/{repo_name} checkout to "
212 | f"tags/{release.tag_name} succeed"
213 | )
214 | return True
215 |
216 | def get_repos_status(self, commit_lt: int, issue_lt: int) -> None:
217 | for repo_name, (
218 | commit_count,
219 | issue_count,
220 | ) in self.gitea.get_repos_status().items():
221 | if commit_count < commit_lt or issue_count < issue_lt:
222 | logger.info(
223 | f"{self.gitea.org_name}/{repo_name} has "
224 | f"{commit_count} commit(s), {issue_count} issue(s)"
225 | )
226 |
227 | def create_channels_for_individuals(
228 | self, invite_teaching_teams: bool = True
229 | ) -> None:
230 | return self.mattermost.create_channels_for_individuals(
231 | self.canvas.students, invite_teaching_teams
232 | )
233 |
234 | def joj3_post_issue(
235 | self,
236 | env: joj3.Env,
237 | max_total_score: int,
238 | gitea_actions_url: str,
239 | submitter_in_issue_title: bool,
240 | submitter_repo_name: str,
241 | issue_label_name: str,
242 | issue_label_color: str,
243 | issue_label_exclusive: bool,
244 | penalty_factor: float,
245 | ) -> int:
246 | title, comment = joj3.generate_title_and_comment(
247 | env.joj3_output_path,
248 | gitea_actions_url,
249 | env.github_run_number,
250 | env.joj3_conf_name,
251 | env.github_actor,
252 | env.github_sha,
253 | submitter_in_issue_title,
254 | env.joj3_run_id,
255 | max_total_score,
256 | penalty_factor,
257 | )
258 | title_prefix = joj3.get_title_prefix(
259 | env.joj3_conf_name, env.github_actor, submitter_in_issue_title
260 | )
261 | joj3_issue: focs_gitea.Issue
262 | issue: focs_gitea.Issue
263 | new_issue = False
264 | for issue in self.gitea.issue_api.issue_list_issues(
265 | self.gitea.org_name, submitter_repo_name, state="open"
266 | ):
267 | if issue.title.startswith(title_prefix):
268 | joj3_issue = issue
269 | break
270 | else:
271 | new_issue = True
272 | labels = self.gitea.issue_api.issue_list_labels(
273 | self.gitea.org_name, submitter_repo_name
274 | )
275 | label_id = 0
276 | label = first(labels, lambda label: label.name == issue_label_name)
277 | if label:
278 | label_id = label.id
279 | else:
280 | label = self.gitea.issue_api.issue_create_label(
281 | self.gitea.org_name,
282 | submitter_repo_name,
283 | body={
284 | "name": issue_label_name,
285 | "color": issue_label_color,
286 | "exclusive": issue_label_exclusive,
287 | },
288 | )
289 | label_id = label.id
290 | joj3_issue = self.gitea.issue_api.issue_create_issue(
291 | self.gitea.org_name,
292 | submitter_repo_name,
293 | body={"title": title, "body": comment, "labels": [label_id]},
294 | )
295 | gitea_issue_url = joj3_issue.html_url
296 | if not new_issue:
297 | self.gitea.issue_api.issue_edit_issue(
298 | self.gitea.org_name,
299 | submitter_repo_name,
300 | joj3_issue.number,
301 | body={"title": title, "body": comment},
302 | )
303 | return joj3_issue.number
304 |
305 | def joj3_check_submission_time(
306 | self,
307 | begin_time: Optional[datetime] = None,
308 | end_time: Optional[datetime] = None,
309 | penalty_config: str = "",
310 | ) -> Tuple[str, bool]:
311 | now = datetime.now()
312 | penalties = joj3.parse_penalty_config(penalty_config)
313 | if penalties and end_time:
314 | penalty_end_time = end_time + timedelta(hours=penalties[-1][0])
315 | if begin_time and now < begin_time:
316 | return (
317 | "### Submission Time Check Failed\n"
318 | f"Current time {now} is not in the valid range "
319 | f"[{begin_time}, {end_time}].\n",
320 | True,
321 | )
322 | elif now > penalty_end_time:
323 | return (
324 | "### Submission Time Check Failed\n"
325 | f"Current time {now} is not in the valid range "
326 | f"[{begin_time}, {end_time}], and the penalty range "
327 | f"[{end_time + timedelta(seconds=1)}, {penalty_end_time}].\n",
328 | True,
329 | )
330 | elif now > end_time:
331 | return (
332 | "### Submission Time Check Passed\n"
333 | f"Current time {now} is not in the valid range "
334 | f"[{begin_time}, {end_time}], but in the penalty range "
335 | f"[{end_time + timedelta(seconds=1)}, {penalty_end_time}].\n",
336 | False,
337 | )
338 | else:
339 | if (begin_time and now < begin_time) or (end_time and now > end_time):
340 | return (
341 | "### Submission Time Check Failed\n"
342 | f"Current time {now} is not in the valid range "
343 | f"[{begin_time}, {end_time}].\n",
344 | True,
345 | )
346 | return (
347 | "### Submission Time Check Passed\n"
348 | f"Current time {now} is in the valid range "
349 | f"[{begin_time}, {end_time}].\n",
350 | False,
351 | )
352 |
353 | def joj3_check_submission_count(
354 | self,
355 | env: joj3.Env,
356 | grading_repo_name: str,
357 | group_config: str,
358 | scoreboard_filename: str,
359 | ignore_submitter: bool,
360 | ) -> Tuple[str, bool]:
361 | submitter = env.github_actor
362 | submitter_repo_name = env.github_repository.split("/")[-1]
363 | repo: Repo = self.git.get_repo(grading_repo_name)
364 | now = datetime.now(timezone.utc)
365 | items = group_config.split(",")
366 | comment = ""
367 | failed = False
368 | pattern = re.compile(
369 |             r"joj3: update scoreboard for (?P<exercise_name>.+?) "
370 |             r"by @(?P<submitter>.+) in "
371 |             r"(?P<gitea_org_name>.+)/(?P<submitter_repo_name>.+)@(?P<commit_hash>.+)"
372 | )
373 | time_windows = []
374 | valid_items = []
375 | for item in items:
376 | if "=" not in item:
377 | continue
378 | name, values = item.split("=")
379 | if ":" not in values:
380 | continue
381 | max_count, time_period = map(int, values.split(":"))
382 | if max_count < 0 or time_period < 0:
383 | continue
384 | since = now - timedelta(hours=time_period)
385 | time_windows.append(since)
386 | valid_items.append((name, max_count, time_period, since))
387 | logger.info(f"valid items: {valid_items}, time windows: {time_windows}")
388 | matched_commits = []
389 | all_commits_length = 0
390 | if time_windows:
391 | earliest_since = min(time_windows).strftime("%Y-%m-%dT%H:%M:%S")
392 | commits = repo.iter_commits(paths=scoreboard_filename, since=earliest_since)
393 | for commit in commits:
394 | all_commits_length += 1
395 | lines = commit.message.strip().splitlines()
396 | if not lines:
397 | continue
398 | match = pattern.match(lines[0])
399 | if not match:
400 | continue
401 | d = match.groupdict()
402 | if (
403 | env.joj3_conf_name != d["exercise_name"]
404 | or submitter_repo_name != d["submitter_repo_name"]
405 | ):
406 | continue
407 | if not ignore_submitter and submitter != d["submitter"]:
408 | continue
409 | groups_line = next((l for l in lines if l.startswith("groups: ")), None)
410 | commit_groups = (
411 | groups_line[len("groups: ") :].split(",") if groups_line else []
412 | )
413 | matched_commits.append(
414 | {
415 | "time": commit.committed_datetime,
416 | "groups": [g.strip() for g in commit_groups],
417 | }
418 | )
419 | logger.info(
420 | f"matched commits length: {len(matched_commits)}, all commits length: {all_commits_length}"
421 | )
422 | for name, max_count, time_period, since in valid_items:
423 | submit_count = 0
424 | time_limit = now - timedelta(hours=time_period)
425 | for commit in matched_commits:
426 | if commit["time"] < time_limit:
427 | continue
428 | if name:
429 | target_group = name.lower()
430 | commit_groups_lower = [g.lower() for g in commit["groups"]]
431 | if target_group not in commit_groups_lower:
432 | continue
433 | submit_count += 1
434 | logger.info(
435 | f"submitter {submitter} is submitting for the {submit_count + 1} time, "
436 |                 f"{max(0, max_count - submit_count - 1)} time(s) remaining, "
437 | f"group={name}, "
438 | f"time period={time_period} hour(s), "
439 | f"max count={max_count}, submit count={submit_count}"
440 | )
441 | use_group = True
442 | if name:
443 | comment += f"keyword `{name}` "
444 | use_group = name.lower() in env.joj3_groups.lower()
445 | comment += (
446 | f"In last {time_period} hour(s): "
447 | f"submit count {submit_count}, "
448 | f"max count {max_count}"
449 | )
450 | if use_group and submit_count + 1 > max_count:
451 | failed = True
452 | comment += ", exceeded."
453 | else:
454 | comment += "."
455 | comment += "\n"
456 | if failed:
457 | title = "### Submission Count Check Failed"
458 | else:
459 | title = "### Submission Count Check Passed"
460 | msg = f"{title}\n{comment}\n"
461 | return msg, failed
462 |
463 |
464 | if __name__ == "__main__":
465 | teapot = Teapot()
466 |
--------------------------------------------------------------------------------
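
The Teapot facade above creates each worker lazily, so a script that only needs one service never authenticates against the others, and log_exception_in_loguru turns uncaught errors in its methods into logged exceptions instead of crashes. A minimal sketch of direct use (the repo names are hypothetical; settings are read from .env as usual):

    from joint_teapot.teapot import Teapot

    teapot = Teapot()
    teapot.clone_all_repos()  # only the Gitea worker and local git are touched
    teapot.create_issue_for_repos(
        ["p1team01", "p1team02"],  # hypothetical repo names
        "Milestone 1 feedback",
        "Please check the comments on your latest release.",
    )
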
/joint_teapot/app.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import re
4 | from datetime import datetime
5 | from pathlib import Path
6 | from time import sleep
7 | from typing import TYPE_CHECKING, List, Optional
8 |
9 | from filelock import FileLock
10 | from git import Repo
11 | from typer import Argument, Exit, Option, Typer, echo
12 |
13 | from joint_teapot.config import Settings, set_settings, settings
14 | from joint_teapot.teapot import Teapot
15 | from joint_teapot.utils import joj3
16 | from joint_teapot.utils.logger import logger, set_logger
17 |
18 | if TYPE_CHECKING:
19 | pass
20 |
21 | app = Typer(add_completion=False)
22 |
23 |
24 | class Tea:
25 | _teapot = None
26 |
27 | @property
28 | def pot(self) -> Teapot:
29 | if not self._teapot:
30 | self._teapot = Teapot()
31 | return self._teapot
32 |
33 |
34 | tea = Tea() # lazy loader
35 |
36 |
37 | @app.command("export-users", help="export users from canvas to csv file")
38 | def export_users_to_csv(output_file: Path = Argument("students.csv")) -> None:
39 | tea.pot.canvas.export_users_to_csv(output_file)
40 |
41 |
42 | @app.command(
43 | "export-wrong-email-users",
 44 |     help="export users with wrong email from canvas to stdout",
45 | )
46 | def export_wrong_email_users() -> None:
47 | tea.pot.canvas.export_wrong_email_users()
48 |
49 |
50 | @app.command(
51 | "invite-to-teams", help="invite all canvas students to gitea teams by team name"
52 | )
53 | def add_all_canvas_students_to_teams(team_names: List[str]) -> None:
54 | tea.pot.add_all_canvas_students_to_teams(team_names)
55 |
56 |
57 | @app.command(
58 | "create-personal-repos",
59 | help="create personal repos on gitea for all canvas students",
60 | )
61 | def create_personal_repos_for_all_canvas_students(
62 | suffix: str = Option(""), template: str = Option("", help="generate from template")
63 | ) -> None:
64 | tea.pot.create_personal_repos_for_all_canvas_students(suffix, template)
65 |
66 |
67 | @app.command("create-teams", help="create teams on gitea by canvas groups")
68 | def create_teams_and_repos_by_canvas_groups(
69 | group_prefix: str, template: str = Option("", help="generate from template")
70 | ) -> None:
71 | tea.pot.create_teams_and_repos_by_canvas_groups(group_prefix, template)
72 |
73 |
74 | @app.command("get-public-keys", help="list all public keys on gitea")
75 | def get_public_key_of_all_canvas_students() -> None:
76 | res = []
77 | for k, v in tea.pot.get_public_key_of_all_canvas_students().items():
78 | keys = "\\n".join(v)
79 | res.append(f"{k},{keys}")
80 | echo("\n".join(res))
81 |
82 |
83 | @app.command("clone-all-repos", help="clone all gitea repos to local")
84 | def clone_all_repos() -> None:
85 | tea.pot.clone_all_repos()
86 |
87 |
88 | @app.command("moss-all-repos", help="moss all gitea repos")
89 | def moss_all_repos(language: str = "cc", wildcards: List[str] = ["*.*"]) -> None:
90 | url = tea.pot.moss_all_repos(language, wildcards)
91 | echo("Report Url: " + url)
92 |
93 |
94 | @app.command("create-issues", help="create issues on gitea")
95 | def create_issue_for_repos(
96 | repo_names: List[str],
97 | title: str,
98 | body: str = Argument(
99 | ..., help="issue body, or, if --from-file is set, filepath to issue body"
100 | ),
101 | from_file: bool = Option(False, "--file/--body"),
102 | use_regex: bool = Option(
103 | False, "--regex", help="repo_names takes list of regexes if set"
104 | ),
105 |     milestone: str = Option("", "--milestone", help="milestone title"),
106 | labels: List[str] = Option(
107 | [],
108 | "--label",
109 | help="labels to add to the issue (use --label A --label B to add multiple)",
110 | ),
111 | ) -> None:
112 | tea.pot.create_issue_for_repos(
113 |         repo_names, title, body, from_file, use_regex, milestone, labels
114 | )
115 |
116 |
117 | @app.command("create-comment", help="create a comment for an issue on gitea")
118 | def create_comment(
119 | repo_name: str,
120 | index: int,
121 | body: str = Argument(..., help="comment body"),
122 | ) -> None:
123 | tea.pot.create_comment(repo_name, index, body)
124 |
125 |
126 | @app.command(
127 | "create-milestones",
128 | help="create milestones on gitea",
129 | )
130 | def create_milestones(
131 | title: str,
132 | regex: str = Argument(".+"),
133 |     due_on: str = Argument("", help="milestone due-on date [YYYY-MM-DD]"),
134 | description: str = Argument(""),
135 | ) -> None:
136 | tea.pot.gitea.create_milestones(title, regex, due_on, description)
137 |
138 |
139 | @app.command("check-issues", help="check the existence of issue by title on gitea")
140 | def check_exist_issue_by_title(repo_names: List[str], title: str) -> None:
141 | echo("\n".join(tea.pot.check_exist_issue_by_title(repo_names, title)))
142 |
143 |
144 | @app.command(
145 | "checkout-releases",
146 | help="checkout git repo to git tag fetched from gitea by release name, with due date",
147 | )
148 | def checkout_to_repos_by_release_name(
149 | repo_names: List[str], release_name: str, due: datetime = Argument("3000-01-01")
150 | ) -> None:
151 | failed_repos = []
152 | succeed_repos = []
153 | for repo_name in repo_names:
154 | succeed = tea.pot.checkout_to_repo_by_release_name(repo_name, release_name, due)
155 | if not succeed:
156 | failed_repos.append(repo_name)
157 | else:
158 | succeed_repos.append(repo_name)
159 | echo(f"succeed repos: {succeed_repos}")
160 | echo(f"failed repos: {failed_repos}")
161 |
162 |
163 | @app.command(
164 | "close-all-issues", help="close all issues and pull requests in gitea organization"
165 | )
166 | def close_all_issues() -> None:
167 | tea.pot.gitea.close_all_issues()
168 |
169 |
170 | @app.command(
171 | "archive-repos", help="archive repos in gitea organization according to regex"
172 | )
173 | def archive_repos(regex: str = Argument(".+"), dry_run: bool = Option(True)) -> None:
174 | tea.pot.gitea.archive_repos(regex, dry_run)
175 |
176 |
177 | @app.command("unwatch-all-repos", help="unwatch all repos in gitea organization")
178 | def unwatch_all_repos() -> None:
179 | tea.pot.gitea.unwatch_all_repos()
180 |
181 |
182 | @app.command("get-no-collaborator-repos", help="list all repos with no collaborators")
183 | def get_no_collaborator_repos() -> None:
184 | tea.pot.gitea.get_no_collaborator_repos()
185 |
186 |
187 | @app.command("get-repos-status", help="list status of all repos with conditions")
188 | def get_repos_status(
189 | commit_lt: int = Argument(100000, help="commit count less than"),
190 | issue_lt: int = Argument(100000, help="issue count less than"),
191 | ) -> None:
192 | tea.pot.get_repos_status(commit_lt, issue_lt)
193 |
194 |
195 | @app.command(
196 | "prepare-assignment-dir",
197 | help='prepare assignment dir from extracted canvas "Download Submissions" zip',
198 | )
199 | def prepare_assignment_dir(dir_or_zip_file: Path) -> None:
200 | tea.pot.canvas.prepare_assignment_dir(str(dir_or_zip_file))
201 |
202 |
203 | @app.command(
204 | "upload-assignment-grades",
205 | help="upload assignment grades to canvas from grade file (GRADE.txt by default), "
206 |     + "reading the first line as the grade and the rest as comments",
207 | )
208 | def upload_assignment_grades(assignments_dir: Path, assignment_name: str) -> None:
209 | tea.pot.canvas.upload_assignment_grades(str(assignments_dir), assignment_name)
210 |
211 |
212 | @app.command(
213 | "create-group-channels-on-mm",
214 | help="create channels for student groups according to group information on"
215 | " gitea; to integrate with webhooks, it's recommended to set suffix to '-gitea'",
216 | )
217 | def create_group_channels_on_mm(
218 | prefix: str = Option(""),
219 | suffix: str = Option(""),
220 | invite_teaching_team: bool = Option(True),
221 | ) -> None:
222 | groups = {
223 | group_name: members
224 | for group_name, members in tea.pot.gitea.get_all_teams().items()
225 | if group_name.startswith(prefix)
226 | }
227 | logger.info(
228 | f"{len(groups)} channel(s) to be created"
229 | + (f" with suffix {suffix}" if suffix else "")
230 | + (", inviting teaching team" if invite_teaching_team else "")
231 | + f": {','.join(groups.keys())}"
232 | )
233 | tea.pot.mattermost.create_channels_for_groups(groups, suffix, invite_teaching_team)
234 |
235 |
236 | @app.command(
237 | "create-personal-channels-on-mm",
238 | help="create channels for every student",
239 | )
240 | def create_personal_channels_on_mm(
241 | invite_teaching_team: bool = Option(True),
242 | ) -> None:
243 | tea.pot.create_channels_for_individuals(invite_teaching_team)
244 |
245 |
246 | @app.command(
247 | "create-webhooks-for-mm",
248 | help="create a pair of webhooks on gitea and mm for all student groups on gitea, "
249 | "and configure them so that updates on gitea will be pushed to the mm channel",
250 | )
251 | def create_webhooks_for_mm(
252 | regex: str = Argument(""),
253 | gitea_suffix: bool = Option(True, help="append gitea suffix to mm channel names"),
254 | ) -> None:
255 | repo_names = [
256 | group_name
257 | for group_name in tea.pot.gitea.get_all_teams()
258 | if re.match(regex, group_name)
259 | ]
260 | logger.info(f"{len(repo_names)} pair(s) of webhooks to be created: {repo_names}")
261 | tea.pot.mattermost.create_webhooks_for_repos(
262 | repo_names, tea.pot.gitea, gitea_suffix
263 | )
264 |
265 |
266 | @app.command(
267 | "unsubscribe-from-repos",
268 |     help="unsubscribe from all repos whose names match the given regex pattern",
269 | )
270 | def unsubscribe_from_repos(pattern: str = Argument("")) -> None:
271 | tea.pot.gitea.unsubscribe_from_repos(pattern)
272 |
273 |
274 | @app.command(
275 | "joj3-all-env",
276 | help="run all joj3 tasks from env var and cli args",
277 | )
278 | def joj3_all_env(
279 | env_path: str = Argument("", help="path to .env file"),
280 | grading_repo_name: str = Option(
281 | "",
282 | help="name of grading repo to push failed table file",
283 | ),
284 | scoreboard_filename: str = Option(
285 | "scoreboard.csv", help="name of scoreboard file in the gitea repo"
286 | ),
287 | failed_table_filename: str = Option(
288 | "failed-table.md", help="name of failed table file in the gitea repo"
289 | ),
290 | max_total_score: int = Option(
291 | -1,
292 | help="max total score",
293 | ),
294 | skip_result_issue: bool = Option(
295 | False,
296 | help="skip creating result issue on gitea",
297 | ),
298 | skip_scoreboard: bool = Option(
299 | False,
300 | help="skip creating scoreboard on gitea",
301 | ),
302 | skip_failed_table: bool = Option(
303 | False,
304 | help="skip creating failed table on gitea",
305 | ),
306 | scoreboard_column_by_ref: bool = Option(
307 | False,
308 | help="use git ref as scoreboard column name",
309 | ),
310 | submitter_in_issue_title: bool = Option(
311 | True,
312 | help="whether to include submitter in issue title",
313 | ),
314 | issue_label_name: str = Option(
315 | "Kind/Testing",
316 | help="label name for the issue created by this command",
317 | ),
318 | issue_label_color: str = Option(
319 | "#795548",
320 | help="label color for the issue created by this command",
321 | ),
322 | issue_label_exclusive: bool = Option(
323 | False,
324 | help="label set as exclusive for the issue created by this command",
325 | ),
326 | end_time: Optional[datetime] = Option(None),
327 | penalty_config: str = Option(
328 | "",
329 | help=(
330 | "Configuration for penalties in the format "
331 | "'hours=factor'. "
332 | "Example: --penalty-config 24=0.75,48=0.5"
333 | ),
334 | ),
335 | ) -> None:
336 | app.pretty_exceptions_enable = False
337 | set_settings(Settings(_env_file=env_path))
338 | set_logger(settings.stderr_log_level)
339 | logger.info(f"debug log to file: {settings.log_file_path}")
340 | env = joj3.Env()
341 | if "" in (
342 | env.github_actor,
343 | env.github_run_number,
344 | env.github_sha,
345 | env.github_repository,
346 | ):
347 | logger.error("missing required env var")
348 | raise Exit(code=1)
349 | submitter_repo_name = env.github_repository.split("/")[-1]
350 | penalty_factor = joj3.get_penalty_factor(end_time, penalty_config)
351 | total_score = joj3.get_total_score(env.joj3_output_path)
352 | total_score = round(total_score - abs(total_score) * (1 - penalty_factor))
353 | res = {
354 | "totalScore": total_score,
355 | "cappedTotalScore": (
356 | total_score if max_total_score < 0 else min(total_score, max_total_score)
357 | ),
358 | "forceQuit": env.joj3_force_quit_stage_name != "",
359 | "forceQuitStageName": env.joj3_force_quit_stage_name,
360 | "issue": 0,
361 | "action": int(env.github_run_number),
362 | "sha": env.github_sha,
363 | "commitMsg": env.joj3_commit_msg,
364 | }
365 | submitter_repo_url = (
366 | f"https://{settings.gitea_domain_name}{settings.gitea_suffix}/"
367 | + f"{settings.gitea_org_name}/{submitter_repo_name}"
368 | )
369 | gitea_actions_url = f"{submitter_repo_url}/actions/runs/{env.github_run_number}"
370 | gitea_issue_url = ""
371 | if not skip_result_issue:
372 | issue_number = tea.pot.joj3_post_issue(
373 | env,
374 | max_total_score,
375 | gitea_actions_url,
376 | submitter_in_issue_title,
377 | submitter_repo_name,
378 | issue_label_name,
379 | issue_label_color,
380 | issue_label_exclusive,
381 | penalty_factor,
382 | )
383 | res["issue"] = issue_number
384 | gitea_issue_url = f"{submitter_repo_url}/issues/{issue_number}"
385 | logger.info(f"gitea issue url: {gitea_issue_url}")
386 | echo(json.dumps(res)) # print result to stdout for joj3 log parser
387 | if skip_scoreboard and skip_failed_table:
388 | return
389 | lock_file_path = os.path.join(
390 | settings.repos_dir, grading_repo_name, settings.joj3_lock_file_path
391 | )
392 | logger.debug(
393 | f"try to acquire lock, file path: {lock_file_path}, "
394 | + f"timeout: {settings.joj3_lock_file_timeout}"
395 | )
396 | with FileLock(lock_file_path, timeout=settings.joj3_lock_file_timeout).acquire():
397 | logger.debug("file lock acquired")
398 | retry_interval = 1
399 | git_push_ok = False
400 | while not git_push_ok:
401 | repo_path = tea.pot.git.repo_clean_and_checkout(
402 | grading_repo_name,
403 | "grading",
404 | clean_git_lock=True,
405 | reset_target="origin/grading",
406 | )
407 | repo: Repo = tea.pot.git.get_repo(grading_repo_name)
408 | if "grading" not in repo.remote().refs:
409 | logger.error(
410 | '"grading" branch not found in remote, create and push it to origin first.'
411 | )
412 | raise Exit(code=1)
413 | if "grading" not in repo.branches:
414 | logger.error('"grading" branch not found in local, create it first.')
415 | raise Exit(code=1)
416 | repo.git.reset("--hard", "origin/grading")
417 | if not skip_scoreboard:
418 | exercise_name = env.joj3_conf_name
419 | if scoreboard_column_by_ref:
420 | exercise_name = env.github_ref
421 | joj3.generate_scoreboard(
422 | env.joj3_output_path,
423 | env.github_actor,
424 | os.path.join(repo_path, scoreboard_filename),
425 | exercise_name,
426 | submitter_repo_name,
427 | total_score,
428 | )
429 | failed_stage = joj3.get_failed_stage_from_file(env.joj3_output_path)
430 | tea.pot.git.add_commit(
431 | grading_repo_name,
432 | [scoreboard_filename],
433 | (
434 | f"joj3: update scoreboard for {env.joj3_conf_name} by @{env.github_actor} in "
435 | f"{settings.gitea_org_name}/{submitter_repo_name}@{env.github_sha}\n\n"
436 | f"gitea actions link: {gitea_actions_url}\n"
437 | f"gitea issue link: {gitea_issue_url}\n"
438 | f"groups: {env.joj3_groups}\n"
439 | f"failed stage: {failed_stage}\n"
440 | ),
441 | )
442 | if not skip_failed_table:
443 | joj3.generate_failed_table(
444 | env.joj3_output_path,
445 | submitter_repo_name,
446 | submitter_repo_url,
447 | os.path.join(repo_path, failed_table_filename),
448 | gitea_actions_url,
449 | )
450 | tea.pot.git.add_commit(
451 | grading_repo_name,
452 | [failed_table_filename],
453 | (
454 | f"joj3: update failed table for {env.joj3_conf_name} by @{env.github_actor} in "
455 | f"{settings.gitea_org_name}/{submitter_repo_name}@{env.github_sha}\n\n"
456 | f"gitea actions link: {gitea_actions_url}\n"
457 | f"gitea issue link: {gitea_issue_url}\n"
458 | f"groups: {env.joj3_groups}\n"
459 | ),
460 | )
461 | push_info_list = tea.pot.git.push(grading_repo_name)
462 | git_push_ok = push_info_list.error is None
463 | if not git_push_ok:
464 | retry_interval *= 2
465 | logger.info(
466 | f"git push failed, retry in {retry_interval} seconds: {push_info_list}"
467 | )
468 | if retry_interval > 64:
469 |                     logger.error("git push failed too many times")
470 | raise Exit(code=1)
471 | sleep(retry_interval)
472 | logger.info("joj3-all-env done")
473 |
474 |
475 | @app.command(
476 | "joj3-check-env",
477 | help="check joj3 restrictions from env var and cli args",
478 | )
479 | def joj3_check_env(
480 | env_path: str = Argument("", help="path to .env file"),
481 | grading_repo_name: str = Option(
482 | "grading",
483 | help="name of grading repo to push scoreboard file",
484 | ),
485 | scoreboard_filename: str = Option(
486 | "scoreboard.csv", help="name of scoreboard file in the gitea repo"
487 | ),
488 | group_config: str = Option(
489 | "=100:24",
490 | help=(
491 | "Configuration for groups in the format "
492 | "'group_name=max_count:time_period(in hours)'. "
493 | "Empty group name for all groups. "
494 | "Negative max_count or time_period for no limit. "
495 | "Example: --group-config joj=10:24,run=20:48"
496 | ),
497 | ),
498 | begin_time: Optional[datetime] = Option(None),
499 | end_time: Optional[datetime] = Option(None),
500 | penalty_config: str = Option(
501 | "",
502 | help=(
503 | "Configuration for penalties in the format "
504 | "'hours=factor'. "
505 | "Example: --penalty-config 24=0.75,48=0.5"
506 | ),
507 | ),
508 | ignore_submitter: bool = Option(
509 | False, help="ignore submitter when checking submission count"
510 | ),
511 | ) -> None:
512 | app.pretty_exceptions_enable = False
513 | set_settings(Settings(_env_file=env_path))
514 | set_logger(settings.stderr_log_level)
515 | logger.info(f"debug log to file: {settings.log_file_path}")
516 | env = joj3.Env()
517 | if "" in (
518 | env.github_actor,
519 | env.github_repository,
520 | ):
521 | logger.error("missing required env var")
522 | raise Exit(code=1)
523 | time_msg, time_failed = tea.pot.joj3_check_submission_time(
524 | begin_time,
525 | end_time,
526 | penalty_config,
527 | )
528 | count_msg, count_failed = tea.pot.joj3_check_submission_count(
529 | env, grading_repo_name, group_config, scoreboard_filename, ignore_submitter
530 | )
531 | echo(
532 | json.dumps(
533 | {
534 | "msg": time_msg + count_msg,
535 | "failed": time_failed or count_failed,
536 | }
537 | )
538 | ) # print result to stdout for joj3
539 | logger.info("joj3-check-env done")
540 |
541 |
542 | @app.command("joj3-check-gitea-token")
543 | def joj3_check_gitea_token(
544 | env_path: str = Argument("", help="path to .env file")
545 | ) -> None:
546 | app.pretty_exceptions_enable = False
547 | set_settings(Settings(_env_file=env_path))
548 | set_logger(settings.stderr_log_level)
549 | tea.pot.gitea.organization_api.org_list_repos(settings.gitea_org_name)
550 |
551 |
552 | if __name__ == "__main__":
553 | try:
554 | app()
555 | except Exception:
556 | logger.exception("Unexpected error:")
557 |
--------------------------------------------------------------------------------
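
The joj3-all-env and joj3-check-env commands above (run as, e.g., python -m joint_teapot joj3-all-env /path/to/.env --max-total-score 100 inside a Gitea Actions job) take the submission context from environment variables through the joj3.Env model rather than from flags. A sketch of the variables a run needs, with placeholder values that would normally be provided by the Actions runtime and JOJ3:

    import os

    from joint_teapot.utils import joj3

    # placeholder values for a local dry run
    os.environ.update(
        {
            "GITHUB_ACTOR": "student123",
            "GITHUB_REPOSITORY": "ece482/p1team01",
            "GITHUB_SHA": "0123abcd",
            "GITHUB_RUN_NUMBER": "17",
            "JOJ3_CONF_NAME": "h1",
            "JOJ3_OUTPUT_PATH": "joj3_result.json",
        }
    )

    env = joj3.Env()  # pydantic BaseSettings maps each field to the matching variable
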
/joint_teapot/workers/gitea.py:
--------------------------------------------------------------------------------
1 | import re
2 | from enum import Enum
3 | from functools import lru_cache
4 | from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, TypeVar
5 |
6 | import focs_gitea
7 | from canvasapi.group import Group, GroupMembership
8 | from canvasapi.paginated_list import PaginatedList
9 | from canvasapi.user import User
10 | from focs_gitea.rest import ApiException
11 |
12 | from joint_teapot.config import settings
13 | from joint_teapot.utils.logger import logger
14 | from joint_teapot.utils.main import default_repo_name_convertor, first
15 |
16 |
17 | class PermissionEnum(Enum):
18 | read = "read"
19 | write = "write"
20 | admin = "admin"
21 |
22 |
23 | T = TypeVar("T")
24 |
25 |
26 | def list_all(method: Callable[..., Iterable[T]], *args: Any, **kwargs: Any) -> List[T]:
27 | all_res = []
28 | page = 1
29 | while True:
30 | res = method(*args, **kwargs, page=page)
31 | if not res:
32 | break
33 | for item in res:
34 | all_res.append(item)
35 | page += 1
36 | return all_res
37 |
38 |
39 | class Gitea:
40 | def __init__(
41 | self,
42 | access_token: str = "", # nosec
43 | org_name: str = "",
44 | domain_name: str = "",
45 | suffix: str = "",
46 | ):
47 | access_token = access_token or settings.gitea_access_token
48 | org_name = org_name or settings.gitea_org_name
49 | domain_name = domain_name or settings.gitea_domain_name
50 | suffix = suffix or settings.gitea_suffix
51 | self.org_name = org_name
52 | configuration = focs_gitea.Configuration()
53 | configuration.api_key["access_token"] = access_token
54 | configuration.host = f"https://{domain_name}{suffix}/api/v1"
55 | configuration.debug = settings.gitea_debug
56 | for v in configuration.logger.values():
57 | v.handlers = []
58 | self.api_client = focs_gitea.ApiClient(configuration)
59 | self.admin_api = focs_gitea.AdminApi(self.api_client)
60 | self.miscellaneous_api = focs_gitea.MiscellaneousApi(self.api_client)
61 | self.organization_api = focs_gitea.OrganizationApi(self.api_client)
62 | self.issue_api = focs_gitea.IssueApi(self.api_client)
63 | self.repository_api = focs_gitea.RepositoryApi(self.api_client)
64 | self.settings_api = focs_gitea.SettingsApi(self.api_client)
65 | self.user_api = focs_gitea.UserApi(self.api_client)
66 | logger.debug("Gitea initialized")
67 |
68 | @lru_cache()
69 | def _get_team_id_by_name(self, name: str) -> int:
70 | res = self.organization_api.team_search(
71 | self.org_name, q=str(name), limit=1
72 | ).to_dict()
73 | if len(res["data"] or []) == 0:
74 | raise Exception(
75 | f"{name} not found by name in Gitea. Possible reason: you did not join this team."
76 | )
77 | return res["data"][0]["id"]
78 |
79 | @lru_cache()
80 | def _get_username_by_canvas_student(self, student: User) -> str:
81 | if (
82 | student.email is not None
83 | and student.email.count("@") == 1
84 | and student.email.endswith("@sjtu.edu.cn")
85 | ):
86 | return student.email.split("@")[0]
87 | raise Exception(f"Can not get username of {student}, an SJTU email is expected")
88 |
89 | def add_canvas_students_to_teams(
90 | self, students: PaginatedList, team_names: List[str]
91 | ) -> None:
92 | for team_name in team_names:
93 | team_id = self._get_team_id_by_name(team_name)
94 | team_members = self.organization_api.org_list_team_members(team_id)
95 | for student in students:
96 | try:
97 | username = self._get_username_by_canvas_student(student)
98 | team_member = first(team_members, lambda x: x.login == username)
99 | if team_member is None:
100 | self.organization_api.org_add_team_member(team_id, username)
101 | logger.info(f"{student} added to team {team_name}")
102 | else:
103 | team_members.remove(team_member)
104 | logger.warning(f"{student} already in team {team_name}")
105 | except Exception as e:
106 | logger.error(e)
107 | for team_member in team_members:
108 | logger.error(
109 | f"{team_member.full_name} found in team {team_name} "
110 | + "but not found in Canvas students"
111 | )
112 |
113 | def create_personal_repos_for_canvas_students(
114 | self,
115 | students: PaginatedList,
116 | repo_name_convertor: Callable[
117 | [User], Optional[str]
118 | ] = default_repo_name_convertor,
119 | template: str = "",
120 | ) -> List[str]:
121 | repo_names = []
122 | for student in students:
123 | repo_name = repo_name_convertor(student)
124 | if repo_name is None:
125 | continue
126 | repo_names.append(repo_name)
127 | try:
128 | try:
129 | if template == "":
130 | body = {
131 | "auto_init": False,
132 | "default_branch": settings.default_branch,
133 | "name": repo_name,
134 | "private": True,
135 | "template": False,
136 | "trust_model": "default",
137 | }
138 | self.organization_api.create_org_repo(self.org_name, body=body)
139 | else:
140 | body = {
141 | "default_branch": settings.default_branch,
142 | "git_content": True,
143 | "git_hooks": True,
144 | "labels": True,
145 | "name": repo_name,
146 | "owner": self.org_name,
147 | "private": True,
148 | "protected_branch": True,
149 | }
150 | self.repository_api.generate_repo(
151 | self.org_name, template, body=body
152 | )
153 | logger.info(
154 | f"Personal repo {self.org_name}/{repo_name} for {student} created"
155 | )
156 | except ApiException as e:
157 | if e.status == 409:
158 | logger.warning(
159 | f"Personal repo {self.org_name}/{repo_name} for {student} already exists"
160 | )
161 | else:
162 |                         raise e
163 | username = self._get_username_by_canvas_student(student)
164 | self.repository_api.repo_add_collaborator(
165 | self.org_name, repo_name, username
166 | )
167 | except Exception as e:
168 | logger.error(e)
169 | return repo_names
170 |
171 | def create_teams_and_repos_by_canvas_groups(
172 | self,
173 | students: PaginatedList,
174 | groups: PaginatedList,
175 | team_name_convertor: Callable[[str], Optional[str]] = lambda name: name,
176 | repo_name_convertor: Callable[[str], Optional[str]] = lambda name: name,
177 | template: str = "",
178 | permission: PermissionEnum = PermissionEnum.write,
179 | ) -> List[str]:
180 | repo_names = []
181 | teams = list_all(self.organization_api.org_list_teams, self.org_name)
182 | repos = list_all(self.organization_api.org_list_repos, self.org_name)
183 | group: Group
184 | for group in groups:
185 | team_name = team_name_convertor(group.name)
186 | repo_name = repo_name_convertor(group.name)
187 | if team_name is None or repo_name is None:
188 | continue
189 | team = first(teams, lambda team: team.name == team_name)
190 | if team is None:
191 | team = self.organization_api.org_create_team(
192 | self.org_name,
193 | body={
194 | "can_create_org_repo": False,
195 | "includes_all_repositories": False,
196 | "name": team_name,
197 | "permission": permission.value,
198 | "units": [
199 | "repo.code",
200 | "repo.issues",
201 | "repo.ext_issues",
202 | "repo.wiki",
203 | "repo.pulls",
204 | "repo.releases",
205 | "repo.projects",
206 | "repo.ext_wiki",
207 | ],
208 | },
209 | )
210 | logger.info(f"Team {team_name} created")
211 | if first(repos, lambda repo: repo.name == repo_name) is None:
212 | repo_names.append(repo_name)
213 | if template == "":
214 | self.organization_api.create_org_repo(
215 | self.org_name,
216 | body={
217 | "auto_init": False,
218 | "default_branch": settings.default_branch,
219 | "name": repo_name,
220 | "private": True,
221 | "template": False,
222 | "trust_model": "default",
223 | },
224 | )
225 | else:
226 | self.repository_api.generate_repo(
227 | self.org_name,
228 | template,
229 | body={
230 | "default_branch": settings.default_branch,
231 | "git_content": True,
232 | "git_hooks": True,
233 | "labels": True,
234 | "name": repo_name,
235 | "owner": self.org_name,
236 | "private": True,
237 | "protected_branch": True,
238 | },
239 | )
240 | logger.info(f"{self.org_name}/{team_name} created")
241 | try:
242 | self.organization_api.org_add_team_repository(
243 | team.id, self.org_name, repo_name
244 | )
245 | except Exception as e:
246 | logger.warning(e)
247 | membership: GroupMembership
248 | student_count = 0
249 | for membership in group.get_memberships():
250 | student = first(students, lambda s: s.id == membership.user_id)
251 | student_count += 1
252 | if student is None:
253 | raise Exception(
254 | f"student with user_id {membership.user_id} not found"
255 | )
256 | try:
257 | username = self._get_username_by_canvas_student(student)
258 | except Exception as e:
259 | logger.warning(e)
260 | continue
261 | try:
262 | self.organization_api.org_add_team_member(team.id, username)
263 | self.repository_api.repo_add_collaborator(
264 | self.org_name, repo_name, username
265 | )
266 | except Exception as e:
267 | logger.error(e)
268 | continue
269 | try:
270 | self.repository_api.repo_delete_branch_protection(
271 | self.org_name, repo_name, settings.default_branch
272 | )
273 | except ApiException as e:
274 | if e.status != 404:
275 | raise
276 | try:
277 | self.repository_api.repo_create_branch_protection(
278 | self.org_name,
279 | repo_name,
280 | body={
281 | "block_on_official_review_requests": True,
282 | "block_on_outdated_branch": True,
283 | "block_on_rejected_reviews": True,
284 | "branch_name": settings.default_branch,
285 | "dismiss_stale_approvals": True,
286 | "enable_approvals_whitelist": False,
287 | "enable_merge_whitelist": False,
288 | "enable_push": True,
289 | "enable_push_whitelist": True,
290 | "merge_whitelist_teams": [],
291 | "merge_whitelist_usernames": [],
292 | "protected_file_patterns": "",
293 | "push_whitelist_deploy_keys": False,
294 | "push_whitelist_teams": ["Owners"],
295 | "push_whitelist_usernames": [],
296 | "require_signed_commits": False,
297 | "required_approvals": max(student_count - 1, 0),
298 | "enable_status_check": True,
299 | "status_check_contexts": ["Run JOJ3 on Push / run (push)"],
300 | },
301 | )
302 | except ApiException as e:
303 | if e.status != 404:
304 | raise
305 | logger.info(f"{self.org_name}/{repo_name} jobs done")
306 | return repo_names
307 |
308 | def get_public_key_of_canvas_students(
309 | self, students: PaginatedList
310 | ) -> Dict[str, List[str]]:
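    | # Map each student's Canvas login_id to the SSH public keys registered on their
    | # Gitea account; students without any key are skipped with an info log.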
311 | res = {}
312 | for student in students:
313 | try:
314 | username = self._get_username_by_canvas_student(student)
315 | keys = [
316 | item.key
317 | for item in list_all(self.user_api.user_list_keys, username)
318 | ]
319 | if not keys:
320 | logger.info(f"{student} has not uploaded ssh keys to gitea")
321 | continue
322 | res[student.login_id] = keys
323 | except Exception as e:
324 | logger.error(e)
325 | return res
326 |
327 | def get_repo_releases(self, repo_name: str) -> List[Any]:
328 | try:
329 | args = self.repository_api.repo_list_releases, self.org_name, repo_name
330 | return list_all(*args)
331 | except ApiException as e:
332 | if e.status != 404:
333 | raise
334 | return []
335 |
336 | def get_all_repo_names(self) -> List[str]:
337 | return [
338 | data.name
339 | for data in list_all(self.organization_api.org_list_repos, self.org_name)
340 | ]
341 |
342 | def get_no_collaborator_repos(self) -> List[str]:
343 | res = []
344 | for data in list_all(self.organization_api.org_list_repos, self.org_name):
345 | collaborators = self.repository_api.repo_list_collaborators(
346 | self.org_name, data.name
347 | )
348 | if collaborators:
349 | continue
350 | logger.info(f"{self.org_name}/{data.name} has no collaborators")
351 | res.append(data.name)
352 | return res
353 |
354 | def get_repos_status(self) -> Dict[str, Tuple[int, int]]:
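    | # Return {repo_name: (commit_count, issue_count)}. A 409 from the commits
    | # endpoint (likely an empty repository) is tolerated and counted as zero commits.
    | # Both counts come from a single API call, so they may be capped by the server's
    | # page size.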
355 | res = {}
356 | for repo in list_all(self.organization_api.org_list_repos, self.org_name):
357 | commits = []
358 | issues = []
359 | try:
360 | commits = self.repository_api.repo_get_all_commits(
361 | self.org_name, repo.name
362 | )
363 | except ApiException as e:
364 | if e.status != 409:
365 | raise
366 | issues = self.issue_api.issue_list_issues(
367 | self.org_name, repo.name, state="all"
368 | )
369 | # if not commits:
370 | # logger.info(f"{self.org_name}/{repo.name} has no commits")
371 | res[repo.name] = (len(commits), len(issues))
372 | return res
373 |
374 | def create_issue(
375 | self,
376 | repo_name: str,
377 | title: str,
378 | body: str,
379 | assign_every_collaborators: bool = True,
380 | milestone: str = "",
381 | labels: List[str] = [],
382 | ) -> None:
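    | # Milestone title and label names are resolved to their numeric IDs; if a name
    | # does not exist in the repo, a warning is logged and the field is left unset.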
383 | assignees = []
384 | if assign_every_collaborators:
385 | assignees = [
386 | item.login
387 | for item in list_all(
388 | self.repository_api.repo_list_collaborators,
389 | self.org_name,
390 | repo_name,
391 | )
392 | ]
393 | milestone_id = None
394 | if milestone:
395 | milestone_list = self.issue_api.issue_get_milestones_list(
396 | self.org_name, repo_name
397 | )
398 | if milestone not in [m.title for m in milestone_list]:
399 | logger.warning(f"Milestone {milestone} does not exist in {repo_name}")
400 | else:
401 | milestone_id = first(
402 | [m.id for m in milestone_list if m.title == milestone]
403 | )
404 | labels_id = []
405 | if labels:
406 | labels_list = self.issue_api.issue_list_labels(self.org_name, repo_name)
407 | labels_id = [l.id for l in labels_list if l.name in labels]
408 | if not labels_id:
409 | logger.warning(f"no label matches {labels}")
410 | self.issue_api.issue_create_issue(
411 | self.org_name,
412 | repo_name,
413 | body={
414 | "title": title,
415 | "body": body,
416 | "assignees": assignees,
417 | "milestone": milestone_id,
418 | "labels": labels_id,
419 | },
420 | )
421 | logger.info(f'Created issue "{title}" in {repo_name}')
422 |
423 | def create_comment(
424 | self,
425 | repo_name: str,
426 | index: int,
427 | body: str,
428 | ) -> None:
429 | self.issue_api.issue_create_comment(
430 | self.org_name,
431 | repo_name,
432 | index,
433 | body={"body": body},
434 | )
435 | logger.info(f"Created comment in {repo_name}/issues/{index}")
436 |
437 | def create_milestone(
438 | self,
439 | repo_name: str,
440 | title: str,
441 | description: str,
442 | due_on: str,
443 | ) -> None:
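    | # due_on is expected to be a plain date string (presumably YYYY-MM-DD); it is
    | # extended to the end of that day in UTC+8. An empty string creates a milestone
    | # without a due date.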
444 | if due_on == "":
445 | self.issue_api.issue_create_milestone(
446 | self.org_name,
447 | repo_name,
448 | body={"title": title, "description": description},
449 | )
450 | return
451 | self.issue_api.issue_create_milestone(
452 | self.org_name,
453 | repo_name,
454 | body={
455 | "title": title,
456 | "description": description,
457 | "due_on": due_on + "T23:59:59.999+08:00",
458 | },
459 | )
460 |
461 | def check_exist_issue_by_title(self, repo_name: str, title: str) -> bool:
462 | for issue in list_all(
463 | self.issue_api.issue_list_issues, self.org_name, repo_name
464 | ):
465 | if issue.title == title:
466 | return True
467 | return False
468 |
469 | def close_all_issues(self) -> None:
470 | for repo_name in self.get_all_repo_names():
471 | issues = list_all(
472 | self.issue_api.issue_list_issues, self.org_name, repo_name
473 | )
474 | for issue in issues:
475 | if issue.state != "closed":
476 | self.issue_api.issue_edit_issue(
477 | self.org_name, repo_name, issue.number, body={"state": "closed"}
478 | )
479 |
480 | def archive_repos(self, regex: str = ".+", dry_run: bool = True) -> None:
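    | # Archive every repo whose full name matches regex; with dry_run=True matching
    | # repos are only logged and nothing is modified.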
481 | if dry_run:
482 | logger.info("Dry run enabled. No changes will be made to the repositories.")
483 | logger.info(f"Archiving repos with name matching {regex}")
484 | for repo_name in self.get_all_repo_names():
485 | if re.fullmatch(regex, repo_name):
486 | logger.info(f"Archived {repo_name}")
487 | if not dry_run:
488 | self.repository_api.repo_edit(
489 | self.org_name, repo_name, body={"archived": True}
490 | )
491 |
492 | def unwatch_all_repos(self) -> None:
493 | for repo in list_all(self.organization_api.org_list_repos, self.org_name):
494 | self.repository_api.user_current_delete_subscription(
495 | self.org_name, repo.name
496 | )
497 | logger.info(f"Unwatched {repo.name}")
498 |
499 | def get_all_teams(self) -> Dict[str, List[str]]:
500 | res: Dict[str, List[str]] = {}
501 | for team in list_all(self.organization_api.org_list_teams, self.org_name):
502 | if team.name == "Owners":
503 | continue
504 | team_id = team.id
505 | try:
506 | members = [
507 | m.login.lower()
508 | for m in self.organization_api.org_list_team_members(team_id)
509 | ]
510 | except ApiException as e:
511 | logger.warning(
512 | f"Failed to get members of team {team_id} in {self.org_name}: {e}"
513 | )
514 | continue
515 | res[team.name] = members
516 | return res
517 |
518 | def unsubscribe_from_repos(self, pattern: str) -> None:
519 | subscriptions = [
520 | sub
521 | for sub in self.user_api.user_current_list_subscriptions()
522 | if sub.owner.login == self.org_name
523 | and re.search(pattern, sub.name) is not None
524 | ]
525 | if len(subscriptions) == 0:
526 | logger.warning(f"No subscribed repo matches the pattern {pattern}")
527 | return
528 | logger.info(
529 | f"{len(subscriptions)} subscriptions match the pattern {pattern}: {[s.name for s in subscriptions]}"
530 | )
531 | for sub in subscriptions:
532 | self.repository_api.user_current_delete_subscription(
533 | self.org_name, sub.name
534 | )
535 | logger.info(f"Unsubscribed from {sub.name}")
536 |
537 | def create_milestones(
538 | self, milestone: str, regex: str, due_date: str, description: str
539 | ) -> None:
540 | for repo_name in self.get_all_repo_names():
541 | if not re.fullmatch(regex, repo_name):
542 | continue
543 | milestone_list = self.issue_api.issue_get_milestones_list(
544 | self.org_name, repo_name
545 | )
546 | if milestone in [m.title for m in milestone_list]:
547 | logger.warning(f"Milestone {milestone} already exists in {repo_name}")
548 | continue
549 | self.create_milestone(repo_name, milestone, description, due_date)
550 | logger.info(f"Created milestone {milestone} in {repo_name}")
551 |
552 |
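    | # Running this module directly constructs a configured Gitea worker, which can
    | # be handy for ad-hoc testing in an interactive session.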
553 | if __name__ == "__main__":
554 | gitea = Gitea()
555 |
--------------------------------------------------------------------------------