├── config
│   ├── setup.cfg
│   ├── pytest.ini
│   ├── development_requirements.txt
│   ├── .coveragerc
│   └── .coveragerc_ci_cd
├── src
│   ├── database
│   │   └── database.db
│   ├── app
│   │   ├── static
│   │   │   ├── images
│   │   │   │   └── favicon
│   │   │   │       ├── favicon.ico
│   │   │   │       ├── favicon-96x96.png
│   │   │   │       ├── apple-touch-icon.png
│   │   │   │       ├── web-app-manifest-192x192.png
│   │   │   │       ├── web-app-manifest-512x512.png
│   │   │   │       └── site.webmanifest
│   │   │   ├── css
│   │   │   │   └── styles.css
│   │   │   └── js
│   │   │       └── script.js
│   │   └── templates
│   │       └── index.html
│   ├── data
│   │   ├── user.py
│   │   ├── output_format.py
│   │   ├── flow.py
│   │   ├── day.py
│   │   ├── semester.py
│   │   ├── course_choice.py
│   │   ├── type.py
│   │   ├── enum_args.py
│   │   ├── degree.py
│   │   ├── case_insensitive_dict.py
│   │   ├── settings.py
│   │   ├── schedule.py
│   │   ├── meeting.py
│   │   ├── language.py
│   │   ├── course.py
│   │   ├── activity.py
│   │   ├── academic_activity.py
│   │   └── course_constraint.py
│   ├── algorithms
│   │   ├── constraint_courses.py
│   │   └── csp.py
│   ├── utils.py
│   └── convertor
│       └── convertor.py
├── readme_data
│   ├── example_image_output_hebrew.png
│   └── example_image_output_english.png
├── .gitattributes
├── tests
│   ├── conftest.py
│   ├── convertor
│   │   └── test_convertor.py
│   ├── controller
│   │   └── test_controller.py
│   ├── algorithms
│   │   ├── test_constraint_courses.py
│   │   └── test_csp.py
│   ├── collector
│   │   └── test_network.py
│   └── data
│       └── test_data.py
├── .github
│   └── workflows
│       ├── delete_all_workflow_runs.yaml
│       ├── build_executable_file.yaml
│       ├── test_cross_platforms.yaml
│       ├── update_data_cycle.yaml
│       ├── generate_constraint_json.yaml
│       └── cycles.yaml
├── requirements.txt
├── main.py
├── scripts
│   ├── release.py
│   ├── argcomplete_semester_organizer.sh
│   ├── update_levnet_data.py
│   └── run_linter.py
├── .gitignore
├── app.py
├── README.md
├── SemesterOrganizer.bat
└── LICENSE
/config/setup.cfg:
--------------------------------------------------------------------------------
1 | [pycodestyle]
2 | ignore = E501
3 |
--------------------------------------------------------------------------------
/config/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | pythonpath = ../.
3 | markers =
4 | network: tests that require network access
5 |
--------------------------------------------------------------------------------
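
The `network` marker registered above is what lets CI deselect tests that hit the real server (cycles.yaml runs pytest with -m "not network"). A usage sketch, not a repository file; the test name is hypothetical:

import pytest


@pytest.mark.network
def test_requires_network_access():
    # Deselected in CI runs that pass: pytest -m "not network"
    ...
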
/src/database/database.db:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ItamarShalev/semester_organizer/HEAD/src/database/database.db
--------------------------------------------------------------------------------
/readme_data/example_image_output_hebrew.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ItamarShalev/semester_organizer/HEAD/readme_data/example_image_output_hebrew.png
--------------------------------------------------------------------------------
/src/app/static/images/favicon/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ItamarShalev/semester_organizer/HEAD/src/app/static/images/favicon/favicon.ico
--------------------------------------------------------------------------------
/readme_data/example_image_output_english.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ItamarShalev/semester_organizer/HEAD/readme_data/example_image_output_english.png
--------------------------------------------------------------------------------
/src/app/static/images/favicon/favicon-96x96.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ItamarShalev/semester_organizer/HEAD/src/app/static/images/favicon/favicon-96x96.png
--------------------------------------------------------------------------------
/src/app/static/images/favicon/apple-touch-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ItamarShalev/semester_organizer/HEAD/src/app/static/images/favicon/apple-touch-icon.png
--------------------------------------------------------------------------------
/src/app/static/images/favicon/web-app-manifest-192x192.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ItamarShalev/semester_organizer/HEAD/src/app/static/images/favicon/web-app-manifest-192x192.png
--------------------------------------------------------------------------------
/src/app/static/images/favicon/web-app-manifest-512x512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ItamarShalev/semester_organizer/HEAD/src/app/static/images/favicon/web-app-manifest-512x512.png
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Apply LF line endings to all text files
2 | * text=auto eol=lf
3 |
4 | # Explicitly mark .db and .png files as binary to exclude them from line ending normalization
5 | *.db binary
6 | *.png binary
7 |
--------------------------------------------------------------------------------
/src/data/user.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 |
3 |
4 | @dataclass
5 | class User:
6 | username: str = None
7 | password: str = None
8 |
9 | def __bool__(self):
10 | return bool(self.username and self.password)
11 |
--------------------------------------------------------------------------------
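
A quick usage sketch, not part of the repository, of the truthiness check defined above; the credential values are placeholders:

from src.data.user import User

# A User is truthy only when both username and password are set.
assert not User()
assert not User(username="someone")
assert User(username="someone", password="secret")
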
/src/data/output_format.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 |
3 |
4 | class OutputFormat(Enum):
5 | CSV = "csv"
6 | EXCEL = "xlsx"
7 | IMAGE = "png"
8 |
9 | def __str__(self):
10 | return self.name.lower()
11 |
12 | def __repr__(self):
13 | return str(self)
14 |
--------------------------------------------------------------------------------
/src/data/flow.py:
--------------------------------------------------------------------------------
1 | from enum import auto
2 | from src.data.enum_args import EnumArgs
3 |
4 |
5 | class Flow(EnumArgs):
6 | CONSOLE = auto()
7 | UPDATE_DATABASE = auto()
8 | UPDATE_SERVER_DATABASE = auto()
9 | UPDATE_GENERATED_JSON_DATA = auto()
10 | RELEASE = auto()
11 | LINTER = auto()
12 |
--------------------------------------------------------------------------------
/src/data/day.py:
--------------------------------------------------------------------------------
1 | from enum import IntEnum, auto
2 |
3 |
4 | class Day(IntEnum):
5 | SUNDAY = auto()
6 | MONDAY = auto()
7 | TUESDAY = auto()
8 | WEDNESDAY = auto()
9 | THURSDAY = auto()
10 | FRIDAY = auto()
11 |
12 | def __str__(self):
13 | return self.name.capitalize()
14 |
15 | def __repr__(self):
16 | return str(self)
17 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from src import utils
3 |
4 |
5 | def pytest_configure(config):
6 | """
7 | Allows plugins and conftest files to perform initial configuration.
8 | This hook is called for every plugin and initial conftest
9 | file after command line options have been parsed.
10 | """
11 | utils.init_project()
12 | utils.config_logging_level(logging.DEBUG)
13 |
--------------------------------------------------------------------------------
/.github/workflows/delete_all_workflow_runs.yaml:
--------------------------------------------------------------------------------
1 | name: Delete all workflow runs
2 | on:
3 | workflow_dispatch:
4 |
5 | jobs:
6 | delete_runs:
7 | runs-on: ubuntu-latest
8 | steps:
9 | - name: Delete workflow runs
10 | uses: Mattraks/delete-workflow-runs@v2
11 | with:
12 | token: ${{ github.token }}
13 | repository: ${{ github.repository }}
14 | retain_days: 0
15 | keep_minimum_runs: 2
16 |
--------------------------------------------------------------------------------
/src/data/semester.py:
--------------------------------------------------------------------------------
1 | from enum import Enum, auto
2 |
3 |
4 | class Semester(Enum):
5 | # Semester אלול (Elul, the summer term)
6 | SUMMER = auto()
7 | # Semester א (first term)
8 | FALL = auto()
9 | # Semester ב (second term)
10 | SPRING = auto()
11 | # Semester שנתי (annual, full-year course)
12 | ANNUAL = auto()
13 |
14 | def __str__(self):
15 | return self.name.capitalize()
16 |
17 | def __repr__(self):
18 | return str(self)
19 |
20 | def __iter__(self):
21 | return iter((self.value, self.name.lower()))
22 |
--------------------------------------------------------------------------------
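
A small sketch, not a repository file, of the tuple unpacking that __iter__ above enables:

from src.data.semester import Semester

# __iter__ yields (value, lower-case name), so a member unpacks directly.
value, name = Semester.FALL
assert (value, name) == (2, "fall")
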
/src/data/course_choice.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import Set
3 |
4 |
5 | @dataclass
6 | class CourseChoice:
7 | name: str
8 | parent_course_number: int
9 | available_teachers_for_lecture: Set[str]
10 | available_teachers_for_practice: Set[str]
11 | attendance_required_for_lecture: bool = True
12 | attendance_required_for_practice: bool = True
13 |
14 | def __eq__(self, other):
15 | return self.name == other.name
16 |
17 | def __hash__(self):
18 | return hash(self.name)
19 |
--------------------------------------------------------------------------------
/src/app/static/images/favicon/site.webmanifest:
--------------------------------------------------------------------------------
1 | {
2 | "name": "MyWebSite",
3 | "short_name": "MySite",
4 | "icons": [
5 | {
6 | "src": "/web-app-manifest-192x192.png",
7 | "sizes": "192x192",
8 | "type": "image/png",
9 | "purpose": "maskable"
10 | },
11 | {
12 | "src": "/web-app-manifest-512x512.png",
13 | "sizes": "512x512",
14 | "type": "image/png",
15 | "purpose": "maskable"
16 | }
17 | ],
18 | "theme_color": "#ffffff",
19 | "background_color": "#ffffff",
20 | "display": "standalone"
21 | }
--------------------------------------------------------------------------------
/config/development_requirements.txt:
--------------------------------------------------------------------------------
1 | # Packages for development
2 | ##########################
3 | # pytest package to run all tests.
4 | pytest>=7.2.0
5 | # pytest extension to re-run failed tests.
6 | pytest-rerunfailures>=10.3
7 | # Run the linter using pylint for code checks and code style.
8 | pylint>=2.15.8
9 | # Export the project as an executable file; supports Linux, macOS and Windows.
10 | pyinstaller>=5.7.0
11 | # Additional style checks that pylint doesn't catch.
12 | pycodestyle>=2.10.0
13 | # Run coverage to check how much of the code the tests cover.
14 | coverage>=6.5.0
15 |
--------------------------------------------------------------------------------
/src/data/type.py:
--------------------------------------------------------------------------------
1 | from enum import IntEnum, auto
2 |
3 |
4 | class Type(IntEnum):
5 | PERSONAL = auto()
6 | LECTURE = auto()
7 | LAB = auto()
8 | PRACTICE = auto()
9 | SEMINAR = auto()
10 |
11 | def is_lecture(self):
12 | return self in [Type.LECTURE, Type.SEMINAR]
13 |
14 | def is_exercise(self):
15 | return self in [Type.LAB, Type.PRACTICE]
16 |
17 | def is_personal(self):
18 | return self == Type.PERSONAL
19 |
20 | def __str__(self):
21 | return self.name.capitalize()
22 |
23 | def __repr__(self):
24 | return str(self)
25 |
--------------------------------------------------------------------------------
/src/data/enum_args.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 | from typing import Union
3 |
4 |
5 | class EnumArgs(Enum):
6 |
7 | def __str__(self):
8 | return self.name.lower()
9 |
10 | def __repr__(self):
11 | return str(self)
12 |
13 | @classmethod
14 | def from_str(cls, name: Union[int, str]):
15 | try:
16 | if isinstance(name, int) or (isinstance(name, str) and name.isdigit()):
17 | return cls(int(name))
18 | return cls[str(name).upper()]
19 | except KeyError:
20 | raise ValueError(f"Enum: {type(cls)} Invalid value: {name}") from None
21 |
--------------------------------------------------------------------------------
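
A usage sketch, not part of the repository, showing how from_str above resolves the Flow enum defined earlier:

from src.data.flow import Flow

# The member name (any case), the numeric value, and the numeric value
# as a string all resolve to the same member.
assert Flow.from_str("console") is Flow.CONSOLE
assert Flow.from_str("CONSOLE") is Flow.CONSOLE
assert Flow.from_str(1) is Flow.CONSOLE   # auto() numbering starts at 1
assert Flow.from_str("1") is Flow.CONSOLE
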
/requirements.txt:
--------------------------------------------------------------------------------
1 | # CSP solver package, the main algorithm.
2 | python-constraint2==2.1.0
3 | # Save and load dataclasses in JSON format.
4 | dataclasses_json>=0.5.7
5 | # Network requests.
6 | requests==2.32.3
7 | # For networking; avoids printing warnings for each request.
8 | urllib3==2.3.0
9 | # Automatic bash command completion.
10 | argcomplete>=2.0.0
11 | # Work with dataframes to export CSV, Excel and image output formats.
12 | pandas>=1.5.2
13 | # Excel output.
14 | xlsxwriter==3.1.9
15 | # Image output.
16 | dataframe_image==0.2.7
17 | # dataframe_image module requirement.
18 | playwright==1.50.0
19 | # dataframe_image module requirement.
20 | matplotlib>=3.7.2
21 | # Flask for the website.
22 | flask==3.1.0
23 |
--------------------------------------------------------------------------------
/config/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | source = src
3 |
4 | [report]
5 | omit =
6 | **/test_*.py
7 | exclude_lines =
8 | raise AssertionError
9 | raise NotImplementedError
10 | raise RuntimeError
11 | except OperationalError:
12 | def _create_schedule
13 | if not was_exists:
14 | except Timeout as error
15 | except json.JSONDecodeError
16 | if not json_data["success"]:
17 | except Exception
18 | raise InvalidServerRequestException
19 | class Gui
20 | throw NotImplementedError
21 | @pytest.mark.skip
22 | except ImportError
23 | def install_auto_complete_cli
24 | raise Exception
25 | if not os.path.exists
26 | if not response_json["totalItems"]
27 | def init_project
28 |
--------------------------------------------------------------------------------
/config/.coveragerc_ci_cd:
--------------------------------------------------------------------------------
1 | [run]
2 | source = src
3 |
4 | [report]
5 | omit =
6 | src/collector/network.py
7 | **/test_*.py
8 | exclude_lines =
9 | raise AssertionError
10 | raise NotImplementedError
11 | raise RuntimeError
12 | except OperationalError:
13 | def _create_schedule
14 | if not was_exists:
15 | except Timeout as error
16 | except json.JSONDecodeError
17 | if not json_data["success"]:
18 | except Exception
19 | raise InvalidServerRequestException
20 | class Gui
21 | throw NotImplementedError
22 | @pytest.mark.network
23 | @pytest.mark.skip
24 | if os.name == 'nt':
25 | sys.stdout.reconfigure
26 | sys.stdout.reconfigure(encoding="utf-8")
27 | if not os.path.exists
28 | except ImportError
29 | class TestController
30 | class Controller
31 | raise Exception
32 | class UserClickExitException
33 | def init_project
34 | def install_auto_complete_cli
35 |
--------------------------------------------------------------------------------
/.github/workflows/build_executable_file.yaml:
--------------------------------------------------------------------------------
1 | name: Build executable file
2 |
3 | on:
4 | workflow_dispatch:
5 |
6 | jobs:
7 |
8 | Build:
9 | strategy:
10 | fail-fast: false
11 | matrix:
12 | os: [ubuntu-latest, windows-latest, macos-latest]
13 | runs-on: ${{ matrix.os }}
14 | steps:
15 | - uses: actions/checkout@v4.1.1
16 | - name: Set up Python 3.13
17 | uses: actions/setup-python@v5.0.0
18 | with:
19 | python-version: "3.13"
20 | cache: pip
21 | - name: Install dependencies
22 | shell: bash
23 | run: |
24 | python -m pip install --upgrade pip
25 | python -m pip install -r requirements.txt
26 | python -m pip install -r config/development_requirements.txt
27 | - name: Build executable file
28 | shell: bash
29 | run: |
30 | title=$(python release.py --title)
31 | echo "title=${title}"
32 | echo "title=${title}" >> $GITHUB_ENV
33 | path=$(python release.py --path)
34 | echo "path=${path}"
35 | echo "path=${path}" >> $GITHUB_ENV
36 | python release.py --build
37 | - name: Upload executable file
38 | uses: actions/upload-artifact@v4.3.1
39 | with:
40 | name: ${{ env.title }}
41 | path: ${{ env.path }}
42 |
--------------------------------------------------------------------------------
/src/data/degree.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 | from dataclasses import dataclass, field
3 | from typing import List
4 |
5 |
6 | @dataclass
7 | class DegreeData:
8 | name: str
9 | department: int
10 | years: int
11 | track_names: List[str] = field(default_factory=lambda: [])
12 |
13 | def __str__(self):
14 | # For example COMPUTER_SCIENCE -> Computer Science
15 | return self.name.replace("_", " ").title()
16 |
17 |
18 | class Degree(Enum):
19 | COMPUTER_SCIENCE = DegreeData("COMPUTER_SCIENCE", 20, 3)
20 | BIOINFORMATICS = DegreeData("BIOINFORMATICS", 11, 3)
21 | SOFTWARE_ENGINEERING = DegreeData("SOFTWARE_ENGINEERING", 20, 3, [
22 | "Software Engineering", "הנדסת תוכנה (מדע הנתונים)", "הנדסת תוכנה סייבר"
23 | ])
24 |
25 | def __str__(self):
26 | # For example COMPUTER_SCIENCE -> Computer Science
27 | return str(self.value)
28 |
29 | def __repr__(self):
30 | return str(self)
31 |
32 | def __hash__(self):
33 | return hash(self.name)
34 |
35 | def __iter__(self):
36 | return iter((self.name, self.value.department))
37 |
38 | def __eq__(self, other):
39 | return self.name == other.name
40 |
41 | @staticmethod
42 | def get_defaults():
43 | return {Degree.COMPUTER_SCIENCE, Degree.SOFTWARE_ENGINEERING}
44 |
45 | def __lt__(self, other):
46 | return self.value.name < other.value.name
47 |
--------------------------------------------------------------------------------
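
A usage sketch, not a repository file, of the Degree helpers above:

from src.data.degree import Degree

# __str__ delegates to DegreeData: COMPUTER_SCIENCE -> "Computer Science".
assert str(Degree.COMPUTER_SCIENCE) == "Computer Science"

# __iter__ yields (name, department), so a member unpacks directly.
name, department = Degree.SOFTWARE_ENGINEERING
assert (name, department) == ("SOFTWARE_ENGINEERING", 20)

# The default set of degrees (also used by Settings further below).
assert Degree.get_defaults() == {Degree.COMPUTER_SCIENCE, Degree.SOFTWARE_ENGINEERING}
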
/.github/workflows/test_cross_platforms.yaml:
--------------------------------------------------------------------------------
1 | name: Test all tests on all platforms
2 |
3 | on:
4 | workflow_dispatch:
5 |
6 | jobs:
7 |
8 | Test:
9 | strategy:
10 | fail-fast: false
11 | matrix:
12 | os: [ubuntu-latest, windows-latest, macos-latest]
13 | python-version: ["3.13", "3.12", "3.11", "3.10", "3.9", "3.8"]
14 | runs-on: ${{ matrix.os }}
15 | steps:
16 | - uses: actions/checkout@v4.1.1
17 | - name: Set up Python ${{ matrix.python-version }}
18 | uses: actions/setup-python@v5.0.0
19 | with:
20 | python-version: ${{ matrix.python-version }}
21 | - name: Cache pip packages
22 | uses: actions/cache@v4
23 | with:
24 | path: ~/.cache/pip
25 | key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
26 | restore-keys: |
27 | ${{ runner.os }}-pip-
28 | - name: Install dependencies
29 | run: |
30 | python -m pip install --upgrade pip
31 | pip install -r requirements.txt
32 | pip install -r config/development_requirements.txt
33 | - name: Extract secrets
34 | shell: bash
35 | run: |
36 | mkdir -p src/database
37 | rm -rf src/database/user_data.txt
38 | touch src/database/user_data.txt
39 | echo ${{ secrets.USERNAME_LEVNET }} >> src/database/user_data.txt
40 | echo ${{ secrets.PASSWORD_LEVNET }} >> src/database/user_data.txt
41 | - name: Run pytest
42 | run: |
43 | pytest $(git ls-files 'test_*.py') -c config/pytest.ini -v --reruns 2 --reruns-delay 5
44 |
--------------------------------------------------------------------------------
/.github/workflows/update_data_cycle.yaml:
--------------------------------------------------------------------------------
1 | name: Update data cycle
2 |
3 | on:
4 | workflow_dispatch:
5 |
6 | jobs:
7 | Update:
8 | runs-on: ubuntu-latest
9 | steps:
10 | - uses: actions/checkout@v4.1.1
11 | - name: Set up Python 3.13
12 | uses: actions/setup-python@v5.0.0
13 | with:
14 | python-version: "3.13"
15 | - name: Cache pip packages
16 | uses: actions/cache@v4
17 | with:
18 | path: ~/.cache/pip
19 | key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
20 | restore-keys: |
21 | ${{ runner.os }}-pip-
22 | - name: Install dependencies
23 | run: |
24 | python -m pip install --upgrade pip
25 | pip install -r requirements.txt
26 | pip install -r config/development_requirements.txt
27 | - name: Clear and create log file
28 | run: |
29 | rm -rf log.txt
30 | touch log.txt
31 | - name: Extract secrets
32 | shell: bash
33 | run: |
34 | mkdir -p src/database
35 | rm -rf src/database/user_data.txt
36 | touch src/database/user_data.txt
37 | echo ${{ secrets.USERNAME_LEVNET }} >> src/database/user_data.txt
38 | echo ${{ secrets.PASSWORD_LEVNET }} >> src/database/user_data.txt
39 | - name: Run python update script flow
40 | run: |
41 | python update_levnet_data.py
42 | - name: Commit changes
43 | run: |
44 | git config --local user.email "itamar1111111@gmail.com"
45 | git config --local user.name "Itamar Shalev"
46 | git add src/database/database.db --force
47 | git commit -sm "[AUTO] database: Update levnet courses data."
48 | - name: Push to main
49 | uses: CasperWA/push-protected@v2
50 | with:
51 | token: ${{ secrets.SPEICAL_TOKEN }}
52 | branch: main
53 | force: True
54 |
--------------------------------------------------------------------------------
/src/data/case_insensitive_dict.py:
--------------------------------------------------------------------------------
1 | import re
2 |
3 |
4 | # pylint: disable=protected-access
5 | class CaseInsensitiveDict(dict):
6 | @classmethod
7 | def _k(cls, key):
8 | return key.lower() if isinstance(key, str) else key
9 |
10 | def __init__(self, *args, **kwargs):
11 | super().__init__(*args, **kwargs)
12 | self._convert_keys()
13 |
14 | def __getitem__(self, key):
15 | return super().__getitem__(self.__class__._k(key))
16 |
17 | def __setitem__(self, key, value):
18 | super().__setitem__(self.__class__._k(key), value)
19 |
20 | def __delitem__(self, key):
21 | return super().__delitem__(self.__class__._k(key))
22 |
23 | def __contains__(self, key):
24 | return super().__contains__(self.__class__._k(key))
25 |
26 | def pop(self, key, *args, **kwargs):
27 | return super().pop(self.__class__._k(key), *args, **kwargs)
28 |
29 | def get(self, key, *args, **kwargs):
30 | return super().get(self.__class__._k(key), *args, **kwargs)
31 |
32 | def setdefault(self, key, *args, **kwargs):
33 | return super().setdefault(self.__class__._k(key), *args, **kwargs)
34 |
35 | def update(self, E=None, **F):
36 | E = E or {}
37 | super().update(self.__class__(E))
38 | super().update(self.__class__(**F))
39 |
40 | def _convert_keys(self):
41 | for key in list(self.keys()):
42 | value = super().pop(key)
43 | self[key] = value
44 |
45 |
46 | class TextCaseInsensitiveDict(CaseInsensitiveDict):
47 | @classmethod
48 | def _k(cls, key):
49 | if isinstance(key, str):
50 | key = key.lower()
51 | # Strip punctuation and whitespace from the start and end of the text
52 | key = re.sub(r'[,.:;()!? \n\r\t=-]*$', '', key)
53 | key = re.sub(r'^[,.:;()!? \n\r\t=-]*', '', key)
54 | return key.lower().strip()
55 | return key
56 |
--------------------------------------------------------------------------------
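
A usage sketch, not part of the repository, for the two dictionary classes above; the keys are made up:

from src.data.case_insensitive_dict import CaseInsensitiveDict, TextCaseInsensitiveDict

# Keys are normalized to lower case on insert and on every lookup.
courses = CaseInsensitiveDict({"Calculus": 1})
assert courses["calculus"] == 1 and "CALCULUS" in courses

# The text variant also strips surrounding punctuation and whitespace.
titles = TextCaseInsensitiveDict({"  Linear Algebra: ": 2})
assert titles["linear algebra"] == 2
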
/src/data/settings.py:
--------------------------------------------------------------------------------
1 | from typing import List, Set
2 | from dataclasses import dataclass, field
3 | from dataclasses_json import dataclass_json
4 |
5 | from src.data.day import Day
6 | from src.data.degree import Degree
7 | from src.data.output_format import OutputFormat
8 | from src.data.semester import Semester
9 | from src.data.language import Language
10 | from src import utils
11 |
12 |
13 | @dataclass_json
14 | @dataclass
15 | class Settings:
16 | attendance_required_all_courses: bool = True
17 | campus_name: str = ""
18 | year: int = utils.get_current_hebrew_year()
19 | semester: Semester = utils.get_current_semester()
20 | _degree: str = "COMPUTER_SCIENCE"
21 | _degrees: List[str] = field(default_factory=lambda: [degree.name for degree in Degree.get_defaults()])
22 | show_hertzog_and_yeshiva: bool = False
23 | show_only_courses_with_free_places: bool = False
24 | show_only_courses_active_classes: bool = True
25 | show_only_courses_with_the_same_actual_number: bool = True
26 | dont_show_courses_already_done: bool = True
27 | show_only_classes_in_days: List[Day] = field(default_factory=lambda: list(Day))
28 | output_formats: List[OutputFormat] = field(default_factory=lambda: [OutputFormat.IMAGE])
29 | show_only_classes_can_enroll: bool = True
30 | show_only_courses_with_prerequisite_done: bool = False
31 | language: Language = Language.get_current()
32 | force_update_data: bool = True
33 | show_english_speaker_courses: bool = False
34 |
35 | @property
36 | def degrees(self) -> Set[Degree]:
37 | return {Degree[degree] for degree in self._degrees}
38 |
39 | @degrees.setter
40 | def degrees(self, degrees: Set[Degree]):
41 | self._degrees = [degree.name for degree in degrees]
42 |
43 | @property
44 | def degree(self) -> Degree:
45 | return Degree[self._degree]
46 |
47 | @degree.setter
48 | def degree(self, degree: Degree):
49 | self._degree = degree.name
50 |
--------------------------------------------------------------------------------
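
A usage sketch, not a repository file, of the degree properties above; it assumes the project is importable the same way the tests import it:

from src.data.settings import Settings
from src.data.degree import Degree

# The public properties expose Degree objects, while the private string
# fields (_degree, _degrees) are what dataclasses_json serializes.
settings = Settings()
settings.degree = Degree.SOFTWARE_ENGINEERING
assert settings.degree is Degree.SOFTWARE_ENGINEERING

settings.degrees = {Degree.COMPUTER_SCIENCE}
assert settings.degrees == {Degree.COMPUTER_SCIENCE}
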
/.github/workflows/generate_constraint_json.yaml:
--------------------------------------------------------------------------------
1 | name: Generate constraint json data
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | workflow_dispatch:
8 |
9 | jobs:
10 | UpdateNewGenerated:
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: actions/checkout@v4.1.1
14 | - name: Set up Python 3.13
15 | uses: actions/setup-python@v5.0.0
16 | with:
17 | python-version: "3.13"
18 | - name: Cache pip packages
19 | uses: actions/cache@v4
20 | with:
21 | path: ~/.cache/pip
22 | key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
23 | restore-keys: |
24 | ${{ runner.os }}-pip-
25 | - name: Install dependencies
26 | run: |
27 | python -m pip install --upgrade pip
28 | pip install -r requirements.txt
29 | pip install -r config/development_requirements.txt
30 | - name: Clear and create log file
31 | run: |
32 | rm -rf log.txt
33 | touch log.txt
34 | - name: Extract secrets
35 | shell: bash
36 | run: |
37 | mkdir -p src/database
38 | rm -rf src/database/user_data.txt
39 | touch src/database/user_data.txt
40 | echo ${{ secrets.USERNAME_LEVNET }} >> src/database/user_data.txt
41 | echo ${{ secrets.PASSWORD_LEVNET }} >> src/database/user_data.txt
42 | - name: Run python update script flow
43 | run: |
44 | python main.py --flow update_generated_json_data
45 | - name: Commit changes if there are any
46 | id: commit_changes
47 | run: |
48 | git config --local user.email "itamar1111111@gmail.com"
49 | git config --local user.name "Itamar Shalev"
50 | find src/algorithms/generated_data -type f | grep -v "personal" | xargs git add --force
51 | if git diff --quiet && git diff --staged --quiet; then
52 | echo "No changes to commit."
53 | echo "should_push=false" >> $GITHUB_OUTPUT
54 | else
55 | git commit -sm "[AUTO] algorithms: Generate constraint courses json data."
56 | echo "Changes committed."
57 | echo "should_push=true" >> $GITHUB_OUTPUT
58 | fi
59 | - name: Push to main
60 | if: ${{ steps.commit_changes.outputs.should_push == 'true' }}
61 | uses: CasperWA/push-protected@v2
62 | with:
63 | token: ${{ secrets.SPEICAL_TOKEN }}
64 | branch: main
65 | force: True
66 |
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # PYTHON_ARGCOMPLETE_OK
3 |
4 | import logging
5 | import pathlib
6 | from argparse import ArgumentParser
7 |
8 | import argcomplete
9 |
10 | from src import utils
11 | from src.collector.db import Database
12 | from src.controller.controller import Controller
13 | from src.algorithms.constraint_courses import ConstraintCourses
14 | from src.data.language import Language
15 | from src.data.user import User
16 | from src.data.flow import Flow
17 | from src.data.translation import _
18 |
19 |
20 | def get_args():
21 | parser = ArgumentParser()
22 | parser.add_argument("-f", "--flow", help="The flow to run, for example console or update_database.",
23 | default=Flow.CONSOLE, choices=list(Flow), type=Flow.from_str)
24 | parser.add_argument("-u", "--username", help="The username for the server", default=None, type=str)
25 | parser.add_argument("-p", "--password", help="The password for the server", default=None, type=str)
26 | parser.add_argument("-l", "--language", help="Set the language of the program", choices=list(Language),
27 | type=Language.from_str, default=Language.get_default())
28 | parser.add_argument("-v", "--verbose", help="Print more debug logs", default=False, action="store_true")
29 | parser.add_argument("--database_path", default="", type=pathlib.Path,
30 | help="Path to a database file (.db). Updates the local database from the given file, "
31 | "which can be downloaded from the server (currently GitHub)")
32 | argcomplete.autocomplete(parser)
33 | return parser.parse_args()
34 |
35 |
36 | def main():
37 | utils.init_project()
38 | args = get_args()
39 | utils.config_logging_level(logging.DEBUG if args.verbose else logging.ERROR)
40 | Language.set_current(args.language)
41 | database = Database()
42 |
43 | if args.username and args.password:
44 | database.save_user_data(User(args.username, args.password))
45 |
46 | if args.flow is Flow.CONSOLE:
47 | Controller(verbose=args.verbose).run_console_flow()
48 |
49 | elif args.flow is Flow.UPDATE_DATABASE:
50 | message = _("Database path is not a file or doesn't exists, the path given is: ")
51 | assert args.database_path.is_file(), message + str(args.database_path)
52 | database.update_database(args.database_path)
53 |
54 | elif args.flow is Flow.UPDATE_GENERATED_JSON_DATA:
55 | ConstraintCourses().export_generated_json_data()
56 |
57 |
58 | if __name__ == '__main__':
59 | main()
60 |
--------------------------------------------------------------------------------
/src/data/schedule.py:
--------------------------------------------------------------------------------
1 | from typing import List, Set
2 | import time
3 | from src.data.activity import Activity
4 | from src.data.day import Day
5 | from src.data.meeting import Meeting
6 |
7 |
8 | class Schedule:
9 |
10 | def __init__(self, name: str, file_name: str, description: str, activities: List[Activity]):
11 | self.name = name
12 | self.file_name = file_name
13 | self.description = description
14 | self.activities = activities
15 |
16 | def __str__(self):
17 | return f"{self.name}"
18 |
19 | def __repr__(self):
20 | return str(self)
21 |
22 | def __eq__(self, other):
23 | is_equals = len(self.activities) == len(other.activities)
24 | return is_equals and all(activity in other.activities for activity in self.activities)
25 |
26 | def __hash__(self):
27 | return hash(self.file_name)
28 |
29 | def __contains__(self, activity):
30 | return activity in self.activities
31 |
32 | def contains(self, activities):
33 | return all(activity in self for activity in activities)
34 |
35 | def get_learning_days(self) -> Set[Day]:
36 | return {meeting.day for activity in self.activities for meeting in activity.meetings}
37 |
38 | def get_all_academic_meetings(self) -> List[Meeting]:
39 | return [meeting for activity in self.activities for meeting in activity.meetings
40 | if not activity.type.is_personal()]
41 |
42 | def get_standby_in_minutes(self) -> float:
43 | """
44 | Get standby hours for all academic activities in schedule in minutes.
45 | """
46 | result = 0
47 | meetings = self.get_all_academic_meetings()
48 | meetings.sort()
49 |
50 | def to_minutes(struct_time: time.struct_time):
51 | return struct_time.tm_hour * 60 + struct_time.tm_min
52 |
53 | for i in range(len(meetings) - 1):
54 | if meetings[i].day != meetings[i + 1].day:
55 | continue
56 | delta_time = to_minutes(meetings[i + 1].start_time) - to_minutes(meetings[i].end_time)
57 | # Breaks of 15 minutes or less between classes don't count as standby time
58 | if delta_time > 15:
59 | result += delta_time
60 | return result
61 |
62 | def get_all_meetings_by_day(self, day: Day) -> Set[Meeting]:
63 | return {meeting for meeting in self.get_all_academic_meetings() if meeting.day is day}
64 |
65 | def __copy__(self):
66 | return Schedule(self.name, self.file_name, self.description, self.activities.copy())
67 |
--------------------------------------------------------------------------------
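
A small sketch, not part of the repository, of the standby computation above; the course names are made up:

from src.data.activity import Activity
from src.data.day import Day
from src.data.meeting import Meeting
from src.data.schedule import Schedule
from src.data.type import Type

# Two lectures on the same day with a one-hour gap between them.
calculus = Activity("Calculus", Type.LECTURE, True)
calculus.add_slot(Meeting(Day.MONDAY, "09:00", "11:00"))
logic = Activity("Logic", Type.LECTURE, True)
logic.add_slot(Meeting(Day.MONDAY, "12:00", "14:00"))

schedule = Schedule("Option 1", "option_1", "", [calculus, logic])
assert schedule.get_learning_days() == {Day.MONDAY}
# Gaps of 15 minutes or less count as regular breaks, longer gaps as standby.
assert schedule.get_standby_in_minutes() == 60
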
/src/data/meeting.py:
--------------------------------------------------------------------------------
1 | from typing import Union
2 | from time import struct_time, strptime
3 | import time
4 | import functools
5 | from src.data.day import Day
6 |
7 |
8 | @functools.total_ordering
9 | class Meeting:
10 |
11 | def __init__(self, day: Union[Day, int], start_time: Union[struct_time, str], end_time: Union[struct_time, str]):
12 | if isinstance(day, int):
13 | day = Day(day)
14 | self.day = day
15 | if isinstance(start_time, str):
16 | self.start_time = Meeting.str_to_time(start_time)
17 | else:
18 | self.start_time = start_time
19 |
20 | if isinstance(end_time, str):
21 | self.end_time = Meeting.str_to_time(end_time)
22 | else:
23 | self.end_time = end_time
24 |
25 | if self.start_time >= self.end_time:
26 | raise RuntimeError("Start time is after end time")
27 |
28 | def __str__(self):
29 | return f"{self.get_string_start_time()} - {self.get_string_end_time()}"
30 |
31 | def __repr__(self):
32 | return str(self)
33 |
34 | def is_crash_with_meeting(self, meeting):
35 | if self.day != meeting.day:
36 | return False
37 | meeting_crash = self.start_time <= meeting.start_time < self.end_time
38 | other_meeting_crash = meeting.start_time <= self.start_time < meeting.end_time
39 | return meeting_crash or other_meeting_crash
40 |
41 | def is_crash_with_meetings(self, meetings):
42 | if not meetings:
43 | return False
44 | return any(self.is_crash_with_meeting(meeting) for meeting in meetings)
45 |
46 | def get_string_start_time(self):
47 | return time.strftime("%H:%M", self.start_time)
48 |
49 | def get_string_end_time(self):
50 | return time.strftime("%H:%M", self.end_time)
51 |
52 | def __eq__(self, other):
53 | is_equals = self.day == other.day
54 | is_equals = is_equals and self.start_time == other.start_time
55 | is_equals = is_equals and self.end_time == other.end_time
56 | return is_equals
57 |
58 | def __lt__(self, other):
59 | if self.day < other.day:
60 | return True
61 | return self.day == other.day and self.start_time < other.start_time and self.end_time < other.end_time
62 |
63 | def __hash__(self):
64 | return hash((self.day, self.start_time, self.end_time))
65 |
66 | def __iter__(self):
67 | return iter((self.day.value, self.get_string_start_time(), self.get_string_end_time()))
68 |
69 | @staticmethod
70 | def str_to_time(time_str):
71 | """
72 | :param time_str: time in format "HH:MM" for example: "13:00"
73 | :return: struct_time
74 | """
75 | return strptime(time_str, "%H:%M")
76 |
--------------------------------------------------------------------------------
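
A usage sketch, not a repository file, of the clash detection above:

from src.data.day import Day
from src.data.meeting import Meeting

a = Meeting(Day.SUNDAY, "10:00", "12:00")
b = Meeting(Day.SUNDAY, "11:00", "13:00")
c = Meeting(Day.SUNDAY, "12:00", "14:00")

# Overlapping slots on the same day clash; back-to-back slots do not.
assert a.is_crash_with_meeting(b)
assert not a.is_crash_with_meeting(c)
# Different days never clash, regardless of the hours.
assert not a.is_crash_with_meeting(Meeting(Day.MONDAY, "10:00", "12:00"))
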
/src/data/language.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | from enum import Enum, auto
3 | from typing import Union
4 |
5 |
6 | class Language(Enum):
7 | _ignore_ = ['__current_language']
8 | ENGLISH = auto()
9 | HEBREW = auto()
10 |
11 | __current_language: Language = None
12 |
13 | def short_name(self) -> str:
14 | """Return the short name of the language, e.g., ENGLISH -> 'en'."""
15 | return self.name[:2].lower()
16 |
17 | def __contains__(self, item):
18 | return Language.contains(item)
19 |
20 | @classmethod
21 | def contains(cls, key: str) -> bool:
22 | """Check if the given key (string) is a valid Language enum member."""
23 | return any(key.upper() == item.name for item in cls)
24 |
25 | @classmethod
26 | def from_str(cls, name: Union[int, str]) -> Language:
27 | """
28 | Convert a string or an integer to a Language enum.
29 |
30 | - If given a 2-letter short name, return the corresponding Language.
31 | - If given a digit, return the language at that position (1-based index).
32 | - Otherwise, fallback to standard Enum lookup.
33 | """
34 |
35 | if isinstance(name, int):
36 | try:
37 | return list(cls)[name - 1] # 1-based index
38 | except IndexError:
39 | raise ValueError(f"Invalid index: {name}. Must be between 1 and {len(cls)}.") from None
40 |
41 | if isinstance(name, str):
42 | name = name.strip()
43 | if len(name) == 2:
44 | for language in cls:
45 | if language.short_name() == name.lower():
46 | return language
47 | raise ValueError(f"No matching language for short name: {name}") from None
48 |
49 | if name.isdigit():
50 | return cls.from_str(int(name))
51 |
52 | try:
53 | return cls[name.upper()]
54 | except KeyError:
55 | raise ValueError(f"Invalid language name: {name}") from None
56 |
57 | raise TypeError(f"Invalid type: {type(name)}. Expected int or str.")
58 |
59 | @classmethod
60 | def get_default(cls) -> Language:
61 | """Return the default language."""
62 | return Language.HEBREW
63 |
64 | @classmethod
65 | def get_current(cls) -> Language:
66 | """Return the currently set language."""
67 | return cls.__current_language or cls.get_default()
68 |
69 | @classmethod
70 | def set_current(cls, language: Language) -> None:
71 | """Set the current language."""
72 | if not isinstance(language, cls):
73 | raise TypeError(f"Expected instance of {cls}, got {type(language)}")
74 | cls.__current_language = language
75 |
76 | def __str__(self):
77 | return self.name.lower()
78 |
79 | def __repr__(self):
80 | return str(self)
81 |
--------------------------------------------------------------------------------
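
A usage sketch, not part of the repository, of from_str and the current-language helpers above:

from src.data.language import Language

# The same member is reachable by name, 2-letter short name,
# 1-based index, or the index given as a string.
assert Language.from_str("hebrew") is Language.HEBREW
assert Language.from_str("he") is Language.HEBREW
assert Language.from_str(2) is Language.HEBREW
assert Language.from_str("2") is Language.HEBREW

Language.set_current(Language.ENGLISH)
assert Language.get_current() is Language.ENGLISH
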
/scripts/release.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # PYTHON_ARGCOMPLETE_OK
3 |
4 |
5 | import argparse
6 | import logging
7 | import os
8 | import platform
9 | import subprocess
10 | from enum import Enum
11 | from typing import Optional
12 |
13 | import argcomplete
14 | from src import utils
15 | from src.collector.db import Database
16 |
17 |
18 | class OS(Enum):
19 | WINDOWS = ".exe"
20 | UBUNTU = ""
21 | MAC = ".dmg"
22 |
23 | def __str__(self):
24 | return self.name.capitalize()
25 |
26 |
27 | def get_args():
28 | parser = argparse.ArgumentParser()
29 | parser.add_argument("-b", "--build", help="Create executable file", default=False, action="store_true")
30 | parser.add_argument("-t", "--title", help="Print the title of this build", default=False, action="store_true")
31 | parser.add_argument("-p", "--path", help="Print the executable path result", default=False, action="store_true")
32 | argcomplete.autocomplete(parser)
33 | arguments = parser.parse_args()
34 | return arguments
35 |
36 |
37 | def get_os_type() -> Optional[OS]:
38 | system_type = platform.system()
39 | if system_type == "Windows":
40 | return OS.WINDOWS
41 |
42 | if system_type == "Linux":
43 | return OS.UBUNTU
44 |
45 | if system_type == "Darwin":
46 | return OS.MAC
47 |
48 | return None
49 |
50 |
51 | def build(os_build_type: OS):
52 | database_file_path = Database().shared_database_path
53 | main_path = utils.ROOT_PATH / "main.py"
54 | separator = ';'
55 | if os_build_type in [OS.UBUNTU, OS.MAC]:
56 | separator = ':'
57 |
58 | print(f"Building executable file for {os_build_type} OS")
59 | print(f"Main script path: {main_path}")
60 |
61 | pyinstaller_cmd = f"pyinstaller --onefile " \
62 | f"--add-binary {database_file_path}{separator}database " \
63 | f"--name SemesterOrganizer{os_build_type.value} main.py"
64 |
65 | print("Running pyinstaller command: ", pyinstaller_cmd)
66 | return_code = subprocess.call(pyinstaller_cmd.split(" "))
67 | assert return_code == 0, "Pyinstaller command failed"
68 |
69 | print("Build finished successfully")
70 |
71 |
72 | def main():
73 | utils.init_project()
74 | utils.config_logging_level(logging.DEBUG)
75 | os_type = get_os_type()
76 | args = get_args()
77 | args_count = sum([args.build, args.title, args.path])
78 | error_message = "Need exactly one argument from the following: build, title, path."
79 |
80 | assert args_count == 1, error_message
81 | assert os_type, f"{os.name} OS is not supported."
82 |
83 | if args.title:
84 | print(f"Executable File for {os_type} OS")
85 |
86 | elif args.path:
87 | file_path = os.path.join("dist", f"SemesterOrganizer{os_type.value}")
88 | print(file_path)
89 |
90 | elif args.build:
91 | build(os_type)
92 |
93 |
94 | if __name__ == "__main__":
95 | main()
96 |
--------------------------------------------------------------------------------
/tests/convertor/test_convertor.py:
--------------------------------------------------------------------------------
1 | import os
2 | import shutil
3 |
4 | import pytest
5 | from src import utils
6 | from src.convertor.convertor import Convertor
7 | from src.data.academic_activity import AcademicActivity
8 | from src.data.activity import Activity
9 | from src.data.day import Day
10 | from src.data.language import Language
11 | from src.data.meeting import Meeting
12 | from src.data.output_format import OutputFormat
13 | from src.data.schedule import Schedule
14 | from src.data.type import Type
15 |
16 |
17 | class TestConvertor:
18 |
19 | @staticmethod
20 | def _create_schedule(file_name: str):
21 | activity = AcademicActivity("שם", Type.LECTURE, True, "שם המרצה", 1, 100, "מיקום")
22 | activity.add_slot(Meeting(Day.MONDAY, Meeting.str_to_time("10:00"), Meeting.str_to_time("12:00")))
23 | return Schedule("שם", file_name, "", [activity])
24 |
25 | @pytest.mark.parametrize("file_type, use_multiprocessing",
26 | [(file_type, use_multiprocessing)
27 | for file_type in OutputFormat for use_multiprocessing in [True, False]])
28 | def test_convert_type(self, file_type: OutputFormat, use_multiprocessing: bool):
29 | Language.set_current(Language.HEBREW)
30 |
31 | convertor = Convertor()
32 | path = utils.get_results_test_path()
33 | extension = file_type.value
34 | schedules = []
35 | shutil.rmtree(path, ignore_errors=True)
36 |
37 | for i in range(1, 5):
38 | schedules.append(TestConvertor._create_schedule(f"option_{i}"))
39 |
40 | activity = Activity("שם", Type.PERSONAL, True)
41 | activity.add_slot(Meeting(Day.FRIDAY, "10:00", "12:00"))
42 | schedules.append(Schedule("שם", f"option_{5}", "", [activity]))
43 | os.environ["multiprocessing"] = str(use_multiprocessing)
44 | # Just for coverage
45 | convertor.convert_activities([], path, [file_type])
46 | convertor.convert_activities(schedules, path, [file_type])
47 | for i in range(1, 5):
48 | file_name = f"option_{i}.{extension}"
49 | file_path = path / file_name
50 | assert file_path.is_file(), f"{file_name} does not exist"
51 | assert file_path.stat().st_size > 0, f"{file_name} is empty"
52 |
53 | def test_convert_all_types(self):
54 | convertor = Convertor()
55 | path = utils.get_results_test_path()
56 | schedules = []
57 | shutil.rmtree(path, ignore_errors=True)
58 |
59 | for i in range(1, 10):
60 | schedules.append(TestConvertor._create_schedule(f"option_{i}"))
61 | activity = Activity("שם", Type.PERSONAL, True)
62 | activity.add_slot(Meeting(Day.FRIDAY, "10:00", "12:00"))
63 | schedules.append(Schedule("שם", f"option_{10}", "", [activity]))
64 |
65 | convertor.convert_activities(schedules, path, list(OutputFormat))
66 |
67 | for file_type in OutputFormat:
68 | extension = file_type.value
69 | folder_type_path = path / file_type.name.lower()
70 | for i in range(1, 11):
71 | file_name = f"option_{i}.{extension}"
72 | file_path = folder_type_path / file_name
73 | assert file_path.is_file(), f"{file_name} does not exist"
74 | assert file_path.stat().st_size > 0, f"{file_name} is empty"
75 |
--------------------------------------------------------------------------------
/.github/workflows/cycles.yaml:
--------------------------------------------------------------------------------
1 | name: Code checks
2 |
3 | on:
4 | push:
5 | pull_request:
6 |
7 | jobs:
8 |
9 | Build:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - uses: actions/checkout@v4.1.1
13 | - name: Set up Python 3.13
14 | uses: actions/setup-python@v5.0.0
15 | with:
16 | python-version: "3.13"
17 | - name: Cache pip packages
18 | uses: actions/cache@v4
19 | with:
20 | path: ~/.cache/pip
21 | key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
22 | restore-keys: |
23 | ${{ runner.os }}-pip-
24 | - name: Install dependencies
25 | run: |
26 | python -m pip install --upgrade pip
27 | pip install -r requirements.txt
28 | pip install -r config/development_requirements.txt
29 | - name: Analysing the code with pylint
30 | run: |
31 | pylint --rcfile=config/.pylintrc $(git ls-files '*.py')
32 |
33 | Lint:
34 | runs-on: ubuntu-latest
35 | steps:
36 | - uses: actions/checkout@v4.1.1
37 | with:
38 | submodules: 'true'
39 | token: ${{ secrets.SPEICAL_TOKEN }}
40 | - name: Set up Python 3.13
41 | uses: actions/setup-python@v5.0.0
42 | with:
43 | python-version: "3.13"
44 | - name: Cache pip packages
45 | uses: actions/cache@v4
46 | with:
47 | path: ~/.cache/pip
48 | key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
49 | restore-keys: |
50 | ${{ runner.os }}-pip-
51 | - name: Install dependencies
52 | run: |
53 | python -m pip install --upgrade pip
54 | pip install -r requirements.txt
55 | pip install -r config/development_requirements.txt
56 | - name: Analysing the code with pycodestyle
57 | run: |
58 | pycodestyle --config config/setup.cfg $(git ls-files '*.py')
59 |
60 | Test:
61 | runs-on: ubuntu-latest
62 | steps:
63 | - uses: actions/checkout@v4.1.1
64 | with:
65 | submodules: 'true'
66 | token: ${{ secrets.SPEICAL_TOKEN }}
67 | - name: Set up Python 3.13
68 | uses: actions/setup-python@v5.0.0
69 | with:
70 | python-version: "3.13"
71 | - name: Cache pip packages
72 | uses: actions/cache@v4
73 | with:
74 | path: ~/.cache/pip
75 | key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
76 | restore-keys: |
77 | ${{ runner.os }}-pip-
78 | - name: Install dependencies
79 | run: |
80 | python -m pip install --upgrade pip
81 | pip install -r requirements.txt
82 | pip install -r config/development_requirements.txt
83 | - name: Extract secrets
84 | run: |
85 | mkdir -p src/database
86 | rm -rf src/database/user_data.txt
87 | touch src/database/user_data.txt
88 | echo ${{ secrets.USERNAME_LEVNET }} >> src/database/user_data.txt
89 | echo ${{ secrets.PASSWORD_LEVNET }} >> src/database/user_data.txt
90 | - name: Run pytest
91 | run: |
92 | export JUPYTER_PLATFORM_DIRS=1
93 | jupyter --paths
94 | coverage run --rcfile="config/.coveragerc_ci_cd" -m pytest $(git ls-files 'test_*.py') -c config/pytest.ini -s -v -m "not network"
95 | - name: Run coverage
96 | run: |
97 | coverage report --rcfile="config/.coveragerc_ci_cd" -m --fail-under=100
98 |
--------------------------------------------------------------------------------
/src/data/course.py:
--------------------------------------------------------------------------------
1 | from typing import Union, Set
2 |
3 | from src.data.degree import Degree
4 | from src.data.semester import Semester
5 | from src.data.type import Type
6 |
7 |
8 | class Course:
9 |
10 | def __init__(self, name: str, course_number: int, parent_course_number: int,
11 | semesters: Union[Semester, Set[Semester], None] = None,
12 | degrees: Union[Degree, Set[Degree], None] = None,
13 | mandatory_degrees: Union[Degree, Set[Degree], None] = None,
14 | is_active: bool = False, credits_count: float = 0):
15 | self.name = name
16 | self.course_number = course_number
17 | self.parent_course_number = parent_course_number
18 | self.attendance_required_for_lecture = True
19 | self.attendance_required_for_practice = True
20 | if isinstance(semesters, Semester):
21 | semesters = {semesters}
22 | self.semesters = semesters or set()
23 |
24 | if isinstance(degrees, Degree):
25 | degrees = {degrees}
26 |
27 | if isinstance(mandatory_degrees, Degree):
28 | mandatory_degrees = {mandatory_degrees}
29 |
30 | self.degrees = degrees or set()
31 | self.mandatory_degrees = mandatory_degrees or set()
32 | self.is_active = is_active
33 | self.credits_count = credits_count
34 |
35 | def add_semesters(self, semesters: Union[Semester, Set[Semester]]):
36 | if isinstance(semesters, Semester):
37 | semesters = {semesters}
38 | self.semesters.update(semesters)
39 |
40 | def add_degrees(self, degrees: Union[Degree, Set[Degree]]):
41 | if isinstance(degrees, Degree):
42 | degrees = {degrees}
43 | self.degrees.update(degrees)
44 |
45 | def add_mandatory(self, degrees: Union[Degree, Set[Degree]]):
46 | if isinstance(degrees, Degree):
47 | degrees = {degrees}
48 | self.mandatory_degrees.update(degrees)
49 |
50 | @property
51 | def optional_degrees(self) -> Set[Degree]:
52 | return self.degrees - self.mandatory_degrees
53 |
54 | def __eq__(self, other):
55 | is_equals = self.name == other.name
56 | is_equals = is_equals and self.course_number == other.course_number
57 | is_equals = is_equals and self.parent_course_number == other.parent_course_number
58 | return is_equals
59 |
60 | def __hash__(self):
61 | return hash((self.name, self.course_number, self.parent_course_number))
62 |
63 | def set_attendance_required(self, course_type: Type, required: bool):
64 | if course_type.is_lecture():
65 | self.attendance_required_for_lecture = required
66 | elif course_type.is_exercise():
67 | self.attendance_required_for_practice = required
68 |
69 | def __lt__(self, other):
70 | return self.name < other.name
71 |
72 | def __str__(self):
73 | return self.name
74 |
75 | def __repr__(self):
76 | return str(self)
77 |
78 | def __iter__(self):
79 | return iter((self.name, self.course_number, self.parent_course_number))
80 |
81 | def is_attendance_required(self, course_type: Type):
82 | attendance_required = True
83 |
84 | if course_type.is_lecture():
85 | attendance_required = self.attendance_required_for_lecture
86 |
87 | elif course_type.is_exercise():
88 | attendance_required = self.attendance_required_for_practice
89 |
90 | return attendance_required
91 |
--------------------------------------------------------------------------------
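
A usage sketch, not a repository file; the course name and numbers are invented for illustration:

from src.data.course import Course
from src.data.degree import Degree
from src.data.semester import Semester

course = Course("Operating Systems", 120, 100, Semester.FALL)
course.add_degrees({Degree.COMPUTER_SCIENCE, Degree.SOFTWARE_ENGINEERING})
course.add_mandatory(Degree.COMPUTER_SCIENCE)

# Single values are wrapped into sets, and optional_degrees is simply
# the set difference between all degrees and the mandatory ones.
assert course.semesters == {Semester.FALL}
assert course.optional_degrees == {Degree.SOFTWARE_ENGINEERING}
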
/src/data/activity.py:
--------------------------------------------------------------------------------
1 | from collections import defaultdict
2 | from typing import Dict, List
3 | from itertools import count
4 |
5 | from src.data.type import Type
6 |
7 |
8 | class Activity:
9 | _ids = count(0)
10 |
11 | def __init__(self, name: str = None, activity_type: Type = None, attendance_required: bool = None):
12 | self.activity_id = next(self._ids)
13 | self.name = name
14 | self.type = activity_type or Type.PERSONAL
15 | self.attendance_required = attendance_required if attendance_required is not None else True
16 | self.meetings = []
17 |
18 | @staticmethod
19 | def create_personal_from_database(activity_id: int, name: str):
20 | activity = Activity()
21 | activity.activity_id = activity_id
22 | activity.name = name
23 | activity.type = Type.PERSONAL
24 | activity.attendance_required = True
25 | return activity
26 |
27 | def add_slot(self, meeting):
28 | if meeting.is_crash_with_meetings(self.meetings):
29 | raise RuntimeError("Meeting clashes with another meeting")
30 | self.meetings.append(meeting)
31 |
32 | def is_free_slot(self, meeting):
33 | return not meeting.is_crash_with_meetings(self.meetings)
34 |
35 | def add_slots(self, meetings):
36 | for meeting in meetings:
37 | self.add_slot(meeting)
38 |
39 | def is_crash_with_activities(self, activities):
40 | if not activities:
41 | return False
42 | return any(self.is_crash_with_activity(activity) for activity in activities)
43 |
44 | def is_crash_with_activity(self, activity):
45 | if not self.attendance_required or not activity.attendance_required:
46 | return False
47 | return any(meeting.is_crash_with_meetings(activity.meetings) for meeting in self.meetings)
48 |
49 | def no_meetings(self):
50 | return not self.meetings
51 |
52 | def __hash__(self):
53 | return hash(self.name)
54 |
55 | @staticmethod
56 | def get_activities_by_name(activities) -> Dict[str, List]:
57 | result = defaultdict(list)
58 | for activity in activities:
59 | result[activity.name].append(activity)
60 | return dict(result)
61 |
62 | @staticmethod
63 | def extract_flat_activities_by_type(activities: List["Activity"]) -> List[List["Activity"]]:
64 | result = {activity_type: [] for activity_type in Type}
65 | for activity in activities:
66 | result[activity.type].append(activity)
67 | return [item for item in result.values() if item]
68 |
69 | @staticmethod
70 | def extract_all_options_of_activity(activities_list: List[List["Activity"]]) -> List[List["Activity"]]:
71 | if not activities_list:
72 | return [[]]
73 | all_options = []
74 | options = Activity.extract_all_options_of_activity(activities_list[1:])
75 | for activity in activities_list[0]:
76 | for option in options:
77 | all_options.append([activity] + option)
78 | return all_options
79 |
80 | def __eq__(self, other):
81 | is_equals = self.name == other.name and self.type == other.type
82 | is_equals = is_equals and self.attendance_required == other.attendance_required
83 | is_equals = is_equals and len(self.meetings) == len(other.meetings)
84 | is_equals = is_equals and all(meeting in other.meetings for meeting in self.meetings)
85 | return is_equals
86 |
87 | def __str__(self):
88 | return self.name
89 |
90 | def __repr__(self):
91 | return str(self)
92 |
--------------------------------------------------------------------------------
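
A small sketch, not part of the repository, of the grouping helpers above; the course name is made up:

from src.data.activity import Activity
from src.data.day import Day
from src.data.meeting import Meeting
from src.data.type import Type

lecture_a = Activity("Calculus", Type.LECTURE, True)
lecture_a.add_slot(Meeting(Day.SUNDAY, "09:00", "11:00"))
lecture_b = Activity("Calculus", Type.LECTURE, True)
lecture_b.add_slot(Meeting(Day.TUESDAY, "09:00", "11:00"))
practice = Activity("Calculus", Type.PRACTICE, True)
practice.add_slot(Meeting(Day.WEDNESDAY, "12:00", "13:00"))

# Group by type, then build every combination of one activity per type:
# two lecture options x one practice option = two candidate combinations.
flat = Activity.extract_flat_activities_by_type([lecture_a, lecture_b, practice])
options = Activity.extract_all_options_of_activity(flat)
assert len(options) == 2
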
/src/app/templates/index.html:
--------------------------------------------------------------------------------
[HTML markup stripped during extraction; only the text content survived. Recoverable content: the page title "Semester Organizer", a Hebrew heading "בחירת קורסים" ("Course selection"), and a Jinja block {% for degree in degrees %} ... {{ degree }} ... {% endfor %} that renders one entry per degree.]
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Custom
2 | personal_*
3 | user_data*
4 | src/database/
5 | log.txt
6 | results/
7 | dev.py
8 | src/algorithms/generated_data/*
9 | .env
10 |
11 | .vscode
12 |
13 | # Byte-compiled / optimized / DLL files
14 | __pycache__/
15 | *.py[cod]
16 | *$py.class
17 |
18 | # C extensions
19 | *.so
20 |
21 | # Distribution / packaging
22 | .Python
23 | build/
24 | develop-eggs/
25 | dist/
26 | downloads/
27 | eggs/
28 | .eggs/
29 | lib/
30 | lib64/
31 | parts/
32 | sdist/
33 | var/
34 | wheels/
35 | share/python-wheels/
36 | *.egg-info/
37 | .installed.cfg
38 | *.egg
39 | MANIFEST
40 |
41 | # PyInstaller
42 | # Usually these files are written by a python script from a template
43 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
44 | *.manifest
45 | *.spec
46 |
47 | # Installer logs
48 | pip-log.txt
49 | pip-delete-this-directory.txt
50 |
51 | # Unit test / coverage reports
52 | htmlcov/
53 | .tox/
54 | .nox/
55 | .coverage
56 | .coverage.*
57 | .cache
58 | nosetests.xml
59 | coverage.xml
60 | *.cover
61 | *.py,cover
62 | .hypothesis/
63 | .pytest_cache/
64 | cover/
65 |
66 | # Translations
67 | *.mo
68 | *.pot
69 |
70 | # Django stuff:
71 | *.log
72 | local_settings.py
73 | db.sqlite3
74 | db.sqlite3-journal
75 |
76 | # Flask stuff:
77 | instance/
78 | .webassets-cache
79 |
80 | # Scrapy stuff:
81 | .scrapy
82 |
83 | # Sphinx documentation
84 | docs/_build/
85 |
86 | # PyBuilder
87 | .pybuilder/
88 | target/
89 |
90 | # Jupyter Notebook
91 | .ipynb_checkpoints
92 |
93 | # IPython
94 | profile_default/
95 | ipython_config.py
96 |
97 | # pyenv
98 | # For a library or package, you might want to ignore these files since the code is
99 | # intended to run in multiple environments; otherwise, check them in:
100 | .python-version
101 |
102 | # pipenv
103 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
104 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
105 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
106 | # install all needed dependencies.
107 | Pipfile.lock
108 |
109 | # poetry
110 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
111 | # This is especially recommended for binary packages to ensure reproducibility, and is more
112 | # commonly ignored for libraries.
113 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
114 | poetry.lock
115 |
116 | # pdm
117 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
118 | #pdm.lock
119 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
120 | # in version control.
121 | # https://pdm.fming.dev/#use-with-ide
122 | .pdm.toml
123 |
124 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
125 | __pypackages__/
126 |
127 | # Celery stuff
128 | celerybeat-schedule
129 | celerybeat.pid
130 |
131 | # SageMath parsed files
132 | *.sage.py
133 |
134 | # Environments
135 | .env
136 | .venv
137 | env/
138 | venv/
139 | ENV/
140 | env.bak/
141 | venv.bak/
142 |
143 | # mkdocs documentation
144 | /site
145 |
146 | # mypy
147 | .mypy_cache/
148 | .dmypy.json
149 | dmypy.json
150 |
151 | # Pyre type checker
152 | .pyre/
153 |
154 | # pytype static type analyzer
155 | .pytype/
156 |
157 | # Cython debug symbols
158 | cython_debug/
159 |
160 | # PyCharm
161 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
162 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
163 | # and can be added to the global gitignore or merged into this file. For a more nuclear
164 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
165 | .idea/
166 |
--------------------------------------------------------------------------------
/tests/controller/test_controller.py:
--------------------------------------------------------------------------------
1 | import shutil
2 | from contextlib import suppress
3 | from typing import Dict, Set
4 | from unittest.mock import MagicMock, patch
5 | import pytest
6 | from pytest import fixture
7 |
8 | from src import utils
9 | from src.collector.db import Database
10 | from src.collector.network import Network
11 | from src.controller.controller import Controller
12 | from src.convertor.convertor import Convertor
13 | from src.data.course_choice import CourseChoice
14 | from src.data.language import Language
15 | from src.data.settings import Settings
16 |
17 |
18 | @patch('src.utils.get_results_path', return_value=utils.get_results_test_path())
19 | @patch('time.sleep', return_value=None)
20 | class TestController:
21 |
22 | @pytest.mark.parametrize("language", list(Language))
23 | def test_flow_console(self, _time_sleep_mock, results_path_mock, controller_mock, language):
24 | Language.set_current(language)
25 | controller_mock.database.save_language(language)
26 | inputs = []
27 | # show settings menu
28 | inputs.append("1")
29 | # don't change settings
30 | inputs.append("2")
31 | # don't add courses already done
32 | inputs.append("2")
33 | # don't show only courses can enroll in since tests always run with the same user details
34 | # inputs.append("2")
35 | # choose courses indexes
36 | inputs.append("1")
37 | # don't select lectures
38 | inputs.append("2")
39 |
40 | test_input = iter([str(item) for item in inputs])
41 |
42 | def input_next(*_unused_args):
43 | with suppress(StopIteration):
44 | return next(test_input)
45 |
46 | results = results_path_mock()
47 | shutil.rmtree(results, ignore_errors=True)
48 | with patch('builtins.input', side_effect=input_next) as _input_mock:
49 | controller_mock.run_console_flow()
50 | # Check that the results file was created.
51 |         assert results.exists(), f"ERROR: Results files were not created in {results}."
52 | files_count, _dirs = utils.count_files_and_directory(results)
53 | assert files_count >= 1
54 |
55 | @fixture
56 | def convertor_mock(self):
57 | convertor = Convertor()
58 | convertor_mock = MagicMock()
59 | convertor_mock.convert_activities = MagicMock(side_effect=convertor.convert_activities)
60 | return convertor_mock
61 |
62 | @fixture
63 | def database_mock(self):
64 | class DatabaseMock(Database):
65 |
66 | def __init__(self):
67 |                 # Use a dedicated test database so the tests never touch the real data.
68 |                 super().__init__("test_database")
69 |                 self.settings = Settings()
70 |                 self.settings.campus_name = utils.get_campus_name_test()
71 |
72 | def save_language(self, language: Language):
73 | self.logger.info("save_language was called with %s", language.name)
74 | self.settings.language = language
75 |
76 | def load_language(self):
77 | self.logger.info("load_language was called")
78 | return self.settings.language
79 |
80 | def save_settings(self, _settings: Settings):
81 | self.logger.info("save_settings was called")
82 |
83 | def save_courses_console_choose(self, _course_choices: Dict[str, CourseChoice]):
84 | self.logger.info("save_courses_console_choose was called")
85 |
86 | def load_settings(self):
87 | self.logger.info("load_settings was called")
88 | return self.settings
89 |
90 | return DatabaseMock()
91 |
92 | @fixture
93 | def network_mock(self):
94 | class NetworkMock(Network):
95 | def extract_all_activities_ids_can_enroll_in(self, *_unused_args) -> Dict[str, Set[int]]:
96 | self.logger.info("extract_all_activities_ids_can_enroll_in was called")
97 | return {}
98 |
99 | return NetworkMock()
100 |
101 | @fixture
102 | def controller_mock(self, database_mock, convertor_mock, network_mock):
103 | # pylint: disable=protected-access
104 | controller = Controller(verbose=True)
105 | controller.max_output = 1
106 | controller.convertor = convertor_mock
107 | controller.database = database_mock
108 | controller.network = network_mock
109 | controller._open_results_folder = MagicMock()
110 | return controller
111 |
--------------------------------------------------------------------------------
/scripts/argcomplete_semester_organizer.sh:
--------------------------------------------------------------------------------
1 | # Copyright 2012-2021, Andrey Kislyuk and argcomplete contributors.
2 | # Licensed under the Apache License. See https://github.com/kislyuk/argcomplete for more info.
3 |
4 | # Copy of __expand_tilde_by_ref from bash-completion
5 | __python_argcomplete_expand_tilde_by_ref () {
6 | if [ "${!1:0:1}" = "~" ]; then
7 | if [ "${!1}" != "${!1//\/}" ]; then
8 | eval $1="${!1/%\/*}"/'${!1#*/}';
9 | else
10 | eval $1="${!1}";
11 | fi;
12 | fi
13 | }
14 |
15 | # Run something, muting output or redirecting it to the debug stream
16 | # depending on the value of _ARC_DEBUG.
17 | # If ARGCOMPLETE_USE_TEMPFILES is set, use tempfiles for IPC.
18 | __python_argcomplete_run() {
19 | if [[ -z "${ARGCOMPLETE_USE_TEMPFILES-}" ]]; then
20 | __python_argcomplete_run_inner "$@"
21 | return
22 | fi
23 | local tmpfile="$(mktemp)"
24 | _ARGCOMPLETE_STDOUT_FILENAME="$tmpfile" __python_argcomplete_run_inner "$@"
25 | local code=$?
26 | cat "$tmpfile"
27 | rm "$tmpfile"
28 | return $code
29 | }
30 |
31 | __python_argcomplete_run_inner() {
32 | if [[ -z "${_ARC_DEBUG-}" ]]; then
33 | "$@" 8>&1 9>&2 1>/dev/null 2>&1
34 | else
35 | "$@" 8>&1 9>&2 1>&9 2>&1
36 | fi
37 | }
38 |
39 | # Scan the beginning of an executable file ($1) for a regexp ($2). By default,
40 | # scan for the magic string indicating that the executable supports the
41 | # argcomplete completion protocol. By default, scan the first kilobyte;
42 | # if $3 is set to -n, scan until the first line break up to a kilobyte.
43 | __python_argcomplete_scan_head() {
44 | read -s -r ${3:--N} 1024 < "$1"
45 | [[ "$REPLY" =~ ${2:-PYTHON_ARGCOMPLETE_OK} ]]
46 | }
47 |
48 | __python_argcomplete_scan_head_noerr() {
49 | __python_argcomplete_scan_head "$@" 2>/dev/null
50 | }
51 |
52 | _python_argcomplete_global() {
53 | local executable=$1
54 | __python_argcomplete_expand_tilde_by_ref executable
55 |
56 | local ARGCOMPLETE=0
57 | if [[ "$executable" == python* ]] || [[ "$executable" == pypy* ]]; then
58 | if [[ "${COMP_WORDS[1]}" == -m ]]; then
59 | if __python_argcomplete_run "$executable" -m argcomplete._check_module "${COMP_WORDS[2]}"; then
60 | ARGCOMPLETE=3
61 | else
62 | return
63 | fi
64 | elif [[ -f "${COMP_WORDS[1]}" ]] && __python_argcomplete_scan_head_noerr "${COMP_WORDS[1]}"; then
65 | local ARGCOMPLETE=2
66 | else
67 | return
68 | fi
69 | elif type -P "$executable" >/dev/null 2>&1; then
70 | local SCRIPT_NAME=$(type -P "$executable")
71 | if (type -t pyenv && [[ "$SCRIPT_NAME" = $(pyenv root)/shims/* ]]) >/dev/null 2>&1; then
72 | local SCRIPT_NAME=$(pyenv which "$executable")
73 | fi
74 | if __python_argcomplete_scan_head_noerr "$SCRIPT_NAME"; then
75 | local ARGCOMPLETE=1
76 | elif __python_argcomplete_scan_head_noerr "$SCRIPT_NAME" '^#!(.*)$' -n && [[ "${BASH_REMATCH[1]}" =~ ^.*(python|pypy)[0-9\.]*$ ]]; then
77 | local interpreter="$BASH_REMATCH"
78 | if (__python_argcomplete_scan_head_noerr "$SCRIPT_NAME" "(PBR Generated)|(EASY-INSTALL-(SCRIPT|ENTRY-SCRIPT|DEV-SCRIPT))" \
79 | && "$interpreter" "$(type -P python-argcomplete-check-easy-install-script)" "$SCRIPT_NAME") >/dev/null 2>&1; then
80 | local ARGCOMPLETE=1
81 | elif __python_argcomplete_run "$interpreter" -m argcomplete._check_console_script "$SCRIPT_NAME"; then
82 | local ARGCOMPLETE=1
83 | fi
84 | fi
85 | fi
86 |
87 | if [[ $ARGCOMPLETE != 0 ]]; then
88 | local IFS=$(echo -e '\v')
89 | COMPREPLY=( $(_ARGCOMPLETE_IFS="$IFS" \
90 | COMP_LINE="$COMP_LINE" \
91 | COMP_POINT="$COMP_POINT" \
92 | COMP_TYPE="$COMP_TYPE" \
93 | _ARGCOMPLETE_COMP_WORDBREAKS="$COMP_WORDBREAKS" \
94 | _ARGCOMPLETE=$ARGCOMPLETE \
95 | _ARGCOMPLETE_SUPPRESS_SPACE=1 \
96 | __python_argcomplete_run "$executable" "${COMP_WORDS[@]:1:ARGCOMPLETE-1}") )
97 | if [[ $? != 0 ]]; then
98 | unset COMPREPLY
99 | elif [[ "${COMPREPLY-}" =~ [=/:]$ ]]; then
100 | compopt -o nospace
101 | fi
102 | else
103 | type -t _completion_loader | grep -q 'function' && _completion_loader "$@"
104 | fi
105 | }
106 | complete -o default -o bashdefault -D -F _python_argcomplete_global
107 |
--------------------------------------------------------------------------------
/scripts/update_levnet_data.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # PYTHON_ARGCOMPLETE_OK
3 |
4 |
5 | import argparse
6 | import logging
7 | from datetime import timedelta
8 | from timeit import default_timer as timer
9 |
10 | import argcomplete
11 |
12 | from src import utils
13 | from src.collector.db import Database
14 | from src.collector.network import Network
15 | from src.data.degree import Degree
16 | from src.data.language import Language
17 | from src.data.user import User
18 |
19 |
20 | def get_args():
21 | parser = argparse.ArgumentParser()
22 | parser.add_argument("-c", "--campus", default=None, type=str,
23 | help="Download data to specific campus, default is for all the campuses")
24 | parser.add_argument("-l", "--language", default=None, type=str,
25 | help="Download data to specific language, default is for all the languages")
26 | parser.add_argument("-u", "--username", help="The username user in the server", default=None, type=str)
27 | parser.add_argument("-p", "--password", help="The password user in the server", default=None, type=str)
28 | argcomplete.autocomplete(parser)
29 | return parser.parse_args()
30 |
31 |
32 | def run_update_levnet_data_flow():
33 | start = timer()
34 | network = Network()
35 | database = Database()
36 | logger = utils.get_logging()
37 |
38 | args = get_args()
39 | if args.username and args.password:
40 | database.save_user_data(User(args.username, args.password))
41 |
42 | logger.debug("Start updating the levnet data")
43 | user = database.load_user_data()
44 | assert user, "There is no user data, can't access the levnet website."
45 | logger.debug("User data was loaded successfully")
46 |
47 | network.set_user(user)
48 | assert network.check_connection(), "ERROR: Can't connect to the levnet website"
49 | logger.debug("The username and password are valid")
50 |
51 | database.clear_all_data()
52 | database.init_database_tables()
53 | logger.debug("The database was cleared successfully")
54 |
55 | network.change_language(Language.ENGLISH)
56 | english_campuses = network.extract_campuses()
57 | logger.debug("The english campus were extracted successfully")
58 | logger.debug("The english campus are: %s", ", ".join(english_campuses.values()))
59 |
60 | network.change_language(Language.HEBREW)
61 | hebrew_campuses = network.extract_campuses()
62 | logger.debug("The hebrew campus were extracted successfully")
63 | logger.debug("The hebrew campus are: %s", ", ".join(hebrew_campuses.values()))
64 |
65 | campuses = {key: (english_campuses[key], hebrew_campuses[key]) for key in english_campuses.keys()}
66 |
67 | database.save_campuses(campuses)
68 | database.save_degrees(list(Degree))
69 | languages = [Language[args.language.upper()]] if args.language else list(Language)
70 |
71 | for language in languages:
72 | Language.set_current(language)
73 | network.change_language(language)
74 | logger.debug("The language was changed to %s", language)
75 | all_degrees = set(Degree)
76 | for degree in all_degrees:
77 |
78 | common_campuses_names = database.get_common_campuses_names()
79 | campuses = [args.campus] if args.campus else common_campuses_names
80 |
81 | for campus_name in campuses:
82 |
83 | courses = network.extract_all_courses(campus_name, degree)
84 |
85 | logger.debug("The courses were extracted successfully")
86 | logger.debug("The courses are: %s", ", ".join([course.name for course in courses]))
87 |
88 | database.save_courses(courses, language)
89 |
90 | logger.debug("Extracting data for campus: %s in language %s", campus_name, language.name)
91 | logger.debug("Start extracting the academic activities data for the campus: %s", campus_name)
92 | activities, missings = network.extract_academic_activities_data(campus_name, courses)
93 | if activities and not missings:
94 | logger.debug("The academic activities data were extracted successfully")
95 | else:
96 | logger.debug("The academic activities data were extracted with errors")
97 | logger.debug("The missing courses are: %s", ', '.join(missings))
98 |
99 | database.save_academic_activities(activities, campus_name, language)
100 | end = timer()
101 | logger.debug("The levnet data was updated successfully in %s time", str(timedelta(seconds=end - start)))
102 |
103 |
104 | def main():
105 | Language.set_current(Language.ENGLISH)
106 | utils.init_project()
107 | utils.config_logging_level(logging.DEBUG)
108 | run_update_levnet_data_flow()
109 |
110 |
111 | if __name__ == '__main__':
112 | main()
113 |
--------------------------------------------------------------------------------
/src/data/academic_activity.py:
--------------------------------------------------------------------------------
1 | from typing import List, Union, Dict
2 |
3 | from src.data.activity import Activity
4 | from src.data.course import Course
5 | from src.data.course_choice import CourseChoice
6 | from src.data.type import Type
7 |
8 |
9 | class AcademicActivity(Activity):
10 |
11 | UNLIMITED_CAPACITY = 10000000
12 | DEFAULT_ACTUAL_COURSE_NUMBER = -1
13 |
14 | def __init__(self, name: str = None, activity_type: Union[Type, int] = None, attendance_required: bool = None,
15 | lecturer_name: str = None, course_number: int = None, parent_course_number: int = None,
16 | location: str = None, activity_id: str = None, description: str = None, current_capacity: int = None,
17 | max_capacity: int = None, actual_course_number: int = None):
18 | if isinstance(activity_type, int):
19 | activity_type = Type(activity_type)
20 | super().__init__(name, activity_type, attendance_required)
21 | self.lecturer_name = lecturer_name
22 | self.course_number = course_number
23 | self.parent_course_number = parent_course_number
24 | self.location = location
25 | self.activity_id = activity_id
26 | self.description = description or ""
27 | self.current_capacity = current_capacity or 0
28 | self.max_capacity = max_capacity or AcademicActivity.UNLIMITED_CAPACITY
29 | self.actual_course_number = actual_course_number or AcademicActivity.DEFAULT_ACTUAL_COURSE_NUMBER
30 |
31 | def set_capacity(self, current_capacity: int, max_capacity: int):
32 |         """
33 |         Set the enrollment numbers of this activity.
34 |         :param current_capacity: the current number of students registered to the course
35 |         :param max_capacity: the maximum number of students allowed to register to the course
36 |         """
37 | self.current_capacity = current_capacity
38 | self.max_capacity = max_capacity
39 |
40 | def is_have_free_places(self) -> bool:
41 | return self.current_capacity < self.max_capacity
42 |
43 | def __eq__(self, other):
44 | is_equals = super().__eq__(other)
45 | is_equals = is_equals and self.lecturer_name == other.lecturer_name
46 | is_equals = is_equals and self.course_number == other.course_number
47 | is_equals = is_equals and self.parent_course_number == other.parent_course_number
48 | is_equals = is_equals and self.location == other.location
49 | return is_equals
50 |
51 | def __str__(self):
52 | return self.name
53 |
54 | def __repr__(self):
55 | return str(self)
56 |
57 | def __hash__(self):
58 | return hash((self.name, self.course_number, self.parent_course_number, self.activity_id))
59 |
60 | def same_as_course(self, course: Course):
61 | is_same = self.name == course.name
62 | is_same = is_same and self.course_number == course.course_number
63 | is_same = is_same and self.parent_course_number == course.parent_course_number
64 | return is_same
65 |
66 | @staticmethod
67 | def union_courses(academic_activities, courses: List[Course]):
68 | for activity in academic_activities:
69 | for course in courses:
70 | if activity.same_as_course(course):
71 | activity.attendance_required = course.is_attendance_required(activity.type)
72 | break
73 |
74 | @staticmethod
75 | def create_courses_choices(academic_activities: List["AcademicActivity"]) -> Dict[str, CourseChoice]:
76 |         # key = course name, value = CourseChoice holding the available lecture teachers and practice teachers
77 | academic_activities = Activity.get_activities_by_name(academic_activities)
78 | courses_choices = {}
79 |
80 | for name, activities in academic_activities.items():
81 | courses_choices[name] = CourseChoice(name, activities[0].parent_course_number, set(), set())
82 | for activity in activities:
83 | if activity.type.is_lecture():
84 | courses_choices[name].available_teachers_for_lecture.add(activity.lecturer_name)
85 | else:
86 | courses_choices[name].available_teachers_for_practice.add(activity.lecturer_name)
87 |
88 | return courses_choices
89 |
90 | def __iter__(self):
91 | return iter((self.name, self.type.value, self.attendance_required, self.lecturer_name, self.course_number,
92 | self.parent_course_number, self.location, self.activity_id, self.description,
93 | self.current_capacity, self.max_capacity, self.actual_course_number))
94 |
95 | @classmethod
96 | def union_attendance_required(cls, selected_activities: List["AcademicActivity"],
97 | courses_choices: Dict[str, CourseChoice]):
98 | for activity in selected_activities:
99 | course_choice = courses_choices[activity.name]
100 | if activity.type.is_lecture():
101 | activity.attendance_required = course_choice.attendance_required_for_lecture
102 | else:
103 | activity.attendance_required = course_choice.attendance_required_for_practice
104 |
--------------------------------------------------------------------------------
/app.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os
3 | import shutil
4 | from typing import List
5 |
6 | from flask import Flask, render_template, jsonify, request, send_file, session
7 |
8 | from src import utils
9 | from src.algorithms.csp import CSP, Status
10 | from src.controller.controller import Controller
11 | from src.collector.db import Database
12 | from src.data.degree import Degree
13 | from src.data.course import Course
14 | from src.data.language import Language
15 | from src.data.schedule import Schedule
16 | from src.data.settings import Settings
17 | from src.data.output_format import OutputFormat
18 | from src.data.translation import _
19 |
20 | app_resources = utils.SRC_PATH / 'app'
21 |
22 | app = Flask(
23 | __name__,
24 | template_folder=app_resources / 'templates',
25 | static_folder=app_resources / 'static',
26 | )
27 | app.secret_key = os.urandom(24)
28 | db = Database()
29 | utils.config_logging_level(logging.DEBUG)
30 |
31 |
32 | @app.route("/")
33 | def index():
34 | language = Language.HEBREW
35 | Language.set_current(language)
36 | settings = Settings()
37 | settings.language = language
38 | settings.year = utils.convert_year(settings.year, language)
39 | degrees: List[Degree] = db.load_degrees()
40 | degrees_names: List[str] = [_(degree.name) for degree in degrees]
41 | db.save_settings(settings)
42 | return render_template("index.html", degrees=degrees_names)
43 |
44 |
45 | @app.route("/generate", methods=["POST"])
46 | def generate():
47 | data = request.json
48 | degrees_names = data.get("degrees", [])
49 | campus = data.get("campus", "")
50 | courses_names = data.get("courses", [])
51 | logger = session.get("logger", utils.get_logging())
52 | logger.info(f"Generating schedules for degrees: {degrees_names}, campus: {campus}, courses: {courses_names}")
53 |
54 | if not degrees_names or not campus or not courses_names:
55 | return jsonify({"message": "נא לבחור תואר, קמפוס וקורסים"}), 400
56 |
57 | degrees = {degree for degree in db.load_degrees() if _(degree.name) in degrees_names}
58 | courses = db.load_courses(Language.get_current(), degrees)
59 | courses = [course for course in courses if course.name in courses_names]
60 |
61 | settings = db.load_settings()
62 | settings.output_formats = [OutputFormat.IMAGE]
63 | settings.campus_name = campus
64 | settings.degrees = degrees
65 | db.save_settings(settings)
66 | parent_courses_ids = {course.parent_course_number for course in courses}
67 | language = settings.language
68 | activities = db.load_activities_by_parent_courses_numbers(parent_courses_ids, campus, language, degrees, settings)
69 | csp = CSP()
70 | schedules: List[Schedule] = csp.extract_schedules(activities, settings=settings)
71 | logger.info(f"Finished extracting schedules, total schedules: {len(schedules)}")
72 | status = csp.get_status()
73 | if status is Status.FAILED or not schedules:
74 | return jsonify({"message": "לא היה ניתן ליצור מערכת שעות, נא לבחור קורסים אחרים."}), 400
75 |
76 | results_path = utils.get_results_path()
77 | Controller.save_schedules(schedules, settings, results_path)
78 | logger.info("Finished saving schedules")
79 | # Create a ZIP file
80 | message = _("The schedules were saved in the directory: ") + str(results_path)
81 | logger.info(message)
82 | zip_file = results_path.parent / "semester_organizer_generated_schedules.zip"
83 | zip_file.unlink(missing_ok=True)
84 |
85 | shutil.make_archive(str(zip_file).replace(".zip", ""), 'zip', results_path)
86 | session["zip"] = str(zip_file)
87 |
88 | return jsonify({
89 | "message": "נוצר בהצלחה",
90 | "zip": str(zip_file)
91 | })
92 |
93 |
94 | @app.route("/download_zip", methods=["POST"])
95 | def download_zip():
96 | if "zip" not in session:
97 | return jsonify({"message": "לא נמצא קובץ להורדה"}), 400
98 | return send_file(session["zip"], as_attachment=True)
99 |
100 |
101 | def create_garbage_file(file_path):
102 | """Generate a fake PNG file with random garbage data."""
103 | with open(file_path, "wb") as f:
104 | f.write(os.urandom(1024)) # Create a 1KB garbage file
105 |
106 |
107 | @app.route("/get_campuses", methods=["POST"])
108 | def get_campuses():
109 | selected_degrees = request.json.get("degrees", [])
110 | if not selected_degrees:
111 | return jsonify([])
112 | campuses = db.get_common_campuses_names()
113 | return jsonify(sorted(campuses))
114 |
115 |
116 | @app.route("/get_courses", methods=["POST"])
117 | def get_courses():
118 | degrees_names: List[str] = request.json["degrees"]
119 | campus: str = request.json["campus"]
120 | degrees = {degree for degree in db.load_degrees() if _(degree.name) in degrees_names}
121 |
122 | setting = db.load_settings()
123 | setting.campus_name = campus
124 | setting.degrees = degrees
125 | db.save_settings(setting)
126 |
127 | courses: List[Course] = db.load_courses(Language.get_current(), degrees)
128 | courses_names = list(sorted([course.name for course in courses]))
129 | return jsonify(courses_names)
130 |
131 |
132 | if __name__ == "__main__":
133 | app.run(debug=True, port=5000)
134 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | # Semester Organizer
3 |
4 | The semester organizer is a tool for students at the Lev Institute to build a schedule quickly and efficiently.
5 | The tool is built on a CSP (constraint satisfaction problem) algorithm.
6 | With the semester organizer, you can create a timetable that fits your preferences.
7 | 
8 | ### Save your time and effort, and get the schedule that suits you best!
9 |
10 |
11 | ## Table of Contents
12 |
13 | - [Preview](#preview)
14 | - [Features](#features)
15 | - [Prerequisites](#prerequisites)
16 | - [How to use](#how-to-use)
17 | - [WebUI (Web User Interface - One click) - NEW !](#webui-web-user-interface---one-click---new-)
18 | - [CLI (Command Line Interface)](#cli-command-line-interface)
19 | - [How to update courses](#how-to-update-courses)
20 | - [FAQ](#faq)
21 | - [License](#license)
22 | - [Feedback](#feedback)
23 |
24 | ## Preview
25 |
26 | The semester organizer helps you create a personalized schedule quickly and efficiently. Below are some example outputs:
27 |
28 | 
29 |
30 | 
31 |
32 |
33 |
34 |
35 | ## Features
36 |
37 | - Option to take into account whether all the places in a course are already taken.
38 | - Option to consider only the courses you can actually enroll in, according to the server (username and password for the Levnet site are required).
39 | - Choose which days you want to study.
40 | - Cross-platform.
41 | - Supports both Hebrew and English.
42 |
43 | #### Make your choices easy!
44 |
45 | - Show all courses, listing only those that can be enrolled in.
46 | - Show all courses that can be taken once all of their prerequisite courses have been completed.
47 | - View all courses that are active in the current semester.
48 | - Select your favorite lecturers.
49 | - Select courses from other degrees and create a combined schedule (for example, if you study Computer Science and want to take courses from Software Engineering).
50 | - Configure the settings however you want!
51 |
52 | #### Easy to use!
53 |
54 | - Just run `python main.py` and select your courses.
55 |
56 | #### Several output formats!
57 |
58 | - CSV
59 | - Excel
60 | - Image
61 |
62 | ## Prerequisites
63 |
64 | - Python 3.8 or above; version 3.13 or newer is preferred for the most efficient and fast use.
65 | - Chrome browser installed.
66 | - Git Bash or any terminal that supports your language (PyCharm doesn't support Hebrew by default).
67 |
68 | ### Configure Git Bash to support Hebrew
69 | 
70 | - Open the settings (right-click inside the Git Bash window)
71 | - Options -> Text -> Font -> Select the font "Miriam Fixed"
72 | - Options -> Text -> Character set: Change to utf-8
73 |
74 | ## WebUI (Web User Interface - One click) - NEW !
75 | - Download the project, 3 options:
76 | 1. via github download link: https://github.com/ItamarShalev/semester_organizer/archive/refs/heads/main.zip
77 | 2. via git https clone:
78 | ```bash
79 | git clone https://github.com/ItamarShalev/semester_organizer.git
80 | ```
81 | 3. via ssh git clone:
82 | ```bash
83 | git clone git@github.com:ItamarShalev/semester_organizer.git
84 | ```
85 | - Start the web server by running the file `SemesterOrganizer.bat` (for Linux or macOS, see the note below).
86 |
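On Linux or macOS (or if you prefer not to use the `.bat` file), you can presumably start the same web server directly from the project root; a minimal sketch, assuming the dependencies from `requirements.txt` are installed:

```bash
# Install the dependencies (only needed once)
python -m pip install -r requirements.txt
# Start the web UI; app.py serves it on http://localhost:5000
python app.py
```
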
87 | ## CLI (Command Line Interface)
88 |
89 | - Open a terminal (Git Bash is recommended, and required for Hebrew)
90 | - Clone the project
91 |
92 |
93 | via https:
94 | ```bash
95 | git clone https://github.com/ItamarShalev/semester_organizer.git
96 | ```
97 |
98 | via ssh:
99 | ```bash
100 | git clone git@github.com:ItamarShalev/semester_organizer.git
101 | ```
102 |
103 | - Change the directory to the project
104 |
105 | ```bash
106 | cd semester_organizer
107 | ```
108 | - Update pip version
109 |
110 | ```bash
111 | python -m pip install --upgrade pip
112 | ```
113 |
114 | - Install dependencies
115 |
116 | ```bash
117 | python -m pip install -r requirements.txt
118 | ```
119 |
120 | - Start the main console flow.
121 |
122 | ```bash
123 | python main.py
124 | ```
125 | If you want to change the language or start another flow, check the help.
126 |
127 | ```bash
128 | python main.py -h
129 | ```
130 |
131 | For example:
132 |
133 | ```bash
134 | python main.py --flow console --language hebrew
135 | ```
136 |
137 | The above line is the default and you can just run:
138 |
139 | ```bash
140 | python main.py
141 | ```
142 |
143 | - To get the latest data and code, you can reset your repo (your personal selections will be kept)
144 | ```bash
145 | git fetch --all && git reset --hard origin/main
146 | ```
147 |
148 | ## How to update courses
149 |
150 | The semester organizer works offline for efficiency.
151 | 
152 | To update the course information (meeting hours and so on),
153 | you should run the update script.
154 | 
155 | (Note: the username and password flags are needed only on the first update.)
156 | ```bash
157 | python scripts/update_levnet_data.py --username <your_username> --password <your_password>
158 | ```
159 | 
160 | Don't worry, the user information is saved locally only.
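
The script also accepts optional `--campus` and `--language` flags to limit the download (see `scripts/update_levnet_data.py`); for example, to update only the Hebrew data for a single campus (the campus name here is just an illustration):

```bash
python scripts/update_levnet_data.py --language hebrew --campus "Machon Lev"
```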
161 |
162 | ## FAQ
163 |
164 | #### I have issues with my data, some data is missing, or some tables aren't there.
165 |
166 | The project is under active development, so new features occasionally change the underlying database structure.
167 | You can clean your local data and pull the latest version from the server (see the backup tip below if you want to keep your personal data):
168 |
169 | ```bash
170 | git clean -fdx && git fetch --all && git reset --hard origin/main
171 | ```
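
Note that `git clean -fdx` also removes ignored files, including your personal data (files matching `personal_*` and `user_data*`, see `.gitignore`). If you want to keep them, you can copy them aside first; a minimal sketch (the backup directory name is just an example):

```bash
# Back up personal files before cleaning; ignore the error if none exist
mkdir -p ~/semester_organizer_backup
cp personal_* user_data* ~/semester_organizer_backup/ 2>/dev/null || true
```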
172 |
173 | ## License
174 |
175 | [Apache-2.0](LICENSE)
176 |
177 |
178 | ## Feedback
179 |
180 | If you have any feedback, ideas, or suggestions, please reach out via email: itamar.shalevv@gmail.com
181 |
182 | Also, if you liked it, please give the project a star (top right).
183 |
--------------------------------------------------------------------------------
/tests/algorithms/test_constraint_courses.py:
--------------------------------------------------------------------------------
1 | import warnings
2 |
3 | from pytest import fixture
4 |
5 | from src import utils
6 | from src.algorithms.constraint_courses import ConstraintCourses
7 | from src.collector.db import Database
8 | from src.data.degree import Degree
9 | from src.data.language import Language
10 |
11 |
12 | class TestConstraintCourses:
13 |
14 | def test_export_generated_json_data(self):
15 | Language.set_current(Language.HEBREW)
16 | ConstraintCourses().export_generated_json_data()
17 |
18 | def test_new_course_exist_in_levnet_but_not_in_constraint(self):
19 | Language.set_current(Language.HEBREW)
20 | courses, *_ = ConstraintCourses().prepare_data()
21 | degrees = {Degree.COMPUTER_SCIENCE, Degree.SOFTWARE_ENGINEERING}
22 | all_courses = Database().load_courses(Language.HEBREW, degrees)
23 | all_courses_id_name = {course.course_number: course.name for course in all_courses if course.is_active}
24 | courses_doesnt_exist = set(all_courses_id_name.keys()) - {course.course_number for course in courses.values()}
25 | list_doesnt_exist = {course_number: all_courses_id_name[course_number]
26 | for course_number in courses_doesnt_exist}
27 | str_courses_names = '\n'.join(f"Course id: '{course_number}',Course name: '{course_name}'"
28 | for course_number, course_name in list_doesnt_exist.items())
29 | assert not courses_doesnt_exist, f"ERROR: There are more new courses\n" \
30 | f"Please add them.\n" \
31 | f"Courses:\n{str_courses_names}."
32 |
33 | def test_deprecated_course_exist_in_constraint_but_not_in_levnet(self):
34 | Language.set_current(Language.HEBREW)
35 | courses, *_ = ConstraintCourses().prepare_data()
36 | degrees = {Degree.COMPUTER_SCIENCE, Degree.SOFTWARE_ENGINEERING}
37 | all_courses = Database().load_courses(Language.HEBREW, degrees)
38 | all_levnet_courses_ids = {course.course_number for course in all_courses}
39 | constraint_courses_ids_names = {course.course_number: course.name for course in courses.values()}
40 | courses_doesnt_exist = set(constraint_courses_ids_names.keys()) - all_levnet_courses_ids
41 |
42 | list_doesnt_exist = {course_number: constraint_courses_ids_names[course_number]
43 | for course_number in courses_doesnt_exist}
44 | str_courses_names = '\n'.join(f"Course id: '{course_number}',Course name: '{course_name}'"
45 | for course_number, course_name in list_doesnt_exist.items())
46 | if list_doesnt_exist:
47 | warnings.simplefilter("always")
48 | warning_message = f"WARNING: There can be more deprecated courses.\n" \
49 |                               f"Check whether each course still exists this semester or is really deprecated.\n" \
50 | f"Please remove them if needed or add 'deprecated' = true in the json data file.\n" \
51 | f"Courses:\n{str_courses_names}."
52 | print(warning_message)
53 | warnings.warn(warning_message, UserWarning)
54 |
55 | def test_prepare_data(self, constraint_courses_mock):
56 | all_courses_in_txt, are_blocked_by_result, blocks_courses_result = constraint_courses_mock.prepare_data()
57 | assert all_courses_in_txt
58 | assert are_blocked_by_result
59 | assert blocks_courses_result
60 |
61 | def test_prepare_personal_data(self, constraint_courses_mock):
62 | data = constraint_courses_mock.prepare_personal_data()
63 | all_courses_in_txt, are_blocked_by_result, blocks_courses_result = data
64 | assert all_courses_in_txt
65 | assert are_blocked_by_result
66 | assert blocks_courses_result
67 |
68 | def test_get_courses_cant_do(self, constraint_courses_mock):
69 | courses_cant_do = constraint_courses_mock.get_courses_cant_do()
70 | assert courses_cant_do
71 |
72 | def test_get_courses_can_do(self, constraint_courses_mock):
73 | courses_can_do = constraint_courses_mock.get_courses_can_do()
74 | assert courses_can_do
75 |
76 | @fixture
77 | def constraint_courses_mock(self):
78 | Language.set_current(Language.HEBREW)
79 |
80 | class DatabaseMock(Database):
81 | def __init__(self):
82 | super().__init__("test_database")
83 |
84 | class ConstraintCoursesMock(ConstraintCourses):
85 | _ALL_COURSES_FILE_NAME = "all_courses_blocked_and_blocks_info.json"
86 | _ALL_COURSES_FILE_NAME_PERSONAL = "personal_all_courses_blocked_and_blocks_info.json"
87 |
88 | BLOCKED_COURSES_PATH = ConstraintCourses.GENERATED_DATA_PATH / "are_blocked_by_courses.json"
89 | BLOCKS_COURSES_PATH = ConstraintCourses.GENERATED_DATA_PATH / "blocks_courses.json"
90 | ALL_INFO_PATH = ConstraintCourses.GENERATED_DATA_PATH / _ALL_COURSES_FILE_NAME
91 | PERSONAL_PASSED_COURSES_PATH = ConstraintCourses.GENERATED_DATA_PATH / "personal_passed_courses.json"
92 | PERSONAL_BLOCKED_COURSES_PATH = ConstraintCourses.GENERATED_DATA_PATH / "personal_are_blocked_by.json"
93 | PERSONAL_BLOCKS_COURSES_PATH = ConstraintCourses.GENERATED_DATA_PATH / "personal_blocks_courses.json"
94 | PERSONAL_ALL_INFO_PATH = ConstraintCourses.GENERATED_DATA_PATH / _ALL_COURSES_FILE_NAME_PERSONAL
95 |
96 | database = DatabaseMock()
97 | database.clear_personal_database()
98 | database.init_personal_database_tables()
99 | course = utils.get_course_data_test()
100 | database.save_courses_already_done({course})
101 | constraint_courses = ConstraintCoursesMock()
102 | constraint_courses.database = database
103 | return constraint_courses
104 |
--------------------------------------------------------------------------------
/src/app/static/css/styles.css:
--------------------------------------------------------------------------------
1 | /* General Styles */
2 | body {
3 | font-family: 'Arial', sans-serif;
4 | background-color: #1a1a1a; /* Dark background */
5 | color: #ffffff; /* White text */
6 | direction: rtl; /* RTL for Hebrew */
7 | }
8 |
9 | /* Container Styles */
10 | #container {
11 | max-width: 450px; /* Smaller width */
12 | width: 100%;
13 | margin: 0 auto;
14 | padding: 15px; /* Reduced padding */
15 | background-color: #2d2d2d; /* Dark container background */
16 | border-radius: 10px;
17 | box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
18 | }
19 |
20 | /* Section Dividers */
21 | .section-divider {
22 | border-bottom: 1px solid #4a4a4a; /* Thin divider */
23 | margin: 15px 0; /* Reduced margin */
24 | padding-bottom: 15px; /* Reduced padding */
25 | }
26 |
27 | /* Degrees List */
28 | #degrees-list {
29 | display: flex;
30 | flex-wrap: wrap;
31 | gap: 8px; /* Smaller gap */
32 | margin-bottom: 15px; /* Reduced margin */
33 | }
34 |
35 | .degree-item {
36 | padding: 8px 12px; /* Reduced padding */
37 | background-color: #3a3a3a; /* Dark gray background */
38 | border-radius: 5px;
39 | cursor: pointer;
40 | transition: background-color 0.2s;
41 | font-size: 14px; /* Smaller font */
42 | }
43 |
44 | .degree-item:hover {
45 | background-color: #4a4a4a; /* Lighter gray on hover */
46 | }
47 |
48 | .degree-item.selected {
49 | background-color: #6d28d9; /* Purple for selected degrees */
50 | }
51 |
52 | /* Campuses Dropdown */
53 | #campuses {
54 | width: 100%;
55 | padding: 8px; /* Reduced padding */
56 | background-color: #3a3a3a; /* Dark gray background */
57 | border: 1px solid #4a4a4a;
58 | border-radius: 5px;
59 | color: #ffffff;
60 | margin-bottom: 15px; /* Reduced margin */
61 | font-size: 14px; /* Smaller font */
62 | }
63 |
64 | /* Courses List */
65 | #courses-list {
66 | max-height: 250px; /* Smaller height */
67 | overflow-y: auto;
68 | margin-bottom: 15px; /* Reduced margin */
69 | }
70 |
71 | .course-item {
72 | padding: 8px; /* Reduced padding */
73 | background-color: #3a3a3a; /* Dark gray background */
74 | border-radius: 5px;
75 | cursor: pointer;
76 | transition: background-color 0.2s;
77 | margin-bottom: 8px; /* Reduced margin */
78 | font-size: 14px; /* Smaller font */
79 | }
80 |
81 | .course-item:hover {
82 | background-color: #4a4a4a; /* Lighter gray on hover */
83 | }
84 |
85 | .course-item.selected {
86 | background-color: #6d28d9; /* Purple for selected courses */
87 | }
88 |
89 | /* Selected Courses Chips */
90 | #selected-courses {
91 | display: flex;
92 | flex-wrap: wrap;
93 | gap: 8px; /* Smaller gap */
94 | margin-bottom: 15px; /* Reduced margin */
95 | }
96 |
97 | .chip {
98 | display: inline-flex;
99 | align-items: center;
100 | background-color: #6d28d9; /* Purple background */
101 | color: white;
102 | padding: 4px 8px; /* Reduced padding */
103 | border-radius: 9999px; /* Fully rounded */
104 | font-size: 14px; /* Smaller font */
105 | }
106 |
107 | .chip .close {
108 | cursor: pointer;
109 | margin-left: 8px; /* Reduced margin */
110 | color: #d1d5db; /* Light gray */
111 | transition: color 0.2s;
112 | }
113 |
114 | .chip .close:hover {
115 | color: white; /* White on hover */
116 | }
117 |
118 | /* Title Styles */
119 | .section-title {
120 | font-size: 18px; /* Slightly smaller */
121 | font-weight: bold;
122 | text-align: right; /* Align text to right */
123 | background: linear-gradient(to right, #2d0076, #3b82f6); /* Gradient */
124 | -webkit-background-clip: text;
125 | -webkit-text-fill-color: transparent; /* Transparent fill */
126 | display: block;
127 | margin-bottom: 5px;
128 | }
129 |
130 | /* Button Text */
131 | #generate-btn {
132 | font-size: 16px; /* Slightly smaller than before */
133 | font-weight: bold;
134 | text-align: center;
135 | background: linear-gradient(to right, #6d28d9, #3b82f6);
136 | border-radius: 8px;
137 | padding: 10px 0;
138 | }
139 |
140 | #generate-btn:hover {
141 | opacity: 0.9;
142 | }
143 |
144 |
145 | /* Scrollbar */
146 | ::-webkit-scrollbar {
147 | width: 6px; /* Thinner scrollbar */
148 | }
149 |
150 | ::-webkit-scrollbar-track {
151 | background: #374151; /* Dark scrollbar track */
152 | }
153 |
154 | ::-webkit-scrollbar-thumb {
155 | background: #6d28d9; /* Purple scrollbar thumb */
156 | border-radius: 4px;
157 | }
158 |
159 | ::-webkit-scrollbar-thumb:hover {
160 | background: #7c3aed; /* Lighter purple on hover */
161 | }
162 |
163 | /* Search Input */
164 | #search-box {
165 | width: 100%;
166 | padding: 6px; /* Reduced padding */
167 | background-color: #3a3a3a; /* Dark gray background */
168 | border: 1px solid #4a4a4a; /* Light gray border */
169 | border-radius: 4px; /* Slightly rounded corners */
170 | color: #ffffff; /* White text */
171 | font-size: 13px; /* Smaller font */
172 | margin-bottom: 8px; /* Space below the input */
173 | }
174 |
175 | #search-box::placeholder {
176 | color: #a0a0a0; /* Light gray placeholder */
177 | }
178 |
179 | #search-box:focus {
180 | outline: none;
181 | border-color: #6d28d9; /* Purple border on focus */
182 | }
183 |
184 | #loading-container {
185 | width: 100%;
186 | text-align: center;
187 | margin-top: 40px; /* Creates space below the main content */
188 | position: relative;
189 | bottom: 0;
190 | }
191 |
192 | .spinner {
193 | border: 5px solid rgba(255, 255, 255, 0.2);
194 | border-top: 5px solid #6d28d9;
195 | border-radius: 50%;
196 | width: 50px;
197 | height: 50px;
198 | animation: spin 1s linear infinite;
199 | display: block;
200 | margin: 0 auto 10px;
201 | }
202 |
203 | @keyframes spin {
204 | 0% { transform: rotate(0deg); }
205 | 100% { transform: rotate(360deg); }
206 | }
207 |
208 | #loading-text {
209 | display: block;
210 | color: white;
211 | font-size: 16px;
212 | font-weight: bold;
213 | }
214 |
--------------------------------------------------------------------------------
/src/algorithms/constraint_courses.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | from typing import Set, Dict, Tuple
3 |
4 | from functools import lru_cache
5 | from src.collector.db import Database
6 | from src.data.course_constraint import CourseConstraint
7 | from src.data.degree import Degree
8 | from src.data.language import Language
9 |
10 | Name = str
11 |
12 |
13 | class ConstraintCourses:
14 | CURRENT_DIR = Path(__file__).parent
15 | GENERATED_DATA_PATH = CURRENT_DIR / "generated_data"
16 | CONSTRAINT_COURSES_DATA_PATH = CURRENT_DIR / "constraint.json"
17 |
18 | BLOCKED_COURSES_PATH = GENERATED_DATA_PATH / "are_blocked_by_courses.json"
19 | BLOCKS_COURSES_PATH = GENERATED_DATA_PATH / "blocks_courses.json"
20 | ALL_INFO_PATH = GENERATED_DATA_PATH / "all_courses_blocked_and_blocks_info.json"
21 |
22 | PERSONAL_PASSED_COURSES_PATH = GENERATED_DATA_PATH / "personal_passed_courses.json"
23 | PERSONAL_BLOCKED_COURSES_PATH = GENERATED_DATA_PATH / "personal_are_blocked_by.json"
24 | PERSONAL_BLOCKS_COURSES_PATH = GENERATED_DATA_PATH / "personal_blocks_courses.json"
25 | PERSONAL_ALL_INFO_PATH = GENERATED_DATA_PATH / "personal_all_courses_blocked_and_blocks_info.json"
26 |
27 | def __init__(self):
28 | self.database = Database()
29 | self.course_constraint = CourseConstraint()
30 |
31 | def export_data(self, are_blocked_by_result: Dict, blocks_courses_result: Dict,
32 | file_path_blocked: Path, file_path_blocks: Path, file_path_all: Path):
33 |
34 | self.course_constraint.export(
35 | list(are_blocked_by_result.values()),
36 | include_blocked_by=True,
37 | include_blocks=False,
38 | include_can_be_taken_in_parallel=True,
39 | file_path=file_path_blocked
40 | )
41 | self.course_constraint.export(
42 | list(blocks_courses_result.values()),
43 | include_blocked_by=False,
44 | include_blocks=True,
45 | include_can_be_taken_in_parallel=True,
46 | file_path=file_path_blocks
47 | )
48 | self.course_constraint.export(
49 | list(blocks_courses_result.values()),
50 | include_blocked_by=True,
51 | include_blocks=True,
52 | include_can_be_taken_in_parallel=True,
53 | file_path=file_path_all
54 | )
55 |
56 | def export_generated_json_data(self):
57 | _unused_all_courses_in_json, are_blocked_by_result, blocks_courses_result = self.prepare_data()
58 | self.export_data(are_blocked_by_result, blocks_courses_result, self.BLOCKED_COURSES_PATH,
59 | self.BLOCKS_COURSES_PATH, self.ALL_INFO_PATH)
60 |
61 | _unused_all_courses_in_json, are_blocked_by_result, blocks_courses_result = self.prepare_personal_data()
62 |
63 | self.export_data(are_blocked_by_result, blocks_courses_result, self.PERSONAL_BLOCKED_COURSES_PATH,
64 | self.PERSONAL_BLOCKS_COURSES_PATH, self.PERSONAL_ALL_INFO_PATH)
65 |
66 | @lru_cache(maxsize=128)
67 |     def prepare_data(self) -> Tuple[Dict[Name, CourseConstraint], Dict[Name, Set[Name]], Dict[Name, Set[Name]]]:
68 | all_courses_in_json = self.course_constraint.extract_courses_data(self.CONSTRAINT_COURSES_DATA_PATH)
69 | are_blocked_by_result = self.course_constraint.get_extended_blocked_by_courses(all_courses_in_json)
70 | blocks_courses_result = self.course_constraint.get_extended_blocks_courses(are_blocked_by_result)
71 | return all_courses_in_json, are_blocked_by_result, blocks_courses_result
72 |
73 | @lru_cache(maxsize=128)
74 | def prepare_personal_data(self):
75 | all_courses_in_json, are_blocked_by_result, blocks_courses_result = self.prepare_data()
76 | courses_already_done = self.database.load_courses_already_done(Language.HEBREW)
77 | courses_already_done = {course.course_number: course.name for course in courses_already_done}
78 |
79 | are_blocked_by_result = {object_id: course for object_id, course in are_blocked_by_result.items()
80 | if course.course_number not in courses_already_done}
81 |
82 | blocks_courses_result = {object_id: course for object_id, course in blocks_courses_result.items()
83 | if course.course_number not in courses_already_done}
84 |
85 | return all_courses_in_json, are_blocked_by_result, blocks_courses_result
86 |
87 | @lru_cache(maxsize=128)
88 | def _get_course_do(self, can: bool) -> Set[Tuple[Name, int]]:
89 | _unused_courses, are_blocked_by_result, _unused_blocks_courses_result = self.prepare_data()
90 | are_blocked_by_result = {constraint_course.course_number: constraint_course
91 | for constraint_course in are_blocked_by_result.values()}
92 |
93 | degrees = {Degree.SOFTWARE_ENGINEERING, Degree.COMPUTER_SCIENCE}
94 | all_courses = self.database.load_courses(Language.HEBREW, degrees)
95 | courses_already_done = self.database.load_courses_already_done(Language.HEBREW)
96 | courses_already_done_numbers = {course.course_number for course in courses_already_done}
97 | result = set()
98 | for course in all_courses:
99 | if not course.is_active:
100 | continue
101 | all_needed_courses = {constraint_course.course_number
102 | for constraint_course in are_blocked_by_result[course.course_number].blocked_by}
103 | left_courses = all_needed_courses - courses_already_done_numbers
104 | if can ^ bool(left_courses):
105 | result.add((course.name, course.parent_course_number))
106 | return result
107 |
108 | def get_courses_cant_do(self) -> Set[Tuple[Name, int]]:
109 | """
110 | :return: set of courses that can't be done course name in hebrew and parent course number
111 | """
112 | return self._get_course_do(can=False)
113 |
114 | def get_courses_can_do(self) -> Set[Tuple[Name, int]]:
115 | return self._get_course_do(can=True)
116 |
--------------------------------------------------------------------------------
/scripts/run_linter.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # PYTHON_ARGCOMPLETE_OK
3 |
4 |
5 | import argparse
6 | import logging
7 | import os
8 | import shutil
9 | import subprocess
10 | import sys
11 | from contextlib import suppress
12 |
13 | import argcomplete
14 |
15 | from src import utils
16 | from src.data.user import User
17 |
18 |
19 | def get_all_python_files(test_files=False):
20 | files_result = []
21 | blocked_dirs = [".idea", "results", "database", "logs", ".github", ".pytest_cache", "__pycache__", ".git"]
22 | blocked_dirs += [".venv", "venv"]
23 | for root, dirs, files in os.walk(utils.ROOT_PATH):
24 | dirs[:] = [d for d in dirs if d not in blocked_dirs]
25 | for file in files:
26 | if file.endswith(".py") and (not test_files or file.startswith("test_")):
27 | files_result.append(os.path.join(root, file))
28 | return files_result
29 |
30 |
31 | def pip_install(*arguments):
32 | return_code = subprocess.call([sys.executable, "-m", "pip", "install", *arguments])
33 | assert return_code == 0, "ERROR: pip failed to install, check your network connection"
34 |
35 |
36 | def clear_project():
37 | folders_to_clear = [".pytest_cache", "__pycache__"]
38 | files_to_clear = ["coverage.xml", ".coverage"]
39 | for folder in folders_to_clear:
40 | folder_path = os.path.join(utils.ROOT_PATH, folder)
41 | shutil.rmtree(folder_path, ignore_errors=True)
42 |
43 | for file in files_to_clear:
44 | file_path = os.path.join(utils.ROOT_PATH, file)
45 | with suppress(FileNotFoundError):
46 | os.remove(file_path)
47 |
48 |
49 | def get_args():
50 | parser = argparse.ArgumentParser()
51 | parser.add_argument("-u", "--username", help="The username user in the server", default=None)
52 | parser.add_argument("-p", "--password", help="The password user in the server", default=None)
53 | parser.add_argument("-i", "--install", help="Install all the needed packages", default=False, action="store_true")
54 | parser.add_argument("-c", "--coverage", help="Run coverage", default=False, action="store_true")
55 | parser.add_argument("-n", "--network", help="Run network pytest mark", default=False, action="store_true")
56 | parser.add_argument("-a", "--all", help="Run all tests", default=False, action="store_true")
57 | parser.add_argument("-v", "--verbose", help="Print more debug logs", default=False, action="store_true")
58 | argcomplete.autocomplete(parser)
59 | arguments = parser.parse_args()
60 | return arguments
61 |
62 |
63 | def update_pip():
64 | pip_install("--upgrade", "pip")
65 |
66 |
67 | def install_requirements():
68 | pip_install("-r", "requirements.txt")
69 |
70 |
71 | def install_development_requirements():
72 | pip_install("-r", str(utils.CONFIG_PATH / "development_requirements.txt"))
73 |
74 |
75 | def get_user_data(argument_args):
76 | # pylint: disable=import-outside-toplevel
77 |     # In case the user has not installed the requirements yet,
78 |     # everything is imported only after the requirements are installed
79 | from src.collector.db import Database
80 | if not argument_args.username or not argument_args.password:
81 | user_data = Database().load_user_data()
82 | else:
83 | user_data = User(argument_args.username, argument_args.password)
84 | Database().save_user_data(user_data)
85 |
86 | return user_data
87 |
88 |
89 | def _build_pytest_command(arguments):
90 | coveragerc_ci_cd = os.path.join(utils.CONFIG_PATH, ".coveragerc_ci_cd")
91 | pytest_config = str(utils.CONFIG_PATH / "pytest.ini")
92 |
93 | if arguments.coverage:
94 | if arguments.network:
95 | pytest_cmd = "coverage run -m pytest".split(" ")
96 | else:
97 | pytest_cmd = f"coverage run --rcfile={coveragerc_ci_cd} -m pytest".split(" ")
98 | else:
99 | pytest_cmd = ["pytest"]
100 |
101 | pytest_arguments = ['-m', 'not network']
102 | if arguments.all:
103 | pytest_arguments = ['--reruns', '2', '--reruns-delay', '5']
104 | elif arguments.network:
105 | pytest_arguments = ['--reruns', '2', '--reruns-delay', '5']
106 | if arguments.verbose:
107 | pytest_arguments += ['-v']
108 | pytest_arguments += ['-c', pytest_config]
109 |
110 | return pytest_cmd, pytest_arguments
111 |
112 |
113 | def _build_coverage_command(arguments):
114 | if arguments.network:
115 | coverage_cmd = "coverage report -m --fail-under=95"
116 | else:
117 | coveragerc_ci_cd = os.path.join(utils.CONFIG_PATH, ".coveragerc_ci_cd")
118 | public_network_path = os.path.join(utils.SRC_PATH, "collector", "network.py")
119 | coverage_cmd = f"coverage report --rcfile={coveragerc_ci_cd} -m " \
120 | f"--omit='{public_network_path}' --fail-under=100"
121 |
122 | return coverage_cmd.split(" ")
123 |
124 |
125 | def run_linter_and_tests(arguments):
126 | pytest_cmd, pytest_arguments = _build_pytest_command(arguments)
127 | pylintrc_path = str(utils.CONFIG_PATH / ".pylintrc")
128 | pycodestyle_config_path = str(utils.CONFIG_PATH / "setup.cfg")
129 |
130 | return_code = subprocess.call(["pycodestyle", "--config", pycodestyle_config_path, *get_all_python_files()])
131 |
132 | return_code += subprocess.call(["pylint", "--rcfile", pylintrc_path, *get_all_python_files()])
133 |
134 | return_code += subprocess.call([*pytest_cmd, *get_all_python_files(test_files=True), *pytest_arguments])
135 |
136 | if arguments.coverage:
137 | coverage_cmd = _build_coverage_command(arguments)
138 | return_code += subprocess.call([*coverage_cmd])
139 |     assert return_code == 0, "ERROR: Linter or tests failed, check the log file"
140 |
141 |
142 | def main():
143 | utils.init_project()
144 | clear_project()
145 | args = get_args()
146 | if args.install:
147 | update_pip()
148 | install_requirements()
149 | install_development_requirements()
150 | utils.config_logging_level(logging.DEBUG if args.verbose else logging.INFO)
151 | get_user_data(args)
152 | run_linter_and_tests(args)
153 |
154 |
155 | if __name__ == '__main__':
156 | main()
157 |
--------------------------------------------------------------------------------
/src/utils.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os
3 | import shutil
4 | import sys
5 | import time
6 | from pathlib import Path
7 | from contextlib import suppress
8 | from datetime import datetime
9 | from operator import itemgetter
10 | from typing import Tuple, Dict, Any
11 |
12 | import urllib3
13 | from urllib3.exceptions import InsecureRequestWarning
14 |
15 | from src.data.course import Course
16 | from src.data.degree import Degree
17 | from src.data.semester import Semester
18 | from src.data.language import Language
19 | from src.data.translation import _
20 |
21 | ENCODING = "utf-8-sig"
22 | ROOT_PATH = Path(__file__).parent.parent.resolve()
23 | SRC_PATH = ROOT_PATH / "src"
24 | CONFIG_PATH = ROOT_PATH / "config"
25 | SCRIPTS_PATH = ROOT_PATH / "scripts"
26 | LOG_FILE_HANDLER = logging.FileHandler(filename=ROOT_PATH / "log.txt", encoding=ENCODING, mode='w')
27 | DATA_SOFTWARE_VERSION = "1.0"
28 | SOFTWARE_VERSION = "1.0"
29 |
30 |
31 | def sort_dict_by_key(dictionary: Dict[Any, Any]) -> Dict[Any, Any]:
32 | return dict(sorted(dictionary.items(), key=itemgetter(0)))
33 |
34 |
35 | def disable_logger_third_party_warnings():
36 | urllib3.disable_warnings(InsecureRequestWarning)
37 | logging.getLogger("urllib3").setLevel(logging.WARNING)
38 | logging.getLogger("requests").setLevel(logging.WARNING)
39 | logging.getLogger("charset_normalizer").setLevel(logging.WARNING)
40 | logging.getLogger("WDM").setLevel(logging.WARNING)
41 | logging.getLogger("PIL").setLevel(logging.WARNING)
42 |
43 |
44 | def windows_path_to_unix(path):
45 | drive, rest = path.split(":", 1)
46 | unix_path = rest.replace("\\", "/")
47 | unix_path = f"/{drive.lower()}{unix_path}"
48 | return unix_path
49 |
50 |
51 | def install_auto_complete_cli():
52 | argcomplete_path = os.path.abspath(os.path.join(os.path.expanduser("~"), "argcomplete_semester_organizer.sh"))
53 | if os.path.exists(argcomplete_path):
54 | return
55 | local_argcomplete_path = os.path.join(SCRIPTS_PATH, "argcomplete_semester_organizer.sh")
56 | # copy file to home directory
57 | shutil.copyfile(local_argcomplete_path, argcomplete_path)
58 | bashrc_path = os.path.abspath(os.path.join(os.path.expanduser("~"), ".bashrc"))
59 | files = ["main.py", "release.py", "run_linter.py", "update_levnet_data.py"]
60 |     text_to_copy = "\n\n# This part is for the auto-complete of the semester_organizer project\n"
61 | text_to_copy += "export ARGCOMPLETE_USE_TEMPFILES=1\n"
62 | text_to_copy += f"source {windows_path_to_unix(argcomplete_path)}\n"
63 | for file in files:
64 | file_path = os.path.abspath(os.path.join(ROOT_PATH, file))
65 | file_path = windows_path_to_unix(file_path)
66 | text_to_copy += f'eval "$(register-python-argcomplete {file_path})"\n'
67 | text_to_copy += "# End of the autocomplete section"
68 | if not os.path.exists(bashrc_path):
69 | with open(bashrc_path, "w") as file:
70 | file.write(text_to_copy)
71 | else:
72 |         with open(bashrc_path, "r+") as file:  # read from the start first so the snippet is not appended twice
73 | if text_to_copy not in file.read():
74 | file.write(text_to_copy)
75 |
76 |
77 | def init_project():
78 | if sys.version_info < (3, 8):
79 | raise RuntimeError("To run this program you should have Python 3.8 or a more recent version.")
80 | with suppress(AttributeError):
81 | if os.name == "nt":
82 | sys.stdout.reconfigure(encoding="utf-8")
83 | disable_logger_third_party_warnings()
84 | try:
85 | install_auto_complete_cli()
86 | except Exception as error:
87 | get_logging().error(error)
88 |
89 |
90 | def get_current_hebrew_year():
91 | return convert_year(datetime.now().year, Language.HEBREW)
92 |
93 |
94 | def convert_year(year: int, language: Language) -> int:
95 | """
96 | Change year by the language.
97 | """
98 | result = year
99 | diff_hebrew_year = 3761
100 | if language is Language.HEBREW and year < diff_hebrew_year:
101 | result = year + diff_hebrew_year - 1
102 | elif language is Language.ENGLISH and year > diff_hebrew_year:
103 | result = year - diff_hebrew_year + 1
104 | return result
105 |
106 |
107 | def get_current_hebrew_name():
108 |     # This is a temporary solution.
109 |     # Example result: תשפ"ט
110 | additional_letter = chr(ord("א") + get_current_hebrew_year() - 5781)
111 |     assert additional_letter != "י", "ERROR: Invalid calculation, a different method should be used."
112 | hebrew_name = 'תשפ"' + additional_letter
113 | return hebrew_name
114 |
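# For example, for the Hebrew year 5785 the offset is 4, the additional letter is "ה",
# and the returned name is 'תשפ"ה'.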
115 |
116 | def get_database_path() -> Path:
117 | database_path = SRC_PATH / "database"
118 | database_path.mkdir(parents=True, exist_ok=True)
119 | return database_path
120 |
121 |
122 | def get_results_path() -> Path:
123 | return Path.home() / "semester_organizer_results"
124 |
125 |
126 | def get_results_test_path() -> Path:
127 | return get_database_path() / "results_test"
128 |
129 |
130 | def count_files_and_directory(directory: str) -> Tuple[int, int]:
131 | files = dirs = 0
132 | for _unused, dirs_name, files_names in os.walk(directory):
133 | files += len(files_names)
134 | dirs += len(dirs_name)
135 | return files, dirs
136 |
137 |
138 | def get_last_modified_by_days(file_path: str) -> int:
139 | if not os.path.exists(file_path):
140 | return 0
141 | last_modified = os.path.getmtime(file_path)
142 | return int((time.time() - last_modified) / 60 / 60 / 24)
143 |
144 |
145 | def get_current_semester():
146 | current_month = datetime.now().month
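    # The fall semester spans August through January, so a January date still counts as fall.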
147 | fall_months = [8, 9, 10, 11, 12, 1]
148 | return Semester.FALL if current_month in fall_months else Semester.SPRING
149 |
150 |
151 | def config_logging_level(level=logging.DEBUG) -> logging.Logger:
152 | disable_logger_third_party_warnings()
153 | format_logging = "%(asctime)s %(name)s.%(funcName)s +%(lineno)s: %(message)s"
154 | handlers = [logging.StreamHandler()]
155 | if level == logging.DEBUG:
156 | handlers.append(LOG_FILE_HANDLER)
157 | logging.basicConfig(handlers=handlers, datefmt="%H:%M:%S", level=level, format=format_logging)
158 | return get_logging()
159 |
160 |
161 | def get_logging() -> logging.Logger:
162 | return logging.getLogger(get_custom_software_name())
163 |
164 |
165 | def get_custom_software_name():
166 | return "semester_organizer_lev"
167 |
168 |
169 | def get_campus_name_test():
170 | return _("Machon Lev")
171 |
172 |
173 | def get_course_data_test():
174 | return Course(_("Infinitesimal Calculus 1"), 120131, 318, {Semester.SPRING, Semester.FALL}, set(Degree))
175 |
--------------------------------------------------------------------------------
/tests/collector/test_network.py:
--------------------------------------------------------------------------------
1 | import ssl
2 | from datetime import datetime
3 |
4 | import pytest
5 | from pytest import fixture
6 |
7 | from src import utils
8 | from src.collector.db import Database
9 | from src.collector.network import Network, WeakNetworkConnectionException, InvalidServerRequestException
10 | from src.collector.network import InvalidSemesterTimeRequestException, TLSAdapter
11 | from src.data.course import Course
12 | from src.data.language import Language
13 | from src.data.settings import Settings
14 | from src.data.translation import _
15 | from src.data.user import User
16 |
17 |
18 | @pytest.mark.network
19 | class TestPublicNetwork:
20 |
21 | already_fail_once = False
22 |
23 | @fixture
24 | def user(self):
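        # Load the saved user credentials and check connectivity once; the first failure is raised so it
        # is reported as an error, and any later failures simply skip the remaining network tests.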
25 | user_data = ""
26 | try:
27 | user_data = Database().load_user_data()
28 | assert user_data, "Can't load user data."
29 | network = Network(user_data)
30 | assert network.check_connection(), "Can't connect to the server."
31 | except Exception as error:
32 | if not TestPublicNetwork.already_fail_once:
33 | TestPublicNetwork.already_fail_once = True
34 | raise error
35 | pytest.skip(str(error))
36 | return user_data
37 |
38 | def test_fail_connection(self):
39 | network = Network(User("123456789", "123456789"))
40 | with pytest.raises(RuntimeError):
41 | network.connect()
42 |
43 | def test_connect_disconnect(self, user):
44 | network = Network(user)
45 | network.connect()
46 | network.disconnect()
47 |
48 | def test_extract_all_activities_ids_can_enroll_in(self, user):
49 | network = Network(user)
50 | settings = Settings()
51 | activities_ids_can_enroll_in = []
52 |
53 | try:
54 | activities_ids_can_enroll_in = network.extract_all_activities_ids_can_enroll_in(settings, [])
55 | except InvalidSemesterTimeRequestException:
56 | return
57 | assert "120131.04.5783.01" in activities_ids_can_enroll_in, "Can't extract activities ids can enroll in."
58 |
59 | def test_check_setup(self, user):
60 | network = Network()
61 | network.set_user(user)
62 | assert network.check_connection(), "Can't connect to the server."
63 |
64 | def test_extract_courses_already_did(self, user):
65 | network = Network(user)
66 | courses = network.extract_courses_already_did()
67 | assert courses, "Can't extract courses already did."
68 | assert any(course for course in courses if course[1] == 120701)
69 |
70 | def test_for_coverage(self):
71 | network = Network()
72 | network.set_settings(Settings())
73 | network.change_language(Language.ENGLISH)
74 | with pytest.raises(WeakNetworkConnectionException):
75 | raise WeakNetworkConnectionException()
76 |
77 | @pytest.mark.parametrize("language", list(Language))
78 | def test_extract_campus_names(self, user, language: Language):
79 | Language.set_current(language)
80 | database = Database()
81 | network = Network(user)
82 | network.change_language(language)
83 | campus_names = network.extract_campus_names()
84 | assert campus_names, "Can't extract campus names from the server."
85 | all_campuses_found = all(campus_name in campus_names for campus_name in database.get_common_campuses_names())
86 | assert all_campuses_found, "Some campuses names are missing."
87 |
88 | def test_extract_extra_course_info(self, user):
89 | network = Network(user)
90 | result = network.extract_extra_course_info(utils.get_course_data_test())
91 | assert result
92 |
93 | @pytest.mark.parametrize("language", list(Language))
94 | def test_extract_years(self, user, language: Language):
95 | network = Network(user)
96 | network.change_language(language)
97 | years = network.extract_years()
98 | current_year = datetime.now().year
99 | current_hebrew_year = utils.get_current_hebrew_year()
100 | hebrew_year_data = (current_hebrew_year, utils.get_current_hebrew_name())
101 | english_year_data = (current_hebrew_year, f"{current_year - 1}-{current_year}")
102 | test_year = hebrew_year_data if language is Language.HEBREW else english_year_data
103 | assert years, "Can't extract years from the server."
104 | assert test_year in years.items(), f"The year {current_hebrew_year} is missing."
105 |
106 | @pytest.mark.parametrize("language", list(Language))
107 | def test_extract_all_courses(self, user, language: Language):
108 | network = Network(user)
109 | network.change_language(language)
110 | campus_name = utils.get_campus_name_test()
111 |
112 | courses = network.extract_all_courses(campus_name)
113 | assert courses, "Can't extract courses from the server."
114 | with pytest.raises(RuntimeError):
115 | network.extract_all_courses("Not a campus name")
116 |
117 | @pytest.mark.parametrize("language", list(Language))
118 | def test_extract_academic_activities_data(self, user, language: Language):
119 | network = Network(user)
120 | network.change_language(language)
121 | course = utils.get_course_data_test()
122 | campus_name = utils.get_campus_name_test()
123 |
124 | academic_activities, missings = network.extract_academic_activities_data(campus_name, [course])
125 | missing_meetings_data = any(activity.no_meetings() for activity in academic_activities)
126 |
127 | if missings:
128 | print("WARNING: The following courses don't have activities: " + ", ".join(missings))
129 | if not academic_activities:
130 | print("WARNING: Fail to extract the activities, skip it since it can be delay of the college.")
131 | assert not missing_meetings_data, "Can't extract academic activities from the server."
132 |
133 | not_found_course = Course("name", -10, -30)
134 | loaded_academic_activities = network.extract_academic_activities_data(campus_name, [not_found_course])
135 | assert loaded_academic_activities == ([], ["name"])
136 |
137 | @pytest.mark.parametrize("language", list(Language))
138 | def test_change_language_campuses(self, user, language: Language):
139 | network = Network(user)
140 | network.change_language(language)
141 | campuses = network.extract_campuses()
142 | # Campus ID of Machon lev is 1
143 | assert campuses[1] == _("Machon Lev")
144 |
145 | def test_coverage(self, user):
146 | with pytest.raises(WeakNetworkConnectionException):
147 | raise WeakNetworkConnectionException()
148 |
149 | with pytest.raises(InvalidServerRequestException):
150 | try:
151 | raise InvalidServerRequestException("url_request", {}, None)
152 | except InvalidServerRequestException as error:
153 | assert not error.has_json()
154 | raise
155 |
156 | TLSAdapter.session(ssl.OP_NO_TLSv1_2)
157 |
--------------------------------------------------------------------------------
/SemesterOrganizer.bat:
--------------------------------------------------------------------------------
1 | @echo off
2 | setlocal enabledelayedexpansion
3 |
4 | echo ====================================================
5 | echo Checking for any existing instance of app.py...
6 | echo ====================================================
7 | rem Use WMIC to look for processes whose CommandLine contains "app.py"
8 | for /f "skip=1 tokens=1" %%p in ('wmic process where "CommandLine like '%%app.py%%'" get ProcessId ^| findstr /r "[0-9]"') do (
9 | echo Terminating existing app.py process with PID %%p...
10 | taskkill /PID %%p /F >nul 2>&1
11 | )
12 |
13 | echo ====================================================
14 | echo Checking for Python installation...
15 | echo ====================================================
16 |
17 | :: First, try to run "python --version" to see if Python is on PATH
18 | python --version >nul 2>&1
19 | if %errorlevel%==0 (
20 | echo Python is available on PATH.
21 | goto :checkPip
22 | ) else (
23 | echo Python not found on PATH. Checking common installation paths...
24 | )
25 |
26 | :: Initialize variable to hold found installation path
27 | set "PYTHON_PATH="
28 |
29 | :: Search common user installation directory (e.g., from the Python.org installer)
30 | for /d %%a in ("%LocalAppData%\Programs\Python\*") do (
31 | if exist "%%a\python.exe" (
32 | set "PYTHON_PATH=%%a"
33 | goto :foundPython
34 | )
35 | )
36 |
37 | :: Optionally, search Program Files (if installed for all users)
38 | for /d %%a in ("%ProgramFiles%\Python*") do (
39 | if exist "%%a\python.exe" (
40 | set "PYTHON_PATH=%%a"
41 | goto :foundPython
42 | )
43 | )
44 |
45 | :foundPython
46 | if defined PYTHON_PATH (
47 | echo Found Python at: %PYTHON_PATH%
48 | echo Adding this directory to PATH.
49 | set "PATH=%PYTHON_PATH%;%PATH%"
50 | goto :checkPip
51 | )
52 |
53 | :: If no Python installation was found in common locations, ask the user to download/install it.
54 | echo No Python installation found in common locations.
55 | set /p USER_CHOICE="Do you want to download and install Python? (y/n): "
56 | if /i "%USER_CHOICE%"=="y" (
57 | echo Downloading Python installer...
58 | rem Define a temporary location for the installer.
59 | set "PYTHON_INSTALLER=%TEMP%\python_installer.exe"
60 | rem Download the latest Python installer (update URL as needed)
61 | curl -L -o "%PYTHON_INSTALLER%" https://www.python.org/ftp/python/3.13.2/python-3.13.2-amd64.exe
62 | if %errorlevel% neq 0 (
63 | echo Error downloading Python installer.
64 | goto :end
65 | )
66 | echo Installing Python for the current user...
67 | rem Install quietly for the current user and add to PATH (no admin rights)
68 | "%PYTHON_INSTALLER%" /quiet InstallAllUsers=0 PrependPath=1
69 | if %errorlevel% neq 0 (
70 | echo Error during Python installation.
71 | goto :end
72 | )
73 | del "%PYTHON_INSTALLER%"
74 | echo Python installation completed.
75 | rem Pause briefly to allow the installation to finalize
76 | timeout /t 5 /nobreak >nul
77 | ) else (
78 | echo Python is required to run this script. Exiting.
79 | goto :end
80 | )
81 |
82 | :checkPip
83 | echo ====================================================
84 | echo Checking for pip...
85 | echo ====================================================
86 | python -m pip --version >nul 2>&1
87 | if %errorlevel% neq 0 (
88 | echo Pip is not available. Attempting to install pip...
89 | python -m ensurepip --default-pip
90 | if %errorlevel% neq 0 (
91 | echo Failed to install pip. Exiting.
92 | goto :end
93 | )
94 | )
95 |
96 | :: Decide which python interpreter to use (system python or virtual environment)
97 | set "PYTHON_INTERPRETER=python"
98 |
99 | echo ====================================================
100 | echo Setting up python virtual environment...
101 | echo ====================================================
102 | if exist ".venv\Scripts\python.exe" (
103 | echo Virtual environment already exists. Using it.
104 | set "PYTHON_INTERPRETER=.venv\Scripts\python.exe"
105 | ) else (
106 | echo Creating virtual environment...
107 | python -m venv .venv
108 | if %errorlevel% neq 0 (
109 | echo Failed to create virtual environment.
110 | goto :end
111 | )
112 | set "PYTHON_INTERPRETER=.venv\Scripts\python.exe"
113 | )
114 |
115 | echo ====================================================
116 | echo Upgrading pip in the python virtual environment...
117 | echo ====================================================
118 | "%PYTHON_INTERPRETER%" -m pip install --upgrade pip
119 | if %errorlevel% neq 0 (
120 | echo Failed to upgrade pip.
121 | goto :end
122 | )
123 |
124 | echo ====================================================
125 | echo Installing required packages in the python virtual environment...
126 | echo ====================================================
127 | "%PYTHON_INTERPRETER%" -m pip install --upgrade -r requirements.txt
128 | if %errorlevel% neq 0 (
129 | echo Failed to install required packages.
130 | goto :end
131 | )
132 |
133 | echo ====================================================
134 | echo Configuring Semester Organizer web application environment...
135 | echo ====================================================
136 | rem Set environment variables for Flask
137 | set "FLASK_APP=app.py"
138 | set "FLASK_ENV=development"
139 |
140 | echo ====================================================
141 | echo Starting the Semester Organizer web application...
142 | echo ====================================================
143 | rem Start the Flask server in a new minimized window so the script continues.
144 | set FLASK_APP=app.py
145 | set FLASK_ENV=development
146 | start "" /min "%PYTHON_INTERPRETER%" -m flask --app app.py --debug run
147 | rem Wait a couple of seconds for the server to start up
148 | timeout /t 2 /nobreak >nul
149 |
150 | echo ====================================================
151 | echo Opening your default web browser at http://localhost:5000...
152 | echo ====================================================
153 | rem Search for supported browsers in common installation locations.
154 | set "BROWSER="
155 | set "BROWSER_PATH="
156 | set "URL=http://localhost:5000"
157 |
158 | if exist "C:\Program Files\Google\Chrome\Application\chrome.exe" (
159 | set "BROWSER=chrome"
160 | set "BROWSER_PATH=C:\Program Files\Google\Chrome\Application\chrome.exe"
161 | ) else if exist "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" (
162 | set "BROWSER=chrome"
163 | set "BROWSER_PATH=C:\Program Files (x86)\Google\Chrome\Application\chrome.exe"
164 | ) else if exist "C:\Program Files\Microsoft\Edge\Application\msedge.exe" (
165 | set "BROWSER=edge"
166 | set "BROWSER_PATH=C:\Program Files\Microsoft\Edge\Application\msedge.exe"
167 | ) else if exist "C:\Program Files (x86)\Microsoft\Edge\Application\msedge.exe" (
168 | set "BROWSER=edge"
169 | set "BROWSER_PATH=C:\Program Files (x86)\Microsoft\Edge\Application\msedge.exe"
170 | ) else if exist "C:\Program Files\Mozilla Firefox\firefox.exe" (
171 | set "BROWSER=firefox"
172 | set "BROWSER_PATH=C:\Program Files\Mozilla Firefox\firefox.exe"
173 | ) else if exist "C:\Program Files (x86)\Mozilla Firefox\firefox.exe" (
174 | set "BROWSER=firefox"
175 | set "BROWSER_PATH=C:\Program Files (x86)\Mozilla Firefox\firefox.exe"
176 | )
177 |
178 | if defined BROWSER (
179 | if /i "%BROWSER%"=="chrome" (
180 | echo Launching Chrome in incognito mode with cache disabled.
181 | start "" "%BROWSER_PATH%" --incognito --disable-application-cache --new-window "%URL%"
182 | ) else if /i "%BROWSER%"=="edge" (
183 | echo Launching Edge in inprivate mode with cache disabled.
184 | start "" "%BROWSER_PATH%" --inprivate --disable-application-cache --new-window "%URL%"
185 | ) else if /i "%BROWSER%"=="firefox" (
186 | echo Launching Firefox in private mode.
187 | start "" "%BROWSER_PATH%" -private-window "%URL%"
188 | ) else (
189 | echo Browser type not recognized, opening default browser.
190 | start "" "%URL%"
191 | )
192 | ) else (
193 | echo No supported browser found.
194 | echo Opening default browser...
195 | start "" "%URL%"
196 | )
197 | echo.
198 | echo Press any key to terminate the Flask application and exit...
199 | pause >nul
200 |
201 | rem Terminate the Flask (app.py) process that was started above.
202 | for /f "skip=1 tokens=1" %%p in ('wmic process where "CommandLine like '%%app.py%%'" get ProcessId ^| findstr /r "[0-9]"') do (
203 |     taskkill /PID %%p /F >nul 2>&1
204 | )
205 |
206 | :end
207 | echo Script finished.
208 | endlocal
209 |
--------------------------------------------------------------------------------
/src/app/static/js/script.js:
--------------------------------------------------------------------------------
1 | function getSelectedDegrees() {
2 | let selectedDegrees = [];
3 | document.querySelectorAll('.degree-item.selected').forEach(item => {
4 | selectedDegrees.push(item.dataset.degree);
5 | });
6 | selectedDegrees = Array.from(selectedDegrees)
7 | return selectedDegrees;
8 | }
9 |
10 | // Update campuses based on selected degrees
11 | async function loadCampuses() {
12 | if (getSelectedDegrees().length === 0) {
13 | // Reset the dropdown when no degrees are selected
14 | updateCampusesOptions([]);
15 | // Clear courses if no degrees are selected
16 | loadCourses();
17 | return;
18 | }
19 |
20 | const response = await fetch('/get_campuses', {
21 | method: 'POST',
22 | headers: { 'Content-Type': 'application/json' },
23 | body: JSON.stringify({ degrees: getSelectedDegrees() })
24 | });
25 |
26 | const campuses = await response.json();
27 | updateCampusesOptions(campuses);
28 | loadCourses();
29 |
30 | }
31 |
32 |
33 | // Toggle course selection (add/remove chip)
34 | function toggleCourseSelection(course) {
35 | const selectedCourses = document.getElementById('selected-courses');
36 |
37 | // Check if already exists
38 | const existingChip = Array.from(selectedCourses.children)
39 | .find(chip => chip.textContent.includes(course));
40 |
41 | if (existingChip) {
42 | existingChip.remove();
43 | } else {
44 | const chip = document.createElement('div');
45 | chip.className = 'chip';
46 | chip.innerHTML = `
47 |             <span class="close">×</span>
48 |             <span>${course}</span>
49 | `;
50 | chip.querySelector('.close').addEventListener('click', () => {
51 | chip.remove();
52 | // Deselect the corresponding course in the list
53 | const courseItem = Array.from(document.querySelectorAll('#courses-list .course-item'))
54 | .find(item => item.textContent === course);
55 | if (courseItem) {
56 | courseItem.classList.remove('selected');
57 | }
58 | });
59 | selectedCourses.appendChild(chip);
60 | }
61 | }
62 |
63 | function updateCampusesOptions(items) {
64 | const dropdown = document.getElementById("campuses");
65 | const previousValue = dropdown.value;
66 |
67 | if (items.length === 0) {
68 | dropdown.innerHTML = '';
69 | renderCourses([]);
70 | removeInvalidSelectedCourses([]);
71 | return;
72 | }
73 |
74 | dropdown.innerHTML = '';
75 |
76 | items.forEach(item => {
77 | const option = document.createElement('option');
78 | option.value = item;
79 | option.textContent = item;
80 | dropdown.appendChild(option);
81 | });
82 |
83 | // Restore previous selection if it still exists
84 | if (items.includes(previousValue)) {
85 | dropdown.value = previousValue;
86 | }
87 | }
88 |
89 |
90 |
91 |
92 | // Load courses for the selected campus and degrees
93 | async function loadCourses() {
94 | const campus = document.getElementById('campuses').value;
95 | if (!campus || getSelectedDegrees().length === 0) {
96 | renderCourses([]);
97 | removeInvalidSelectedCourses([]);
98 | return;
99 | }
100 | const response = await fetch('/get_courses', {
101 | method: 'POST',
102 | headers: {'Content-Type': 'application/json'},
103 | body: JSON.stringify({
104 | degrees: getSelectedDegrees(),
105 | campus: campus
106 | })
107 | });
108 |
109 | const courses = await response.json();
110 | renderCourses(courses);
111 | removeInvalidSelectedCourses(courses);
112 | }
113 |
114 | // Remove selected courses if they are no longer available
115 | function removeInvalidSelectedCourses(availableCourses) {
116 | const selectedCoursesContainer = document.getElementById('selected-courses');
117 | const selectedChips = Array.from(selectedCoursesContainer.children);
118 |
119 | selectedChips.forEach(chip => {
120 | const courseName = chip.querySelector('span:not(.close)').textContent.trim();
121 | if (!availableCourses.includes(courseName)) {
122 | chip.remove(); // Remove from selected list if no longer available
123 | }
124 | });
125 | }
126 |
127 | // Render courses in the courses list
128 | function renderCourses(courses) {
129 | const coursesList = document.getElementById('courses-list');
130 | coursesList.innerHTML = ''; // Clear previous
131 |
132 | courses.forEach(course => {
133 | const courseItem = document.createElement('div');
134 | courseItem.className = 'course-item';
135 | courseItem.textContent = course;
136 |
137 | // Check if already selected
138 | if (Array.from(document.querySelectorAll('#selected-courses .chip'))
139 | .some(chip => chip.textContent.includes(course))) {
140 | courseItem.classList.add('selected');
141 | }
142 |
143 | courseItem.addEventListener('click', () => {
144 | courseItem.classList.toggle('selected');
145 | toggleCourseSelection(course);
146 | });
147 |
148 | coursesList.appendChild(courseItem);
149 | });
150 | }
151 |
152 | document.addEventListener('DOMContentLoaded', () => {
153 | // Degree selection
154 | document.querySelectorAll('.degree-item').forEach(item => {
155 | item.addEventListener('click', () => {
156 | item.classList.toggle('selected');
157 | loadCampuses();
158 | });
159 | });
160 |
161 | // Campus selection
162 | document.getElementById('campuses').addEventListener('change', loadCourses);
163 |
164 | // Search functionality
165 | document.getElementById('search-box').addEventListener('input', function () {
166 | const searchTerm = this.value.toLowerCase();
167 | const courses = document.querySelectorAll('#courses-list .course-item');
168 |
169 | courses.forEach(course => {
170 | const courseName = course.textContent.toLowerCase();
171 | if (courseName.includes(searchTerm)) {
172 | course.style.display = 'block';
173 | } else {
174 | course.style.display = 'none';
175 | }
176 | });
177 | });
178 |
179 | document.getElementById('generate-btn').addEventListener('click', async () => {
180 | const selectedDegrees = getSelectedDegrees();
181 | const selectedCampus = document.getElementById('campuses').value;
182 | const selectedCourses = Array.from(document.querySelectorAll('#selected-courses .chip span:not(.close)'))
183 | .map(chip => chip.textContent.trim());
184 |
185 | if (!selectedDegrees.length || !selectedCampus || !selectedCourses.length) {
186 | alert("נא לבחור תואר, קמפוס וקורסים");
187 | return;
188 | }
189 |
190 | // Remove any existing loading animation
191 | let existingLoader = document.getElementById('loading-container');
192 | if (existingLoader) {
193 | existingLoader.remove();
194 | }
195 |
196 | // Create a new loading spinner container at the bottom of #container
197 | const loadingContainer = document.createElement('div');
198 | loadingContainer.id = "loading-container";
199 | loadingContainer.innerHTML = `
200 |
201 |             <span id="loading-text">יוצר מערכת שעות נא להמתין...</span>
202 | `;
203 |
204 | // Append loading container **inside #container at the bottom**
205 | document.getElementById("container").appendChild(loadingContainer);
206 |
207 | // Auto-scroll to loading spinner
208 | loadingContainer.scrollIntoView({ behavior: "smooth", block: "center" });
209 |
210 | const requestData = {
211 | degrees: selectedDegrees,
212 | campus: selectedCampus,
213 | courses: selectedCourses
214 | };
215 |
216 | try {
217 | const response = await fetch('/generate', {
218 | method: 'POST',
219 | headers: { 'Content-Type': 'application/json' },
220 | body: JSON.stringify(requestData)
221 | });
222 |
223 | const result = await response.json();
224 |
225 | if (result.zip) {
226 | document.getElementById('loading-text').textContent = "נוצר בהצלחה ✅";
227 |
228 | // Use POST method to download the zip file
229 | const form = document.createElement('form');
230 | form.method = 'POST';
231 | form.action = '/download_zip';
232 | document.body.appendChild(form);
233 | form.submit();
234 | document.body.removeChild(form);
235 |
236 | // Remove the loading spinner after success
237 | setTimeout(() => {
238 | loadingContainer.remove();
239 | }, 2000);
240 | }
241 | } catch (error) {
242 | console.error('Error generating files:', error);
243 | document.getElementById('loading-text').textContent = "שגיאה ביצירת הקבצים ❌";
244 | }
245 | });
246 | });
247 |
--------------------------------------------------------------------------------
/src/data/course_constraint.py:
--------------------------------------------------------------------------------
1 | import json
2 | from dataclasses import dataclass, field
3 | from pathlib import Path
4 | from typing import Dict, List, Optional
5 | from collections import OrderedDict
6 | from copy import deepcopy
7 |
8 | from src.collector.db import Database
9 | from src.data.course import Course
10 | from src.data.degree import Degree
11 | from src.data.language import Language
12 | from src.data import translation
13 |
14 |
15 | @dataclass(order=True)
16 | class PrerequisiteCourse:
17 | id: int = field(compare=True)
18 | course_number: int
19 | name: str
20 | can_be_taken_in_parallel: bool = False
21 |
22 | def to_json(self, include_can_be_taken_in_parallel: bool) -> Dict:
23 | result = {
24 | "id": self.id,
25 | "course_number": self.course_number,
26 | "name": self.name,
27 | }
28 | if include_can_be_taken_in_parallel:
29 | result["can_be_taken_in_parallel"] = self.can_be_taken_in_parallel
30 | return result
31 |
32 | def __hash__(self):
33 | return hash(self.id)
34 |
35 | def __eq__(self, other):
36 | return isinstance(other, PrerequisiteCourse) and self.id == other.id
37 |
38 |
39 | @dataclass(order=True)
40 | class ConstraintCourseData:
41 | id: int = field(hash=True, compare=True)
42 | course_number: int
43 | name: str
44 | aliases: List[str] = field(default_factory=list)
45 | blocked_by: List[PrerequisiteCourse] = field(default_factory=list)
46 | blocks: List[PrerequisiteCourse] = field(default_factory=list)
47 | course_info: Optional[Course] = None
48 |
49 | def to_json(self, include_blocked_by: bool, include_blocks: bool, include_can_be_taken_in_parallel: bool) -> Dict:
50 | mandatory_degrees = list(self.course_info.mandatory_degrees) if self.course_info else []
51 | mandatory_degrees = sorted([translation.hebrew(degree.name) for degree in mandatory_degrees])
52 | optional_degrees = list(self.course_info.optional_degrees) if self.course_info else []
53 | optional_degrees = sorted([translation.hebrew(degree.name) for degree in optional_degrees])
54 |
55 | result = {
56 | "id": self.id,
57 | "name": self.name,
58 | "course_number": self.course_number,
59 | "is_active": self.course_info.is_active if self.course_info else None,
60 | "credits": self.course_info.credits_count if self.course_info else None,
61 | "aliases": self.aliases,
62 | "mandatory_for_degrees": mandatory_degrees,
63 | "optional_for_degrees": optional_degrees,
64 | }
65 | if include_blocked_by:
66 | result["blocked_by"] = [course.to_json(include_can_be_taken_in_parallel) for course in self.blocked_by]
67 | if include_blocks:
68 | result["blocks"] = [course.to_json(include_can_be_taken_in_parallel) for course in self.blocks]
69 | return result
70 |
71 | def __hash__(self):
72 | return hash(self.id)
73 |
74 | def __eq__(self, other):
75 | return isinstance(other, ConstraintCourseData) and self.id == other.id
76 |
77 |
78 | class CourseConstraint:
79 |
80 | def __init__(self):
81 | self.name = None
82 | self.version = None
83 | self.comment = None
84 |
85 | def export(self, all_courses: List[ConstraintCourseData], include_blocked_by: bool, include_blocks: bool,
86 | include_can_be_taken_in_parallel: bool, file_path: Path):
87 | all_courses.sort(key=lambda course: course.id)
88 |         assert self.version, "ERROR: The version of the json file is unknown."
89 |         assert self.comment is not None, "ERROR: The comment of the json file is unknown."
90 | include_parallel = include_can_be_taken_in_parallel
91 | json_data = {
92 | "version": self.version,
93 | "_comment": self.comment,
94 | "courses": [course.to_json(include_blocked_by, include_blocks, include_parallel) for course in all_courses]
95 | }
96 | file_path.parent.mkdir(parents=True, exist_ok=True)
97 | with open(file_path, 'w', encoding='utf-8', newline='\n') as json_file:
98 | json.dump(json_data, json_file, ensure_ascii=False, sort_keys=False, indent=4)
99 |
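    # A minimal, illustrative example of the constraints JSON this method reads
    # (the field names match the parsing below; the values are hypothetical):
    # {
    #     "version": "1.0",
    #     "_comment": "Prerequisite graph of the courses",
    #     "courses": [
    #         {"id": 1, "course_number": 120131, "name": "Infinitesimal Calculus 1"},
    #         {"id": 2, "course_number": 120132, "name": "Infinitesimal Calculus 2",
    #          "blocked_by": [{"id": 1, "can_be_taken_in_parallel": false}]}
    #     ]
    # }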
100 | def extract_courses_data(self, file_path: Path) -> Dict[int, ConstraintCourseData]:
101 | assert file_path.exists(), "File does not exist"
102 | all_courses_ids = set()
103 | all_ids = set()
104 | courses = OrderedDict()
105 | db = Database()
106 | all_courses_objects = db.load_courses(Language.HEBREW, set(Degree))
107 | all_courses_objects = {course.course_number: course for course in all_courses_objects}
108 |
109 | def get_pre_request_courses_list(json_object: Dict, key: str) -> List[PrerequisiteCourse]:
110 | prerequisite_courses = []
111 | for prerequisite_json_object in json_object.get(key, []):
112 | pre_object_id = prerequisite_json_object["id"]
113 | if pre_object_id not in courses:
114 |                     raise RuntimeError(f"ERROR: The object course id {pre_object_id} was not found.")
115 | pre_course_number = courses[pre_object_id].course_number
116 | name = courses[pre_object_id].name
117 | can_be_taken_in_parallel = prerequisite_json_object.get("can_be_taken_in_parallel", False)
118 | pre_object = PrerequisiteCourse(pre_object_id, pre_course_number, name, can_be_taken_in_parallel)
119 | prerequisite_courses.append(pre_object)
120 | return prerequisite_courses
121 |
122 | with open(file_path, 'r', encoding='utf-8') as json_file:
123 | json_data = json.load(json_file)
124 | self.version = json_data["version"]
125 | self.comment = json_data["_comment"]
126 | for course_data in json_data["courses"]:
127 | if course_data.get("deprecated", False):
128 | continue
129 | object_id = course_data["id"]
130 | course_number = course_data["course_number"]
131 | assert object_id > 0, "ERROR: Object id must be positive non zero, edit the 'id' key in the file."
132 | assert course_number > 0, "ERROR: Course id must be positive, edit the 'course_number' key in the file."
133 | default_course_data = Course("", course_number, 0)
134 | has_course_info = course_number in all_courses_objects
135 | course_info = all_courses_objects.get(course_number, default_course_data)
136 | default_is_active = course_info.is_active if has_course_info else None
137 | default_credits_count = course_info.credits_count if has_course_info else None
138 |
139 | course_info.is_active = course_data.get("is_active", default_is_active)
140 | course_info.credits_count = course_data.get("credits", default_credits_count)
141 |
142 | object_data = ConstraintCourseData(
143 | id=object_id,
144 | course_number=course_number,
145 | name=course_data["name"],
146 | aliases=course_data.get("aliases", []) + [course_data["name"]],
147 | course_info=course_info
148 | )
149 |
150 |                 assert object_id not in all_ids, f"ERROR: Found duplicate id {object_id}, it should be removed."
151 |                 assert course_number not in all_courses_ids, \
152 |                     f"ERROR: Found duplicate course number {course_number}, it should be removed."
153 | all_ids.add(object_id)
154 | all_courses_ids.add(course_number)
155 | courses[object_id] = object_data
156 |
157 | for course_data in json_data["courses"]:
158 | if course_data.get("deprecated", False):
159 | continue
160 | course_id = course_data["id"]
161 | course = courses[course_id]
162 | course.blocked_by = get_pre_request_courses_list(course_data, "blocked_by")
163 | course.blocks = get_pre_request_courses_list(course_data, "blocks")
164 |
165 | return courses
166 |
167 | def get_extended_blocked_by_courses(self, all_courses: Dict[int, ConstraintCourseData]) \
168 | -> Dict[int, ConstraintCourseData]:
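        # Expand each course's direct prerequisites into the full transitive chain of prerequisites,
        # marking a prerequisite as "can be taken in parallel" only if every branch it was reached through allows it.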
169 | result_courses = deepcopy(all_courses)
170 | all_pre_courses = []
171 | for course in result_courses.values():
172 | all_pre_courses.clear()
173 | main_queue = list(deepcopy(course.blocked_by))
174 |             sub_queue = []
175 |             for current_pre_course in main_queue:
176 |                 can_be_taken_in_parallel = current_pre_course.can_be_taken_in_parallel
177 |                 sub_queue.append(current_pre_course)
178 |                 while sub_queue:
179 |                     current_course = deepcopy(sub_queue.pop())
180 |                     current_course.can_be_taken_in_parallel = can_be_taken_in_parallel
181 |                     all_pre_courses.append(current_course)
182 |                     sub_queue.extend(all_courses[current_course.id].blocked_by)
183 |
184 | for pre_course in all_pre_courses:
185 | for other_pre_course in all_pre_courses:
186 | must = not other_pre_course.can_be_taken_in_parallel or not pre_course.can_be_taken_in_parallel
187 | if pre_course == other_pre_course and must:
188 | pre_course.can_be_taken_in_parallel = False
189 | other_pre_course.can_be_taken_in_parallel = False
190 |
191 | course.blocked_by = list(sorted(set(all_pre_courses), key=lambda data: data.id))
192 | return result_courses
193 |
194 | def get_extended_blocks_courses(self, all_courses: Dict) -> Dict[int, ConstraintCourseData]:
195 | result_courses = deepcopy(all_courses)
196 | found_in_courses = set()
197 | for object_id, course in result_courses.items():
198 | found_in_courses.clear()
199 | for _object_id, other_course in result_courses.items():
200 | if object_id in {pre_course.id for pre_course in other_course.blocked_by}:
201 | can_be_taken_in_parallel = any(course.can_be_taken_in_parallel
202 | for course in other_course.blocked_by if course.id == object_id)
203 | pre_course_obj = PrerequisiteCourse(
204 | other_course.id, other_course.course_number, other_course.name, can_be_taken_in_parallel
205 | )
206 | found_in_courses.add(pre_course_obj)
207 | course.blocks = list(sorted(found_in_courses, key=lambda data: data.id))
208 | return result_courses
209 |
--------------------------------------------------------------------------------
/src/algorithms/csp.py:
--------------------------------------------------------------------------------
1 | from enum import Enum, auto
2 | from typing import List, Optional, Dict, Set
3 |
4 | from constraint.problem import Problem
5 | from constraint.constraints import AllEqualConstraint
6 |
7 | from src.data.activity import Activity
8 | from src.data.course_choice import CourseChoice
9 | from src.data.day import Day
10 | from src.data.degree import Degree
11 | from src.data.schedule import Schedule
12 | from src.data.settings import Settings
13 | from src.data.translation import _
14 |
15 |
16 | class Status(Enum):
17 | SUCCESS = auto()
18 | SUCCESS_WITH_ONE_FAVORITE_LECTURER = auto()
19 | SUCCESS_WITHOUT_FAVORITE_LECTURERS = auto()
20 | FAILED = auto()
21 |
22 |
23 | class CSP:
24 |
25 | def __init__(self):
26 | self.courses_degrees = None
27 | self.activities_ids_groups = None
28 | self.courses_choices = None
29 | self.consist_one_favorite_teacher = False
30 | self.settings = None
31 | self.status = None
32 | self.last_courses_crashed = (None, None)
33 |
34 | def extract_schedules_minimal_consists(self, activities: List[Activity],
35 | activities_ids_groups: Dict[str, Set[int]] = None) -> List[Schedule]:
36 | """
37 |         Extract only the schedules that satisfy the minimal conditions.
38 |         If activities_ids_groups is None, only schedules whose meetings are mutually consistent are returned;
39 |         otherwise, only schedules whose meetings are consistent and whose activities appear in
40 |         activities_ids_groups are returned (this helps checking which classes the student can enroll in).
41 | """
42 | all_activities_names, problem = self._prepare_activities(activities)
43 | self.activities_ids_groups = activities_ids_groups
44 | self.status = Status.SUCCESS
45 | for name in all_activities_names:
46 | for other_name in all_activities_names:
47 | if name == other_name:
48 | continue
49 | problem.addConstraint(self._is_consist_activity, (name, other_name))
50 | if activities_ids_groups:
51 | problem.addConstraint(self._is_consist_activities_ids_can_enroll, (name,))
52 | problem.addConstraint(self._is_consist_itself, (name,))
53 |
54 | schedules = self._extract_solutions(problem)
55 |
56 | if not schedules:
57 | self.status = Status.FAILED
58 |
59 | return schedules
60 |
61 | def extract_schedules(self, activities: List[Activity],
62 | courses_choices: Optional[Dict[str, CourseChoice]] = None,
63 | settings: Settings = None,
64 | activities_ids_groups: Dict[str, Set[int]] = None,
65 | courses_degrees: Dict[int, Set[Degree]] = None) -> List[Schedule]:
66 |
67 | self.settings = settings or Settings()
68 | self.courses_choices = courses_choices or {}
69 | self.activities_ids_groups = activities_ids_groups
70 | self.courses_degrees = courses_degrees or {}
71 | all_activities_names, problem = self._prepare_activities(activities)
72 |
73 | for name in all_activities_names:
74 | for other_name in all_activities_names:
75 | if name == other_name:
76 | continue
77 | problem.addConstraint(self._is_consist_activity, (name, other_name))
78 | problem.addConstraint(self._is_consist_favorite_teachers, (name,))
79 | problem.addConstraint(self._is_consist_itself, (name,))
80 | if set(self.settings.show_only_classes_in_days) != set(Day):
81 | problem.addConstraint(self._is_consist_classes_in_days, (name,))
82 | if self.settings.show_only_courses_with_free_places:
83 | problem.addConstraint(self._is_consist_capacity, (name,))
84 | if self.settings.show_only_courses_with_the_same_actual_number:
85 | problem.addConstraint(self._is_consist_actual_course, (name,))
86 | if not self.settings.show_hertzog_and_yeshiva:
87 | problem.addConstraint(self._is_consist_hertzog_and_yeshiva, (name,))
88 | if self.settings.show_only_classes_can_enroll and self.activities_ids_groups:
89 | problem.addConstraint(self._is_consist_activities_ids_can_enroll, (name,))
90 |
91 | schedule_result = self._extract_solutions(problem)
92 |
93 | if not schedule_result and courses_choices and not self.consist_one_favorite_teacher:
94 |             # If there are no schedules, try to find schedules with at least one favorite lecturer
95 | self.status = Status.SUCCESS_WITH_ONE_FAVORITE_LECTURER
96 | self.consist_one_favorite_teacher = True
97 | return self.extract_schedules(activities, courses_choices, self.settings,
98 | self.activities_ids_groups, self.courses_degrees)
99 |
100 | if not schedule_result and courses_choices and self.consist_one_favorite_teacher:
101 | # If there are no schedules, try to find schedules without favorite teachers
102 | self.status = Status.SUCCESS_WITHOUT_FAVORITE_LECTURERS
103 | self.consist_one_favorite_teacher = False
104 | return self.extract_schedules(activities, None, self.settings,
105 | self.activities_ids_groups, self.courses_degrees)
106 |
107 | if not schedule_result:
108 | self.status = Status.FAILED
109 | elif self.status is None:
110 | self.status = Status.SUCCESS
111 |
112 | return schedule_result
113 |
114 | def get_status(self):
115 | return self.status
116 |
117 | def get_last_activities_crashed(self):
118 | return self.last_courses_crashed
119 |
120 | def _is_consist_activity(self, group_one: List[Activity], group_two: List[Activity]):
121 | result = all(not activity.is_crash_with_activities(group_one) for activity in group_two)
122 | if not result:
123 | self.last_courses_crashed = (group_one[0].name, group_two[0].name)
124 | return result
125 |
126 | def _is_consist_capacity(self, activities: List[Activity]):
127 | """
128 |         Check whether the activities satisfy the capacity constraint (have free places).
129 |         :param activities: list of activities
130 |         :type activities: List[Activity]
131 | :return: bool
132 | """
133 | return all(activity.type.is_personal() or activity.is_have_free_places() for activity in activities)
134 |
135 | def _is_consist_itself(self, activities: List[Activity]):
136 | for i, activity in enumerate(activities):
137 | for j in range(i + 1, len(activities)):
138 | if activity.is_crash_with_activity(activities[j]):
139 | return False
140 | return True
141 |
142 | def _is_consist_favorite_teachers(self, activities: List[Activity]):
143 | """
144 |         Check whether the activities are given by the favorite (chosen) teachers.
145 |         :param activities: list of activities
146 |         :type activities: List[Activity]
147 | :return: bool
148 | """
149 |         # Return True if there are no course choices at all, or this activity has no choice entry (e.g. a personal activity)
150 | if not self.courses_choices or activities[0].name not in self.courses_choices.keys():
151 | return True
152 | names_list = []
153 | is_consist = True
154 | course_choice = self.courses_choices[activities[0].name]
155 | for activity in activities:
156 | if activity.type.is_lecture():
157 | names_list = course_choice.available_teachers_for_lecture
158 | elif activity.type.is_exercise():
159 | names_list = course_choice.available_teachers_for_practice
160 | if self.consist_one_favorite_teacher:
161 | is_consist = is_consist and activity.lecturer_name in names_list
162 | else:
163 | is_consist = is_consist and (not names_list or activity.lecturer_name in names_list)
164 | if self.consist_one_favorite_teacher and is_consist:
165 | break
166 |
167 | return is_consist
168 |
169 | def _is_consist_actual_course(self, activities: List[Activity]):
170 | """
171 |         Check whether all the activities share the same actual course number.
172 |         :param activities: list of activities
173 |         :type activities: List[Activity]
174 | :return: bool
175 | """
176 | if activities[0].type.is_personal():
177 | return True
178 | # All academic activities must have the same actual course
179 | return len({activity.actual_course_number for activity in activities}) == 1
180 |
181 | def _is_consist_hertzog_and_yeshiva(self, activities: List[Activity]):
182 | if activities[0].type.is_personal():
183 | return True
184 | herzog = "הרצוג"
185 | yeshiva = """יש"ת"""
186 | descriptions = [activity.description for activity in activities if activity.description]
187 | return not any(description for description in descriptions if herzog in description or yeshiva in description)
188 |
189 | def _is_consist_classes_in_days(self, activities: List[Activity]):
190 | if activities[0].type.is_personal():
191 | return True
192 | return all(meeting.day in self.settings.show_only_classes_in_days
193 | for activity in activities for meeting in activity.meetings)
194 |
195 | def _is_consist_activities_ids_can_enroll(self, activities: List[Activity]):
196 | # Ignore personal activities
197 | if activities[0].type.is_personal():
198 | return True
199 |
200 | # Ignore activities not related to your degree
201 | parent_course_number = activities[0].parent_course_number
202 | if parent_course_number in self.courses_degrees:
203 | course_degrees = self.courses_degrees[parent_course_number]
204 | if self.settings.degrees - course_degrees and not {self.settings.degree} & course_degrees:
205 | return True
206 |
207 | all_activities_ids_found = all(activity.activity_id in self.activities_ids_groups for activity in activities)
208 | if not all_activities_ids_found:
209 | return False
210 |
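        # Build a small sub-problem: every activity id becomes a variable whose domain is the set of
        # enrollment groups it belongs to, and AllEqualConstraint requires one group shared by all of them.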
211 | problem = Problem()
212 | for activity in activities:
213 | problem.addVariable(activity.activity_id, list(self.activities_ids_groups[activity.activity_id]))
214 | problem.addConstraint(AllEqualConstraint())
215 | return problem.getSolution() is not None
216 |
217 | def _prepare_activities(self, activities: List[Activity]):
218 | problem = Problem()
219 | activities_by_name = Activity.get_activities_by_name(activities)
220 |
221 | for name, activities_values in activities_by_name.items():
222 | flat_activities_by_type = Activity.extract_flat_activities_by_type(activities_values)
223 | options_for_activity = Activity.extract_all_options_of_activity(flat_activities_by_type)
224 | problem.addVariable(name, options_for_activity)
225 |
226 | all_activities_names = list(activities_by_name.keys())
227 |
228 | return all_activities_names, problem
229 |
230 | def _extract_solutions(self, problem: Problem) -> List[Schedule]:
231 | activities_result = []
232 | schedule_result = []
233 | option_counter = 1
234 |
235 | for solution in problem.getSolutions():
236 | activities_result.clear()
237 | for activities_solution in solution.values():
238 | activities_result += activities_solution
239 | name = f"{_('Option')} {option_counter}"
240 | file_name = f"{_('option')}_{option_counter}"
241 | schedule = Schedule(name, file_name, "", activities_result.copy())
242 | schedule_result.append(schedule)
243 | option_counter += 1
244 |
245 | return schedule_result
246 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [2023] [Itamar Shalev]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/src/convertor/convertor.py:
--------------------------------------------------------------------------------
1 | import csv
2 | import functools
3 | import os
4 | import shutil
5 | import sys
6 | from concurrent.futures import ThreadPoolExecutor
7 | from pathlib import Path
8 | from typing import List, cast
9 | import warnings
10 | from multiprocessing import Pool
11 | from dataclasses import dataclass
12 |
13 | import pandas as pd
14 | import dataframe_image as dfi
15 |
16 | from src import utils
17 | from src.data.academic_activity import AcademicActivity
18 | from src.data.activity import Activity
19 | from src.data.language import Language
20 | from src.data.meeting import Meeting
21 | from src.data.output_format import OutputFormat
22 | from src.data.schedule import Schedule
23 | from src.data.type import Type
24 | from src.data.translation import _
25 | from src.data.day import Day
26 |
27 |
28 | @functools.total_ordering
29 | class MeetingClass:
30 | def __init__(self, meeting: Meeting, activity: Activity):
31 | self.meeting = meeting
32 | self.activity = activity
33 |
34 | def __str__(self):
35 | activity = self.activity
36 | activity_name = activity.name
37 | activity_type = activity.type
38 | activity_time = str(self.meeting)
39 | if activity_type is not Type.PERSONAL:
40 | academic_activity = cast(AcademicActivity, activity)
41 | lecturer_name = academic_activity.lecturer_name
42 | course_location = academic_activity.location
43 | lecturer_type = [_(str(activity_type)), lecturer_name]
44 | activity_id = academic_activity.activity_id
45 | if Language.get_current() is Language.HEBREW:
46 | lecturer_type.reverse()
47 | result = f"{activity_name}\n"
48 | result += " - ".join(lecturer_type)
49 | result += f"\n{activity_time}\n{activity_id}\n{course_location}"
50 | else:
51 | result = f"{activity_name}\n{activity_time}"
52 | return result
53 |
54 | def __lt__(self, other):
55 | return self.meeting < other.meeting
56 |
57 |
58 | @dataclass
59 | class Color:
60 | strong: str
61 | weak: str
62 |
63 |
64 | class Convertor:
65 |
66 |     # https://www.w3schools.com/colors/colors_picker.asp can help when picking new colors.
67 |     # Each Color entry holds a strong variant (used for lectures) and a weak variant (used for exercises).
68 |
69 | # Gray
70 | PERSONAL_COLOR = "#bdc3c7"
71 |
72 | COLORS = [
73 | # Light blue
74 | Color("#4d94ff", "#80b3ff"),
75 | # Light green
76 | Color("#79d279", "#9fdf9f"),
77 | # Purple
78 | Color("#b366ff", "#cc99ff"),
79 | # Red
80 | Color("#ff8080", "#ffb3b3"),
81 | # Light yellow
82 | Color("#f9e79f", "#fcf3cf"),
83 | # Weak light blue
84 | Color("#99e6e6", "#d6f5f5"),
85 | # Shiny light green
86 | Color("#80ff80", "#b3ffb3"),
87 | # Blue
88 | Color("#005ce6", "#1a75ff"),
89 | # Green
90 | Color("#339933", "#40bf40"),
91 | # Yellow
92 | Color("#cc9900", "#ffbf00"),
93 | ]
94 |
95 | def __init__(self):
96 | warnings.simplefilter(action='ignore', category=FutureWarning)
97 | self._activities_colors = {}
98 |         self._logger = utils.get_logging()
99 |
100 | def _init_activities_color_indexes(self, activities: List[Activity]):
101 | all_names = {activity.name for activity in activities if not activity.type.is_personal()}
102 | if all(name in self._activities_colors for name in all_names):
103 | return
104 | all_names = sorted(list(all_names))
105 | self._activities_colors = {}
106 | for index, name in enumerate(all_names):
107 | self._activities_colors[name] = Convertor.COLORS[index % len(Convertor.COLORS)]
108 |
109 | def _coloring(self, meeting_class):
110 | # White
111 | color = "#ffffff"
112 | if meeting_class:
113 | activity_type = meeting_class.activity.type
114 | activity_name = meeting_class.activity.name
115 | if activity_type.is_personal():
116 | color = Convertor.PERSONAL_COLOR
117 | elif activity_type.is_lecture():
118 | color = self._activities_colors[activity_name].strong
119 | elif activity_type.is_exercise():
120 | color = self._activities_colors[activity_name].weak
121 | return f'background-color: {color}'
122 |
123 | def _create_schedule_table(self, schedule):
124 | columns = [_(str(day)) for day in Day]
125 | all_days = list(Day)
126 | if Language.get_current() is Language.HEBREW:
127 | columns.reverse()
128 | all_days.reverse()
129 | week_table = {day.value: [] for day in all_days}
130 |
131 | _headers = [
132 | "activity_name",
133 | "activity_type",
134 | "lecturer_name",
135 | "course_location",
136 | "activity_id"
137 | ]
138 |
139 | for activity in schedule.activities:
140 | for meeting in activity.meetings:
141 | day_index = meeting.day.value
142 | week_table[day_index].append(MeetingClass(meeting, activity))
143 |
144 | max_length = max(len(day_meetings) for day_meetings in week_table.values())
145 | for day_meetings in week_table.values():
146 | day_meetings.sort()
147 | day_meetings += [None] * (max_length - len(day_meetings))
148 | table_set = {_(str(day)): week_table[day.value] for day in all_days}
149 |
150 | df = pd.DataFrame(table_set, columns=columns)
151 |
152 | df.fillna('', inplace=True)
153 |
154 | df_styled = df.style
155 | df_styled.map(self._coloring)
156 | df_styled.set_properties(**{'border': '1px black solid',
157 | 'text-align': 'center',
158 | 'white-space': 'pre-wrap'})
159 | table_style = {"selector": 'th', "props": [('text-align', 'center')]}
160 | df_styled.set_table_styles([table_style])
161 | df_styled.hide(axis="index")
162 |
163 | return df_styled
164 |
165 | def convert_activities_to_excel(self, schedules: List[Schedule], folder_location: Path):
166 | shutil.rmtree(folder_location, ignore_errors=True)
167 | folder_location.mkdir(parents=True)
168 | self._init_activities_color_indexes(schedules[0].activities)
169 | for schedule in schedules:
170 | data_frame = self._create_schedule_table(schedule)
171 | file_location = folder_location / f"{schedule.file_name}.{OutputFormat.EXCEL.value}"
172 |             # pylint: disable=abstract-class-instantiated
173 |             writer = pd.ExcelWriter(file_location, engine='xlsxwriter')
174 |             # The xlsxwriter engine is set on the writer so set_column below is available;
175 |             data_frame.to_excel(writer, index=False, sheet_name=schedule.name)
176 | for column in data_frame.columns:
177 | column_length = max(data_frame.data[column].astype(str).map(len).max(), len(column))
178 | column_length = min(column_length, 25)
179 | col_idx = data_frame.columns.get_loc(column)
180 | writer.sheets[schedule.name].set_column(col_idx, col_idx, column_length)
181 |
182 | writer.close()
183 |
184 | def convert_activities_to_png(self, schedules: List[Schedule], folder_path: Path):
185 | shutil.rmtree(folder_path, ignore_errors=True)
186 | folder_path.mkdir(parents=True)
187 | self._init_activities_color_indexes(schedules[0].activities)
188 | use_multiprocessing = sys.version_info <= (3, 12) or os.environ.get("multiprocessing", "F").lower() == "true"
189 |         self._logger.info(f"Use multiprocessing: {use_multiprocessing}")
190 |
191 | if use_multiprocessing:
192 | with Pool() as pool:
193 | pool.starmap(self.process_schedule, [(schedule, folder_path, self) for schedule in schedules])
194 | else:
195 | with ThreadPoolExecutor() as executor:
196 | path = folder_path
197 | futures = [executor.submit(self.process_schedule, schedule, path, self) for schedule in schedules]
198 | for future in futures:
199 | # Wait for all tasks to complete
200 | future.result()
201 |
202 | @staticmethod
203 | def process_schedule(schedule, folder_location, convertor):
204 | # pylint: disable=protected-access
205 | df = convertor._create_schedule_table(schedule)
206 | full_file_path = folder_location / f"{schedule.file_name}.{OutputFormat.IMAGE.value}"
207 | dfi.export(df, str(full_file_path), table_conversion="chrome")
208 |
209 | def convert_activities_to_csv(self, schedules: List[Schedule], folder_location: Path):
210 | shutil.rmtree(folder_location, ignore_errors=True)
211 | folder_location.mkdir(parents=True)
212 | self._init_activities_color_indexes(schedules[0].activities)
213 | headers = [
214 | _("activity name"),
215 | _("activity type"),
216 | _("day"),
217 | _("start time"),
218 | _("end time"),
219 | _("lecturer name"),
220 | _("course location"),
221 | _("activity id"),
222 | _("course id")
223 | ]
224 | rows = []
225 | for schedule in schedules:
226 | rows.clear()
227 | rows.append(headers.copy())
228 | for activity in schedule.activities:
229 | for meeting in activity.meetings:
230 | activity_type = _(str(activity.type))
231 | activity_day = _(str(meeting.day))
232 | start_time = meeting.get_string_start_time()
233 | end_time = meeting.get_string_end_time()
234 | if activity.type is not Type.PERSONAL:
235 | academic_activity = cast(AcademicActivity, activity)
236 | course_name = academic_activity.name
237 | course_location = academic_activity.location
238 | course_number = academic_activity.course_number
239 | new_row = [
240 | course_name,
241 | activity_type,
242 | activity_day,
243 | start_time,
244 | end_time,
245 | academic_activity.lecturer_name,
246 | course_location,
247 | academic_activity.activity_id,
248 | course_number
249 | ]
250 | rows.append(new_row)
251 | else:
252 |                         # Pad the remaining cells (lecturer, location, ids) - not relevant for a personal activity
253 | new_row = [
254 | activity.name,
255 | activity_type,
256 | activity_day,
257 | start_time,
258 | end_time]
259 | new_row += [None] * (len(headers) - len(new_row))
260 | rows.append(new_row)
261 |
262 | if Language.get_current() is Language.HEBREW:
263 | for row in rows:
264 | row.reverse()
265 |
266 | file_location = folder_location / f"{schedule.file_name}.{OutputFormat.CSV.value}"
267 | with open(file_location, 'w', encoding=utils.ENCODING, newline='') as file:
268 | writer = csv.writer(file, delimiter=',')
269 | writer.writerows(rows)
270 |
271 | def convert_activities(self, schedules: List[Schedule], folder_location: Path, formats: List[OutputFormat]):
272 | """
273 | The function will save each schedule in the folder location in the wanted formats.
274 | :param schedules: the schedules
275 | :param folder_location: the folder location
276 | :param formats: the formats
277 | :return:
278 | """
279 | if not schedules:
280 | return
281 |
282 | if OutputFormat.CSV in formats:
283 | if len(formats) == 1:
284 | csv_location = folder_location
285 | else:
286 | csv_location = folder_location / OutputFormat.CSV.name.lower()
287 | self.convert_activities_to_csv(schedules, csv_location)
288 |
289 | if OutputFormat.EXCEL in formats:
290 | if len(formats) == 1:
291 | excel_location = folder_location
292 | else:
293 | excel_location = folder_location / OutputFormat.EXCEL.name.lower()
294 | self.convert_activities_to_excel(schedules, excel_location)
295 |
296 | if OutputFormat.IMAGE in formats:
297 | if len(formats) == 1:
298 | png_location = folder_location
299 | else:
300 | png_location = folder_location / OutputFormat.IMAGE.name.lower()
301 | self.convert_activities_to_png(schedules, png_location)
302 |
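A minimal usage sketch of the Convertor API above (illustrative only: the Activity, Meeting and Schedule constructors mirror the ones exercised in the tests later in this dump, and the output path "results" is a hypothetical example):

    from pathlib import Path

    from src.convertor.convertor import Convertor
    from src.data.activity import Activity
    from src.data.day import Day
    from src.data.meeting import Meeting
    from src.data.output_format import OutputFormat
    from src.data.schedule import Schedule
    from src.data.type import Type

    # Build a tiny schedule with a single personal activity.
    activity = Activity("Private study", Type.PERSONAL, True)
    activity.add_slot(Meeting(Day.MONDAY, "09:00", "11:00"))
    schedule = Schedule("Option 0", "option_0", "", [activity])

    # Export it as CSV; with a single format the files land directly in the folder.
    # OutputFormat.EXCEL and OutputFormat.IMAGE work the same way (each format gets
    # its own sub-folder when several formats are requested).
    Convertor().convert_activities([schedule], Path("results"), [OutputFormat.CSV])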
--------------------------------------------------------------------------------
/tests/data/test_data.py:
--------------------------------------------------------------------------------
1 | import os
2 | import shutil
3 | from copy import copy
4 |
5 | import pytest
6 |
7 | from src import utils
8 | from src.data.academic_activity import AcademicActivity
9 | from src.data.activity import Activity
10 | from src.data.case_insensitive_dict import CaseInsensitiveDict, TextCaseInsensitiveDict
11 | from src.data.course_constraint import PrerequisiteCourse, ConstraintCourseData
12 | from src.data.degree import Degree
13 | from src.data.flow import Flow
14 | from src.data.language import Language
15 | from src.data.meeting import Meeting
16 | from src.data.course import Course
17 | from src.data.day import Day
18 | from src.data.output_format import OutputFormat
19 | from src.data.schedule import Schedule
20 | from src.data.semester import Semester
21 | from src.data.settings import Settings
22 | from src.data.type import Type
23 | from src.data.course_choice import CourseChoice
24 | from src.data import translation
25 | from src.data.translation import _
26 |
27 |
28 | class TestData:
29 |
30 | def test_meetings(self):
31 | meeting = Meeting(Day.MONDAY, "09:00", "11:00")
32 | meeting2 = Meeting(Day.MONDAY, "09:00", "10:00")
33 | meeting3 = Meeting(Day.MONDAY, "09:10", "10:00")
34 | meeting4 = Meeting(Day.MONDAY, "09:10", "20:00")
35 | meeting5 = Meeting(Day.MONDAY, "11:00", "20:00")
36 | meetings = [meeting2, meeting3, meeting4]
37 |
38 | assert meeting.get_string_start_time() == "09:00"
39 | assert meeting.get_string_end_time() == "11:00"
40 | assert all(meeting.is_crash_with_meeting(meeting_item) for meeting_item in meetings)
41 | assert not meeting5.is_crash_with_meeting(meeting)
42 |
43 | with pytest.raises(Exception):
44 | Meeting(Day.MONDAY, "11:00", "10:00")
45 |
46 | with pytest.raises(Exception):
47 | Meeting(Day.MONDAY, "11:00", "11:00")
48 |
49 | assert repr(meeting) == "09:00 - 11:00"
50 |
51 | assert [Day.MONDAY.value, "09:00", "11:00"] == [*meeting]
52 |
53 | meeting = Meeting(Day.SUNDAY, "14:30", "16:00")
54 | meeting1 = Meeting(Day.SUNDAY, "16:15", "17:00")
55 | meeting2 = Meeting(Day.SUNDAY, "17:00", "18:30")
56 | meeting3 = Meeting(Day.FRIDAY, "17:00", "18:30")
57 | meetings = [meeting2, meeting1, meeting3, meeting]
58 | assert sorted(meetings) == [meeting, meeting1, meeting2, meeting3]
59 |
60 | def test_course(self):
61 | course = Course("", 0, 0, set(Semester), set(Degree))
62 | course2 = Course("", 0, 0, Semester.ANNUAL, Degree.SOFTWARE_ENGINEERING)
63 | assert course == course2
64 | course3 = Course("", 0, 0, Semester.ANNUAL, Degree.SOFTWARE_ENGINEERING)
65 | course3.add_degrees(Degree.COMPUTER_SCIENCE)
66 | course3.add_degrees({Degree.SOFTWARE_ENGINEERING, Degree.COMPUTER_SCIENCE})
67 | assert len(course3.degrees) == 2
68 |
69 | course.set_attendance_required(Type.LAB, True)
70 | course.set_attendance_required(Type.LECTURE, False)
71 | assert course.is_attendance_required(Type.LAB)
72 | assert not course.is_attendance_required(Type.LECTURE)
73 |
74 | course.name = "name"
75 | assert repr(course) == "name"
76 | assert repr(Semester.SUMMER) == "Summer"
77 | assert repr(Day.MONDAY) == "Monday"
78 |
79 | course.add_semesters({Semester.SUMMER})
80 | course.add_semesters(Semester.ANNUAL)
81 | assert course.semesters == {Semester.SUMMER, Semester.ANNUAL, Semester.SPRING, Semester.FALL}
82 |
83 | course.add_mandatory(Degree.COMPUTER_SCIENCE)
84 | assert course.mandatory_degrees == {Degree.COMPUTER_SCIENCE}
85 |
86 | course = Course("0", 0, 0, set(Semester), set(Degree))
87 | course1 = Course("1", 0, 0, Semester.ANNUAL, Degree.SOFTWARE_ENGINEERING)
88 | assert list(sorted([course, course1])) == [course, course1]
89 |
90 | def test_activity(self):
91 | activity = Activity("", Type.LAB, False)
92 | activity.add_slot(Meeting(Day.MONDAY, "09:00", "11:00"))
93 | with pytest.raises(Exception):
94 | activity.add_slot(Meeting(Day.MONDAY, "09:00", "10:00"))
95 | assert activity.is_free_slot(Meeting(Day.MONDAY, "11:00", "12:00"))
96 | meetings = [Meeting(Day.MONDAY, "16:00", "18:00"), Meeting(Day.MONDAY, "18:00", "19:00")]
97 | activity.add_slots(meetings)
98 | assert not activity.is_crash_with_activities([])
99 |
100 | activity1 = Activity("", Type.LAB, True)
101 | assert not activity.is_crash_with_activity(activity1)
102 |
103 | assert activity1.no_meetings()
104 |
105 | activity.name = "name"
106 | assert repr(activity) == "name"
107 | assert hash(activity) == hash("name")
108 |
109 | def test_academic_activity(self):
110 | activity = AcademicActivity("name", activity_type=Type.LAB, course_number=10, parent_course_number=20)
111 | hash_attributes = (activity.name, activity.course_number, activity.parent_course_number, activity.activity_id)
112 | assert hash(activity) == hash(tuple([*hash_attributes]))
113 | course = Course("name", 10, 20, set(Semester), set(Degree))
114 | assert activity.same_as_course(course)
115 | activities = [activity]
116 |
117 | AcademicActivity.union_courses(activities, [course])
118 | assert repr(activity) == "name"
119 |
120 | activities = [
121 | AcademicActivity("name", Type.LECTURE, lecturer_name="a"),
122 | AcademicActivity("name", Type.SEMINAR, lecturer_name="b"),
123 | AcademicActivity("name", Type.PRACTICE, lecturer_name="d"),
124 | AcademicActivity("name", Type.LAB, lecturer_name="c"),
125 | ]
126 | loaded_data = AcademicActivity.create_courses_choices(activities)
127 | assert len(loaded_data) == 1
128 | assert "name" in loaded_data
129 | assert set(loaded_data["name"].available_teachers_for_lecture) == {"a", "b"}
130 | assert set(loaded_data["name"].available_teachers_for_practice) == {"c", "d"}
131 |
132 | activity1 = AcademicActivity("name1", Type.LAB, True, lecturer_name="a")
133 | activity2 = AcademicActivity("name1", Type.LAB, True, lecturer_name="b")
134 | course_choice = CourseChoice("name1", 10, {"a"}, {"b"}, False, False)
135 | AcademicActivity.union_attendance_required([activity1, activity2], {"name1": course_choice})
136 | assert not activity1.attendance_required
137 | assert not activity2.attendance_required
138 |
139 | def test_type(self):
140 | typ = Type.LAB
141 | assert typ == Type.LAB
142 | assert typ.is_exercise()
143 | assert repr(typ) == "Lab"
144 |
145 | typ = Type.PERSONAL
146 | assert typ == Type.PERSONAL
147 | assert typ.is_personal()
148 |
149 | def test_course_choices(self):
150 | course_choice = CourseChoice("A", 1, set(), set())
151 | assert course_choice.name == "A"
152 | assert hash(course_choice) == hash("A")
153 |
154 | def test_schedule(self):
155 | standby_time_in_minutes = 0
156 |
157 | meeting = Meeting(Day.MONDAY, "09:00", "11:00")
158 | meeting3 = Meeting(Day.MONDAY, "11:10", "12:00")
159 | meeting2 = Meeting(Day.MONDAY, "18:00", "20:00")
160 | standby_time_in_minutes += 6 * 60
161 | meeting4 = Meeting(Day.FRIDAY, "09:10", "20:00")
162 | meeting5 = Meeting(Day.SUNDAY, "11:00", "20:00")
163 | meeting7 = Meeting(Day.THURSDAY, "19:00", "20:20")
164 | meetings = [meeting, meeting2, meeting3, meeting4, meeting5, meeting7]
165 | activity = Activity("name", Type.LAB, False)
166 | activity.add_slots(meetings)
167 |
168 | activity2 = Activity("name2", Type.LAB, False)
169 | meeting6 = Meeting(Day.THURSDAY, "21:00", "22:00")
170 | standby_time_in_minutes += 40
171 | activity2.add_slot(meeting6)
172 | meetings.append(meeting6)
173 |
174 | schedule = Schedule("name", "file_name", "description", [activity, activity2])
175 |
176 | assert schedule.get_all_academic_meetings() == meetings
177 | assert schedule.get_all_meetings_by_day(Day.MONDAY) == {meeting, meeting3, meeting2}
178 | assert schedule.get_learning_days() == {Day.SUNDAY, Day.MONDAY, Day.THURSDAY, Day.FRIDAY}
179 | assert schedule.get_standby_in_minutes() == standby_time_in_minutes
180 | assert repr(schedule) == "name"
181 |
182 | copied_schedule = copy(schedule)
183 | assert copied_schedule == schedule
184 | assert hash(copied_schedule) == hash(schedule)
185 |
186 | def test_sort_meeting(self):
187 | meeting = Meeting(Day.MONDAY, "09:00", "11:00")
188 | meeting2 = Meeting(Day.MONDAY, "18:00", "20:00")
189 | meeting3 = Meeting(Day.MONDAY, "11:10", "12:00")
190 | meeting4 = Meeting(Day.FRIDAY, "09:10", "20:00")
191 | meeting5 = Meeting(Day.SUNDAY, "11:00", "20:00")
192 | meetings = [meeting, meeting2, meeting3, meeting4, meeting5]
193 | meetings.sort()
194 | assert meetings == [meeting5, meeting, meeting3, meeting2, meeting4]
195 |
196 | def test_language(self):
197 | language = Language.ENGLISH
198 | assert repr(language) == "english"
199 | assert language.short_name() == "en"
200 |
201 | Language.set_current(Language.ENGLISH)
202 | assert _("Test") == "Test"
203 |
204 | Language.set_current(Language.HEBREW)
205 | assert _("Test") == "בדיקה"
206 |
207 | assert translation.translate("Test") == translation._("Test")
208 |
209 | assert repr(Language.ENGLISH) == "english"
210 | assert Language.contains("EnglISh")
211 | assert not Language.contains("France")
212 |
213 | assert Language.from_str("engLISH") is Language.ENGLISH
214 | assert Language.from_str("1") is Language.ENGLISH
215 | assert Language.from_str("en") is Language.ENGLISH
216 | with pytest.raises(ValueError):
217 | Language.from_str("France")
218 |
219 | Language.set_current(Language.ENGLISH)
220 | assert Language.get_current() == Language.ENGLISH
221 | Language.set_current(Language.HEBREW)
222 | assert Language.get_current() == Language.HEBREW
223 | assert Language.contains("Hebrew")
224 | assert Language.get_default() == Language.HEBREW
225 | assert not Language.contains("France")
226 | assert "English" in language
227 | with pytest.raises(ValueError):
228 | Language.from_str(20)
229 | with pytest.raises(ValueError):
230 | Language.from_str("fr")
231 | with pytest.raises(TypeError):
232 | Language.from_str(None)
233 | with pytest.raises(TypeError):
234 | Language.set_current("France")
235 |
236 | def test_case_insensitive_dict(self):
237 | case_insensitive_dict = CaseInsensitiveDict()
238 | case_insensitive_dict["A"] = 1
239 | assert case_insensitive_dict["a"] == 1
240 | assert case_insensitive_dict["A"] == 1
241 | assert case_insensitive_dict.get("A") == 1
242 | with pytest.raises(KeyError):
243 | _var = case_insensitive_dict["b"]
244 | assert case_insensitive_dict.get("B") is None
245 | assert case_insensitive_dict.get("B", 2) == 2
246 | assert "A" in case_insensitive_dict
247 | assert "B" not in case_insensitive_dict
248 | assert len(case_insensitive_dict) == 1
249 | del case_insensitive_dict["A"]
250 | assert len(case_insensitive_dict) == 0
251 | case_insensitive_dict["A"] = 1
252 | case_insensitive_dict.pop("A")
253 | assert len(case_insensitive_dict) == 0
254 | case_insensitive_dict["A"] = 1
255 | case_insensitive_dict.popitem()
256 | assert len(case_insensitive_dict) == 0
257 | case_insensitive_dict["A"] = 1
258 | case_insensitive_dict.clear()
259 | assert len(case_insensitive_dict) == 0
260 | case_insensitive_dict["A"] = 1
261 | case_insensitive_dict.setdefault("A", 2)
262 | assert case_insensitive_dict["A"] == 1
263 | case_insensitive_dict.update({"A": 2})
264 | assert case_insensitive_dict["A"] == 2
265 |
266 | case_insensitive_dict = TextCaseInsensitiveDict({"? ASA,= - ?": 1, 2: 3})
267 | assert case_insensitive_dict["AsA"] == 1
268 | assert case_insensitive_dict[2] == 3
269 |
270 | def test_utils(self):
271 | assert utils.get_logging()
272 | assert utils.get_custom_software_name() == "semester_organizer_lev"
273 | assert utils.get_course_data_test().parent_course_number == 318
274 | assert utils.windows_path_to_unix("C:\\path\\to") == "/c/path/to"
275 | test_folder = os.path.join(utils.get_database_path(), "test_folder")
276 | test_file = os.path.join(test_folder, "test_file.txt")
277 | shutil.rmtree(test_folder, ignore_errors=True)
278 | os.mkdir(test_folder)
279 | assert utils.get_last_modified_by_days(test_folder) == 0
280 | with open(test_file, "w") as file:
281 | file.write("test")
282 | assert utils.count_files_and_directory(test_folder) == (1, 0)
283 | shutil.rmtree(test_folder, ignore_errors=True)
284 | assert utils.get_last_modified_by_days(test_folder) == 0
285 | assert utils.get_results_path()
286 | assert utils.convert_year(2021, Language.HEBREW) == 5781
287 | assert utils.convert_year(5781, Language.ENGLISH) == 2021
288 |
289 | def test_degree(self):
290 | degrees = set()
291 | degrees.add(Degree.COMPUTER_SCIENCE)
292 | degrees.add(Degree.SOFTWARE_ENGINEERING)
293 | assert repr(Degree.COMPUTER_SCIENCE) == "Computer Science"
294 | assert len(Degree.get_defaults()) == 2
295 | assert set(Degree) == {Degree.COMPUTER_SCIENCE, Degree.SOFTWARE_ENGINEERING, Degree.BIOINFORMATICS}
296 | assert ["COMPUTER_SCIENCE", 20] == [*Degree.COMPUTER_SCIENCE]
297 | assert Degree.COMPUTER_SCIENCE == Degree["COMPUTER_SCIENCE"]
298 | assert Degree.SOFTWARE_ENGINEERING.value.track_names
299 |
300 | def test_flow_enum(self):
301 | flow = Flow.CONSOLE
302 | assert flow is Flow.CONSOLE
303 | assert str(flow) == "console"
304 | assert flow.from_str("COnsole") is Flow.CONSOLE
305 | assert flow.from_str("1") is Flow.CONSOLE
306 | assert flow.from_str(1) is Flow.CONSOLE
307 | with pytest.raises(ValueError):
308 | flow.from_str("18")
309 | with pytest.raises(ValueError):
310 | flow.from_str("NonExist")
311 | assert str(Flow.CONSOLE) == "console"
312 | assert repr(Flow.CONSOLE) == "console"
313 |
314 | def test_settings(self):
315 | # pylint: disable=no-member
316 | excepted_settings = Settings()
317 | excepted_settings.degrees = Degree.get_defaults()
318 | json_settings = Settings().to_json()
319 | settings = Settings.from_json(json_settings)
320 | assert settings == Settings()
321 | assert settings.degrees == Degree.get_defaults()
322 | settings.degree = Degree.COMPUTER_SCIENCE
323 | assert settings.degree == Degree.COMPUTER_SCIENCE
324 |
325 | def test_prerequisite_course(self):
326 | object_data = PrerequisiteCourse(id=1, course_number=20, name="Name", can_be_taken_in_parallel=True)
327 | json_data = object_data.to_json(include_can_be_taken_in_parallel=True)
328 | assert "can_be_taken_in_parallel" in json_data
329 |
330 | other_object_data = PrerequisiteCourse(id=1, course_number=11, name="aa", can_be_taken_in_parallel=False)
331 | set_data = {other_object_data, object_data}
332 | assert len(set_data) == 1, "ERROR: Hash doesn't work on the id."
333 |
334 | def test_constraint_course_data(self):
335 | object_data = ConstraintCourseData(id=20, course_number=20, name="Name")
336 | other_object_data = ConstraintCourseData(id=20, course_number=33, name="aa")
337 | assert hash(object_data)
338 | assert object_data == other_object_data, "ERROR: Compare doesn't compare the id alone."
339 |
340 | def test_year(self):
341 | assert utils.get_current_hebrew_year() > 5700
342 | assert "תשפ" in utils.get_current_hebrew_name()
343 |
344 | def test_output_format(self):
345 | assert repr(OutputFormat.IMAGE) == "image"
346 | assert OutputFormat.IMAGE == OutputFormat["IMAGE"]
347 | assert OutputFormat.IMAGE.value == "png"
348 |
--------------------------------------------------------------------------------
/tests/algorithms/test_csp.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from src.algorithms.csp import CSP, Status
4 | from src.data.academic_activity import AcademicActivity
5 | from src.data.activity import Activity
6 | from src.data.course_choice import CourseChoice
7 | from src.data.day import Day
8 | from src.data.degree import Degree
9 | from src.data.meeting import Meeting
10 | from src.data.schedule import Schedule
11 | from src.data.settings import Settings
12 | from src.data.type import Type
13 |
14 |
15 | class TestCsp:
16 |
17 | def test_is_consist_itself_option(self):
18 | activities = []
19 | csp = CSP()
20 | academic_activity = AcademicActivity("a", Type.LECTURE, True, "a", 1, 1, "a", "1")
21 | academic_activity.add_slot(Meeting(Day.SUNDAY, "10:00", "11:00"))
22 | activities.append(academic_activity)
23 |
24 | academic_activity = AcademicActivity("a", Type.LAB, True, "a", 1, 1, "a", "2")
25 | academic_activity.add_slot(Meeting(Day.SUNDAY, "10:20", "11:00"))
26 | activities.append(academic_activity)
27 |
28 | activities_ids_groups = {
29 | "1": {1},
30 | "2": {2}
31 | }
32 |
33 | for schedules in [csp.extract_schedules(activities),
34 | csp.extract_schedules_minimal_consists(activities, activities_ids_groups)]:
35 | assert len(schedules) == 0
36 |
37 | def test_zero_from_one_option(self):
38 | activities = []
39 | csp = CSP()
40 | academic_activity = AcademicActivity("a", Type.LECTURE, True, "a", 1, 1, "a", "1")
41 | academic_activity.add_slot(Meeting(Day.SUNDAY, "10:00", "11:00"))
42 | with pytest.raises(RuntimeError):
43 | academic_activity.add_slot(Meeting(Day.SUNDAY, "10:20", "10:40"))
44 | activities.append(academic_activity)
45 |
46 | academic_activity = AcademicActivity("b", Type.LECTURE, True, "b", 1, 1, "b", "2")
47 | academic_activity.add_slot(Meeting(Day.SUNDAY, "10:30", "14:30"))
48 | activities.append(academic_activity)
49 |
50 | activities_ids_groups = {
51 | "1": {1},
52 | "2": {1},
53 | }
54 |
55 | for schedules in [csp.extract_schedules(activities),
56 | csp.extract_schedules_minimal_consists(activities, activities_ids_groups)]:
57 | assert len(schedules) == 0
58 |
59 | def test_one_from_one_option(self):
60 | activities = []
61 | csp = CSP()
62 | academic_activity = AcademicActivity("a", Type.LECTURE, True, "a", 1, 1, "a", "1")
63 | academic_activity.add_slot(Meeting(Day.SUNDAY, "10:00", "11:00"))
64 | activities.append(academic_activity)
65 |
66 | academic_activity = AcademicActivity("b", Type.LAB, True, "b", 1, 1, "b", "2")
67 | academic_activity.add_slot(Meeting(Day.SUNDAY, "12:00", "14:30"))
68 | activities.append(academic_activity)
69 |
70 | activity = Activity("c", Type.PERSONAL, True)
71 | activity.add_slot(Meeting(Day.MONDAY, "12:00", "14:30"))
72 | activities.append(activity)
73 |
74 | schedule = Schedule("Option 0", "option_0", "", activities)
75 | activities_ids_groups = {
76 | "1": {1},
77 | "2": {1},
78 | }
79 |
80 | for schedules in [csp.extract_schedules(activities),
81 | csp.extract_schedules_minimal_consists(activities, activities_ids_groups)]:
82 | assert len(schedules) == 1
83 | assert any(schedule.contains(activities) for schedule in schedules)
84 | assert schedule in schedules
85 |
86 | def test_two_from_two_options(self):
87 | activities_option_1 = []
88 | activities_option_2 = []
89 |
90 | activities = []
91 | academic_activity = AcademicActivity("a", Type.LECTURE, True, "a", 1, 1, "a")
92 | academic_activity.add_slot(Meeting(Day.SUNDAY, Meeting.str_to_time("10:00"), Meeting.str_to_time("11:00")))
93 | activities.append(academic_activity)
94 | activities_option_1.append(academic_activity)
95 |
96 | academic_activity = AcademicActivity("a", Type.LECTURE, True, "a", 2, 2, "a")
97 | academic_activity.add_slot(Meeting(Day.SUNDAY, Meeting.str_to_time("12:00"), Meeting.str_to_time("14:30")))
98 | activities.append(academic_activity)
99 | activities_option_2.append(academic_activity)
100 |
101 | activity = Activity("c", Type.PERSONAL, True)
102 | activity.add_slot(Meeting(Day.MONDAY, Meeting.str_to_time("12:00"), Meeting.str_to_time("14:30")))
103 | activities.append(activity)
104 | activities_option_1.append(activity)
105 | activities_option_2.append(activity)
106 |
107 | schedules = CSP().extract_schedules(activities)
108 | assert len(schedules) == 2
109 | assert any(schedule.contains(activities_option_1) for schedule in schedules)
110 | assert any(schedule.contains(activities_option_2) for schedule in schedules)
111 |
112 | def test_one_from_two_options(self):
113 | activities_option_1 = []
114 |
115 | activities = []
116 | academic_activity = AcademicActivity("a", Type.LECTURE, True, "a", 1, 1, "a")
117 | academic_activity.add_slot(Meeting(Day.SUNDAY, Meeting.str_to_time("10:00"), Meeting.str_to_time("11:00")))
118 | activities.append(academic_activity)
119 | activities_option_1.append(academic_activity)
120 |
121 | academic_activity = AcademicActivity("a", Type.PRACTICE, True, "a", 2, 2, "a")
122 | academic_activity.add_slot(Meeting(Day.SUNDAY, Meeting.str_to_time("12:00"), Meeting.str_to_time("14:30")))
123 | activities.append(academic_activity)
124 | activities_option_1.append(academic_activity)
125 |
126 | academic_activity = AcademicActivity("a", Type.PRACTICE, True, "a", 2, 2, "a")
127 | academic_activity.add_slot(Meeting(Day.MONDAY, Meeting.str_to_time("13:00"), Meeting.str_to_time("13:30")))
128 | activities.append(academic_activity)
129 |
130 | activity = Activity("c", Type.PERSONAL, True)
131 | activity.add_slot(Meeting(Day.MONDAY, Meeting.str_to_time("12:00"), Meeting.str_to_time("14:30")))
132 | activities.append(activity)
133 | activities_option_1.append(activity)
134 |
135 | schedules = CSP().extract_schedules(activities)
136 | assert len(schedules) == 1
137 | assert any(schedule.contains(activities_option_1) for schedule in schedules)
138 |
139 | def test_no_option(self):
140 | activities = []
141 | csp = CSP()
142 | academic_activity = AcademicActivity("a", Type.LECTURE, True, "a", 1, 1, "a")
143 | academic_activity.add_slot(Meeting(Day.SUNDAY, "10:00", "11:00"))
144 | activities.append(academic_activity)
145 |
146 | academic_activity = AcademicActivity("a", Type.LAB, True, "a", 2, 2, "a")
147 | academic_activity.add_slot(Meeting(Day.MONDAY, "12:00", "13:30"))
148 | activities.append(academic_activity)
149 |
150 | activity = Activity("c", Type.PERSONAL, True)
151 | activity.add_slot(Meeting(Day.MONDAY, "12:00", "14:30"))
152 | activities.append(activity)
153 |
154 | schedules = csp.extract_schedules(activities)
155 | assert len(schedules) == 0
156 | assert csp.get_status() is Status.FAILED
157 |
158 | schedules = csp.extract_schedules_minimal_consists(activities)
159 | assert len(schedules) == 0
160 | assert csp.get_status() is Status.FAILED
161 | assert csp.get_last_activities_crashed() == ("a", "c")
162 |
163 | def test_one_option_favorite_one_teacher(self):
164 | activities_option = []
165 |
166 | activities = []
167 | academic_activity = AcademicActivity("a", Type.LECTURE, True, "a", 1, 1, "a")
168 | academic_activity.add_slot(Meeting(Day.SUNDAY, "10:00", "11:00"))
169 | activities.append(academic_activity)
170 |
171 | academic_activity = AcademicActivity("a", Type.LECTURE, True, "Mike", 2, 2, "a")
172 | academic_activity.add_slot(Meeting(Day.SUNDAY, "15:00", "17:30"))
173 | activities.append(academic_activity)
174 | activities_option.append(academic_activity)
175 |
176 | activity = Activity("c", Type.PERSONAL, True)
177 | activity.add_slot(Meeting(Day.MONDAY, "12:00", "14:30"))
178 | activities.append(activity)
179 | activities_option.append(activity)
180 |
181 | course_choice = CourseChoice("a", 1, {"Mike"}, set())
182 |
183 | schedules = CSP().extract_schedules(activities, {"a": course_choice})
184 | assert len(schedules) == 1
185 | assert schedules[0].contains(activities_option)
186 |
187 | def test_one_option_no_options_for_favorite_teacher(self):
188 | activities_option = []
189 |
190 | activities = []
191 | academic_activity = AcademicActivity("a", Type.LECTURE, True, "a", 1, 1, "a")
192 | academic_activity.add_slot(Meeting(Day.SUNDAY, "10:00", "11:00"))
193 | activities.append(academic_activity)
194 | activities_option.append(academic_activity)
195 |
196 | academic_activity = AcademicActivity("a", Type.LECTURE, True, "Mike", 2, 2, "a")
197 | academic_activity.add_slot(Meeting(Day.MONDAY, "12:00", "14:30"))
198 | activities.append(academic_activity)
199 |
200 | activity = Activity("c", Type.PERSONAL, True)
201 | activity.add_slot(Meeting(Day.MONDAY, "12:00", "14:30"))
202 | activities.append(activity)
203 | activities_option.append(activity)
204 |
205 | course_choice = CourseChoice("a", 1, {"Mike"}, set())
206 |
207 | schedules = CSP().extract_schedules(activities, {"a": course_choice})
208 | assert len(schedules) == 1
209 | assert schedules[0].contains(activities_option)
210 |
211 | def test_one_option_only_parts_options_for_favorite_teacher(self):
212 | activities_option = []
213 | csp = CSP()
214 |
215 | activities = []
216 | academic_activity = AcademicActivity("a", Type.LECTURE, True, "a", 1, 1, "a")
217 | academic_activity.add_slot(Meeting(Day.SUNDAY, "10:00", "11:00"))
218 | activities.append(academic_activity)
219 | activities_option.append(academic_activity)
220 |
221 | academic_activity = AcademicActivity("a", Type.PRACTICE, True, "Mike", 2, 2, "a")
222 | academic_activity.add_slot(Meeting(Day.SUNDAY, "12:00", "14:30"))
223 | activities.append(academic_activity)
224 | activities_option.append(academic_activity)
225 |
226 | academic_activity = AcademicActivity("a", Type.PRACTICE, True, "a", 2, 2, "a")
227 | academic_activity.add_slot(Meeting(Day.MONDAY, "18:00", "20:30"))
228 | activities.append(academic_activity)
229 |
230 | activity = Activity("c", Type.PERSONAL, True)
231 | activity.add_slot(Meeting(Day.MONDAY, "12:00", "14:30"))
232 | activities.append(activity)
233 | activities_option.append(activity)
234 |
235 | course_choice = CourseChoice("a", 1, set(), {"Mike"})
236 |
237 | schedules = csp.extract_schedules(activities, {"a": course_choice})
238 | assert len(schedules) == 1
239 | assert schedules[0].contains(activities_option)
240 | assert csp.get_status() is Status.SUCCESS
241 |
242 | def test_one_option_capacity_consist(self):
243 | activities_option = []
244 |
245 | activities = []
246 | academic_activity = AcademicActivity("a", Type.LECTURE, True, "a", 1, 1, "a")
247 | academic_activity.add_slot(Meeting(Day.SUNDAY, "10:00", "11:00"))
248 | activities.append(academic_activity)
249 | activities_option.append(academic_activity)
250 |
251 | academic_activity = AcademicActivity("a", Type.PRACTICE, True, "a", 2, 2, "a")
252 | academic_activity.set_capacity(13, 30)
253 | academic_activity.add_slot(Meeting(Day.SUNDAY, "12:00", "14:30"))
254 | activities.append(academic_activity)
255 | activities_option.append(academic_activity)
256 |
257 | academic_activity = AcademicActivity("a", Type.PRACTICE, True, "a", 2, 2, "a")
258 | academic_activity.set_capacity(10, 10)
259 | academic_activity.add_slot(Meeting(Day.MONDAY, "18:00", "20:30"))
260 | activities.append(academic_activity)
261 |
262 | activity = Activity("c", Type.PERSONAL, True)
263 | activity.add_slot(Meeting(Day.MONDAY, "12:00", "14:30"))
264 | activities.append(activity)
265 | activities_option.append(activity)
266 |
267 | settings = Settings()
268 | settings.show_only_courses_with_free_places = True
269 |
270 | schedules = CSP().extract_schedules(activities, settings=settings)
271 | assert len(schedules) == 1
272 | assert schedules[0].contains(activities_option)
273 |
274 | def test_two_options_by_actual_course_number(self):
275 | activities_option_1 = []
276 | activities_option_2 = []
277 | activities = []
278 |
279 | academic_activity = AcademicActivity("a", Type.LECTURE, True, "a", 1, 1, "a")
280 | academic_activity.actual_course_number = 1
281 | academic_activity.add_slot(Meeting(Day.SUNDAY, "10:00", "11:00"))
282 | activities.append(academic_activity)
283 | activities_option_1.append(academic_activity)
284 |
285 | academic_activity = AcademicActivity("a", Type.LECTURE, True, "a", 1, 1, "a")
286 | academic_activity.actual_course_number = 2
287 | academic_activity.add_slot(Meeting(Day.FRIDAY, "10:00", "11:00"))
288 | activities.append(academic_activity)
289 | activities_option_2.append(academic_activity)
290 |
291 | academic_activity = AcademicActivity("a", Type.PRACTICE, True, "a", 2, 2, "a")
292 | academic_activity.actual_course_number = 1
293 | academic_activity.add_slot(Meeting(Day.MONDAY, "12:00", "14:30"))
294 | activities.append(academic_activity)
295 | activities_option_1.append(academic_activity)
296 |
297 | academic_activity = AcademicActivity("a", Type.PRACTICE, True, "a", 2, 2, "a")
298 | academic_activity.actual_course_number = 2
299 | academic_activity.add_slot(Meeting(Day.MONDAY, "18:00", "20:30"))
300 | activities.append(academic_activity)
301 | activities_option_2.append(academic_activity)
302 |
303 | activity = Activity("c", Type.PERSONAL, True)
304 | activity.add_slot(Meeting(Day.THURSDAY, "12:00", "14:30"))
305 | activities.append(activity)
306 | activities_option_1.append(activity)
307 |         activities_option_2.append(activity)
308 |
309 | settings = Settings()
310 | settings.show_only_courses_with_the_same_actual_number = True
311 |
312 | schedules = CSP().extract_schedules(activities, settings=settings)
313 | assert len(schedules) == 2
314 | assert any(schedule.contains(activities_option_1) for schedule in schedules)
315 | assert any(schedule.contains(activities_option_2) for schedule in schedules)
316 |
317 | def test_only_activities_ids_can_enroll(self):
318 | activities_ids_can_enroll = {
319 | "1": {1, 2},
320 | "2": {1, 2},
321 | "3": {1},
322 | }
323 | activities_option_1 = []
324 | activities_option_2 = []
325 | activities = []
326 | csp = CSP()
327 |
328 | academic_activity = AcademicActivity("a", Type.LECTURE, True, "a", 1, 1, activity_id="1")
329 | academic_activity.add_slot(Meeting(Day.SUNDAY, "10:00", "11:00"))
330 | activities.append(academic_activity)
331 | activities_option_1.append(academic_activity)
332 |
333 | academic_activity = AcademicActivity("a", Type.LECTURE, True, "a", 1, 1, "a", activity_id="5")
334 | academic_activity.add_slot(Meeting(Day.MONDAY, "10:00", "11:00"))
335 | activities.append(academic_activity)
336 |
337 | academic_activity = AcademicActivity("a", Type.PRACTICE, True, "a", 2, 2, "a", activity_id="2")
338 | academic_activity.add_slot(Meeting(Day.THURSDAY, "12:00", "14:30"))
339 | activities.append(academic_activity)
340 | activities_option_2.append(academic_activity)
341 |
342 | academic_activity = AcademicActivity("a", Type.PRACTICE, True, "a", 2, 2, "a", activity_id="3")
343 | academic_activity.add_slot(Meeting(Day.TUESDAY, "18:00", "20:30"))
344 | activities.append(academic_activity)
345 | activities_option_1.append(academic_activity)
346 |
347 | activity = Activity("c", Type.PERSONAL, True)
348 | activity.add_slot(Meeting(Day.WEDNESDAY, "12:00", "14:30"))
349 | activities.append(activity)
350 | activities_option_1.append(activity)
351 | activities_option_2.append(activity)
352 |
353 | settings = Settings()
354 | settings.show_only_classes_in_days = [Day.SUNDAY, Day.MONDAY, Day.THURSDAY, Day.TUESDAY, Day.WEDNESDAY]
355 | settings.show_only_classes_can_enroll = True
356 |
357 | schedules = csp.extract_schedules(activities, None, settings, activities_ids_can_enroll)
358 | assert len(schedules) == 2
359 | assert any(schedule.contains(activities_option_1) for schedule in schedules)
360 | assert any(schedule.contains(activities_option_2) for schedule in schedules)
361 |
362 | courses_degrees = {
363 | 1: {Degree.SOFTWARE_ENGINEERING},
364 | }
365 | schedules = csp.extract_schedules(activities, None, settings, activities_ids_can_enroll, courses_degrees)
366 | assert len(schedules) == 4
367 |
--------------------------------------------------------------------------------