├── .env.example
├── .gitignore
├── Dockerfile
├── README.md
├── app
│   ├── __init__.py
│   ├── config.py
│   ├── epg
│   │   ├── EpgGenerator.py
│   │   └── __init__.py
│   ├── epg_platform
│   │   ├── Astro.py
│   │   ├── CN_epg_pw.py
│   │   ├── HOY.py
│   │   ├── Hami.py
│   │   ├── MyTvSuper.py
│   │   ├── NowTV.py
│   │   ├── RTHK.py
│   │   ├── Starhub.py
│   │   ├── __init__.py
│   │   ├── base.py
│   │   └── platform_template.py
│   ├── file_manager.py
│   ├── http_client.py
│   ├── logger.py
│   ├── main.py
│   └── utils.py
├── build_and_push.sh
├── docker-compose.example.yml
└── requirements.txt
/.env.example:
--------------------------------------------------------------------------------
 1 | # CharmingEPG Configuration Template
 2 | # Copy this file to .env and modify the values as needed
 3 | 
 4 | # ============================================================================
 5 | # Application Settings
 6 | # ============================================================================
 7 | 
 8 | # Logging configuration
 9 | LOG_LEVEL=INFO
10 | LOG_FILE=runtime.log
11 | LOG_ROTATION=10 MB
12 | LOG_RETENTION=7 days
13 | 
14 | # ============================================================================
15 | # EPG Settings
16 | # ============================================================================
17 | # Cache TTL for the EPG API responses, in seconds
18 | EPG_CACHE_TTL=3600
19 | 
20 | # Update interval in minutes (default: 10)
21 | EPG_UPDATE_INTERVAL=10
22 | 
23 | # EPG files base directory (default: epg_files)
24 | EPG_BASE_DIR=epg_files
25 | 
26 | # Timezone for EPG data (default: Asia/Shanghai)
27 | EPG_TIMEZONE=Asia/Shanghai
28 | 
29 | # ============================================================================
30 | # Platform Enable/Disable Settings
31 | # ============================================================================
32 | # Set to true/false, 1/0, yes/no, or on/off to enable/disable platforms
33 | 
34 | # China (epg.pw) - Mainland China channels
35 | EPG_ENABLE_CN=true
36 | 
37 | # MyTV Super (TVB) - Hong Kong TVB channels
38 | EPG_ENABLE_TVB=true
39 | 
40 | # NowTV - Hong Kong NowTV channels
41 | EPG_ENABLE_NOWTV=true
42 | 
43 | # Hami Video - Taiwan channels
44 | EPG_ENABLE_HAMI=true
45 | 
46 | # Astro Go - Malaysia channels
47 | EPG_ENABLE_ASTRO=true
48 | 
49 | # RTHK - Hong Kong public broadcaster
50 | EPG_ENABLE_RTHK=true
51 | 
52 | # HOY TV - Hong Kong HOY channels
53 | EPG_ENABLE_HOY=true
54 | 
55 | # StarHub - Singapore channels
56 | EPG_ENABLE_STARHUB=true
57 | 
58 | # ============================================================================
59 | # HTTP Client Settings
60 | # ============================================================================
61 | 
62 | # HTTP request timeout in seconds (default: 30)
63 | HTTP_TIMEOUT=30
64 | 
65 | # Maximum number of retries for failed requests (default: 3)
66 | HTTP_MAX_RETRIES=3
67 | 
68 | # Retry backoff multiplier (default: 2.0)
69 | HTTP_RETRY_BACKOFF=2.0
70 | 
71 | # ============================================================================
72 | # Proxy Settings (Optional)
73 | # ============================================================================
74 | # Uncomment and configure if you need to use a proxy
75 | 
76 | # HTTP proxy URL
77 | # PROXY_HTTP=http://proxy.example.com:8080
78 | 
79 | # HTTPS proxy URL
80 | # PROXY_HTTPS=http://proxy.example.com:8080
81 | 
82 | # ============================================================================
83 | # Development Settings
84 | # ============================================================================
85 | 
86 | # Set to true to enable FastAPI auto-reload (development only)
87 | # FASTAPI_RELOAD=false
88 | 
89 | # Set to true to enable FastAPI OpenAPI docs (development only)
90 | # FASTAPI_DOCS=false
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
  1 | ### Python template
  2 | # Byte-compiled / optimized / DLL files
  3 | __pycache__/
  4 | *.py[cod]
  5 | *$py.class
  6 | 
  7 | # C extensions
  8 | *.so
  9 | 
 10 | # Distribution / packaging
 11 | .Python
 12 | build/
 13 | develop-eggs/
 14 | dist/
 15 | downloads/
 16 | eggs/
 17 | .eggs/
 18 | lib/
 19 | lib64/
 20 | parts/
 21 | sdist/
 22 | var/
 23 | wheels/
 24 | share/python-wheels/
 25 | *.egg-info/
 26 | .installed.cfg
 27 | *.egg
 28 | MANIFEST
 29 | 
 30 | # PyInstaller
 31 | #  Usually these files are written by a python script from a template
 32 | #  before PyInstaller builds the exe, so as to inject date/other infos into it.
 33 | *.manifest
 34 | *.spec
 35 | 
 36 | # Installer logs
 37 | pip-log.txt
 38 | pip-delete-this-directory.txt
 39 | 
 40 | # Unit test / coverage reports
 41 | htmlcov/
 42 | .tox/
 43 | .nox/
 44 | .coverage
 45 | .coverage.*
 46 | .cache
 47 | nosetests.xml
 48 | coverage.xml
 49 | *.cover
 50 | *.py,cover
 51 | .hypothesis/
 52 | .pytest_cache/
 53 | cover/
 54 | 
 55 | # Translations
 56 | *.mo
 57 | *.pot
 58 | 
 59 | # Django stuff:
 60 | *.log
 61 | local_settings.py
 62 | db.sqlite3
 63 | db.sqlite3-journal
 64 | 
 65 | # Flask stuff:
 66 | instance/
 67 | .webassets-cache
 68 | 
 69 | # Scrapy stuff:
 70 | .scrapy
 71 | 
 72 | # Sphinx documentation
 73 | docs/_build/
 74 | 
 75 | # PyBuilder
 76 | .pybuilder/
 77 | target/
 78 | 
 79 | # Jupyter Notebook
 80 | .ipynb_checkpoints
 81 | 
 82 | # IPython
 83 | profile_default/
 84 | ipython_config.py
 85 | 
 86 | # pyenv
 87 | #   For a library or package, you might want to ignore these files since the code is
 88 | #   intended to run in multiple environments; otherwise, check them in:
 89 | # .python-version
 90 | 
 91 | # pipenv
 92 | #   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
 93 | #   However, in case of collaboration, if having platform-specific dependencies or dependencies
 94 | #   having no cross-platform support, pipenv may install dependencies that don't work, or not
 95 | #   install all needed dependencies.
 96 | #Pipfile.lock
 97 | 
 98 | # poetry
 99 | #   Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
100 | #   This is especially recommended for binary packages to ensure reproducibility, and is more
101 | #   commonly ignored for libraries.
102 | #   https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
103 | #poetry.lock
104 | 
105 | # pdm
106 | #   Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
107 | #pdm.lock
108 | #   pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
109 | #   in version control.
110 | #   https://pdm.fming.dev/latest/usage/project/#working-with-version-control
111 | .pdm.toml
112 | .pdm-python
113 | .pdm-build/
114 | 
115 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
116 | __pypackages__/
117 | 
118 | # Celery stuff
119 | celerybeat-schedule
120 | celerybeat.pid
121 | 
122 | # SageMath parsed files
123 | *.sage.py
124 | 
125 | # Environments
126 | .env
127 | .venv
128 | env/
129 | venv/
130 | ENV/
131 | env.bak/
132 | venv.bak/
133 | 
134 | # Spyder project settings
135 | .spyderproject
136 | .spyproject
137 | 
138 | # Rope project settings
139 | .ropeproject
140 | 
141 | # mkdocs documentation
142 | /site
143 | 
144 | # mypy
145 | .mypy_cache/
146 | .dmypy.json
147 | dmypy.json
148 | 
149 | # Pyre type checker
150 | .pyre/
151 | 
152 | # pytype static type analyzer
153 | .pytype/
154 | 
155 | # Cython debug symbols
156 | cython_debug/
157 | 
158 | # PyCharm
159 | #  JetBrains specific template is maintained in a separate JetBrains.gitignore that can
160 | #  be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
161 | #  and can be added to the global gitignore or merged into this file.  For a more nuclear
162 | #  option (not recommended) you can uncomment the following to ignore the entire idea folder.
163 | #.idea/
164 | 
165 | .DS_Store
166 | 
167 | #docker
168 | docker-compose.yml
169 | compose.yml
170 | 
171 | device_client_id_blob
172 | device_private_key
173 | 
174 | ### JetBrains template
175 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
176 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
177 | 
178 | # User-specific stuff
179 | .idea/**/workspace.xml
180 | .idea/**/tasks.xml
181 | .idea/**/usage.statistics.xml
182 | .idea/**/dictionaries
183 | .idea/**/shelf
184 | 
185 | # AWS User-specific
186 | .idea/**/aws.xml
187 | 
188 | # Generated files
189 | .idea/**/contentModel.xml
190 | 
191 | # Sensitive or high-churn files
192 | .idea/**/dataSources/
193 | .idea/**/dataSources.ids
194 | .idea/**/dataSources.local.xml
195 | .idea/**/sqlDataSources.xml
196 | .idea/**/dynamic.xml
197 | .idea/**/uiDesigner.xml
198 | .idea/**/dbnavigator.xml
199 | 
200 | # Gradle
201 | .idea/**/gradle.xml
202 | .idea/**/libraries
203 | 
204 | # Gradle and Maven with auto-import
205 | # When using Gradle or Maven with auto-import, you should exclude module files,
206 | # since they will be recreated, and may cause churn.  Uncomment if using
207 | # auto-import.
208 | # .idea/artifacts
209 | # .idea/compiler.xml
210 | # .idea/jarRepositories.xml
211 | # .idea/modules.xml
212 | # .idea/*.iml
213 | # .idea/modules
214 | # *.iml
215 | # *.ipr
216 | 
217 | .idea/
218 | 
219 | # CMake
220 | cmake-build-*/
221 | 
222 | # Mongo Explorer plugin
223 | .idea/**/mongoSettings.xml
224 | 
225 | # File-based project format
226 | *.iws
227 | 
228 | # IntelliJ
229 | out/
230 | 
231 | # mpeltonen/sbt-idea plugin
232 | .idea_modules/
233 | 
234 | # JIRA plugin
235 | atlassian-ide-plugin.xml
236 | 
237 | # Cursive Clojure plugin
238 | .idea/replstate.xml
239 | 
240 | # SonarLint plugin
241 | .idea/sonarlint/
242 | 
243 | # Crashlytics plugin (for Android Studio and IntelliJ)
244 | com_crashlytics_export_strings.xml
245 | crashlytics.properties
246 | crashlytics-build.properties
247 | fabric.properties
248 | 
249 | # Editor-based Rest Client
250 | .idea/httpRequests
251 | 
252 | # Android studio 3.1+ serialized cache file
253 | .idea/caches/build_file_checksums.ser
254 | 
255 | postgres_data/
256 | 
257 | 
258 | epg_files/
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
 1 | # Start from the official Python base image.
 2 | FROM python:3.9-alpine
 3 | 
 4 | # Set the current working directory to /code.
 5 | # This is where we put the requirements.txt file and the app directory.
 6 | WORKDIR /code
 7 | 
 8 | # Copy the requirements file into the /code directory.
 9 | # Copy only the requirements.txt file first, not the rest of the code.
10 | # Because this file changes infrequently, Docker detects that and reuses the cache for this step, which also enables caching for the next step.
11 | COPY ./requirements.txt /code/requirements.txt
12 | 
13 | # Install the package dependencies from the requirements file.
14 | # The --no-cache-dir option tells pip not to keep downloaded packages locally; that only helps if pip runs again to install the same packages, which is not the case when working with containers.
15 | #
16 | # Note
17 | # --no-cache-dir is only relevant to pip; it has nothing to do with Docker or containers.
18 | # The --upgrade option tells pip to upgrade the packages if they are already installed.
19 | #
20 | # Because the COPY in the previous step can be detected by the Docker cache, this step will also use the Docker cache when available.
21 | #
22 | # Using the cache here saves a lot of time when rebuilding the image during development, instead of downloading and installing every dependency each time.
23 | RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
24 | 
25 | # Copy the ./app directory into the /code directory.
26 | #
27 | # Since it contains all the code, which changes most frequently, the Docker cache will rarely be usable for this step or any step after it.
28 | #
29 | # That is why it is important to put this near the end of the Dockerfile, to optimize image build times.
30 | COPY ./app /code/app
31 | 
32 | # Set the command to run the uvicorn server.
33 | #
34 | # CMD takes a list of strings, each of which is what you would type on the command line, separated by spaces.
35 | #
36 | # This command runs from the current working directory, the same /code directory set above with WORKDIR /code.
37 | #
38 | # Because the program starts from /code, and the ./app directory with your code is inside it, Uvicorn can see and import app from app.main.
39 | CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "80"]
40 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
  1 | # CharmingEPG
  2 | 
  3 | ## Support
  4 | 
  5 | - MyTV Super
  6 | - NowTV
  7 | - RTHK
  8 | - HOY
  9 | - Hami
 10 | - Astro Go (Chinese-language channels come with Chinese descriptions)
 11 | - StarHub (Chinese-language channels come with Chinese descriptions)
 12 | - CN (via epg.pw)
 13 | 
 14 | ## Features
 15 | 
 16 | - Fetches a 7-day EPG for each enabled platform, updated once per day.
 17 | - The EPG generated each day is stored locally as XML.
 18 | - To persist the EPG files, mount the /code/epg_files directory.
 19 | 
 20 | ## How to use
 21 | 
 22 | ### Environment variables
 23 | 
 24 | ```dotenv
 25 | # Configure which platforms to enable
 26 | EPG_ENABLE_CN=true
 27 | EPG_ENABLE_TVB=true
 28 | EPG_ENABLE_NOWTV=false
 29 | EPG_ENABLE_HAMI=true
 30 | EPG_ENABLE_ASTRO=false
 31 | EPG_ENABLE_RTHK=false
 32 | EPG_ENABLE_HOY=false
 33 | EPG_ENABLE_STARHUB=false
 34 | # Accepts `1`/`0`, `yes`/`no`, `true`/`false`, or `on`/`off`
 35 | # These settings are already listed in `docker-compose.example.yml`; adjust them as needed.
 36 | 
 37 | ### The options below are optional ###
 38 | # Logging
 39 | LOG_LEVEL=INFO
 40 | LOG_ROTATION=10 MB
 41 | LOG_RETENTION=7 days
 42 | 
 43 | #EPG
 44 | EPG_CACHE_TTL=3600 # Cache TTL sent in the EPG response headers, handy for caching behind CF
 45 | EPG_UPDATE_INTERVAL=10 # Check every 10 minutes whether an update is needed (skipped if already updated that day)
 46 | 
 47 | #HTTP
 48 | HTTP_TIMEOUT=30 # 30-second timeout by default
 49 | HTTP_MAX_RETRIES=3 # 3 retries by default
 50 | 
 51 | #Proxy
 52 | PROXY_HTTP=http://proxy.example.com:8080
 53 | PROXY_HTTPS=http://proxy.example.com:8080
 54 | ```
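
These switches are parsed by `Config.platform_enabled` in `app/config.py`. A minimal standalone sketch that mirrors the same truthy-value handling (not the app's actual module):

```python
import os

def platform_enabled(platform: str) -> bool:
    # Platforms default to enabled unless the variable is set to a falsy value.
    val = os.getenv(f"EPG_ENABLE_{platform.upper()}", "true").strip().lower()
    return val in {"1", "true", "yes", "on"}

# Example: EPG_ENABLE_NOWTV=off disables NowTV; unset platforms stay enabled.
os.environ["EPG_ENABLE_NOWTV"] = "off"
print(platform_enabled("nowtv"))  # False
print(platform_enabled("tvb"))    # True
```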
 55 | 
 56 | 
 57 | 
 58 | ### Docker Compose
 59 | An example docker-compose.yml:
 60 | ```yaml
 61 | version: '3.3'
 62 | services:
 63 |   charming_epg:
 64 |     image: charmingcheung000/charming-epg:latest
 65 |     container_name: charming_epg
 66 |     environment:
 67 |       - EPG_ENABLE_CN=true
 68 |       - EPG_ENABLE_TVB=true
 69 |       - EPG_ENABLE_NOWTV=true
 70 |       - EPG_ENABLE_HAMI=true
 71 |       - EPG_ENABLE_ASTRO=true
 72 |       - EPG_ENABLE_RTHK=true
 73 |       - EPG_ENABLE_HOY=true
 74 |       - EPG_ENABLE_STARHUB=true
 75 |       - TZ=Asia/Shanghai
 76 |       - EPG_CACHE_TTL=3600
 77 |     volumes:
 78 |       - /root/docker/epg_data/epg_files:/code/epg_files
 79 |     ports:
 80 |       - "30008:80"
 81 |     restart: always
 82 | ```
 83 | 
 84 | 
 85 | ### Docker CLI
 86 | 
 87 | ```bash
 88 | # Configure the platform switches as needed
 89 | docker run -d \
 90 |   -p 30008:80 \
 91 |   --name charming_epg \
 92 |   -e EPG_ENABLE_CN=true \
 93 |   -e EPG_ENABLE_TVB=true \
 94 |   -e EPG_ENABLE_NOWTV=false \
 95 |   -e EPG_ENABLE_HAMI=true \
 96 |   -e EPG_ENABLE_ASTRO=false \
 97 |   -e EPG_ENABLE_RTHK=false \
 98 |   -e EPG_ENABLE_HOY=false \
 99 |   -e EPG_ENABLE_STARHUB=false \
100 |   charmingcheung000/charming-epg:latest
101 | ```
102 | 
103 | ### Request
104 | 
105 | #### Request all platforms
106 | 
107 | ```
108 | http://[ip]:[port]/all          # XML
109 | http://[ip]:[port]/all.xml.gz   # gzip-compressed XML
110 | ```
111 | 
112 | #### Request one or more platforms
113 | 
114 | ```
115 | http://[ip]:[port]/epg?platforms=tvb,nowtv,rthk,hoy,hami,astro,starhub,cn
116 | ```
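
A minimal Python sketch (standard library only) for pulling the generated guide from a running instance; the base URL and output paths are placeholders to adjust for your deployment:

```python
import gzip
import urllib.request

BASE = "http://127.0.0.1:30008"  # replace with your [ip]:[port]

# Fetch selected platforms as plain XML.
with urllib.request.urlopen(f"{BASE}/epg?platforms=tvb,rthk") as resp:
    open("epg.xml", "wb").write(resp.read())

# Fetch all platforms as a gzip archive and decompress it locally.
with urllib.request.urlopen(f"{BASE}/all.xml.gz") as resp:
    open("all.xml", "wb").write(gzip.decompress(resp.read()))
```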
117 | 
--------------------------------------------------------------------------------
/app/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CharmingCheung/CharmingEPG/17c7d1f290062a7c3a7f39018302532208c81d1a/app/__init__.py
--------------------------------------------------------------------------------
/app/config.py:
--------------------------------------------------------------------------------
 1 | import os
 2 | from typing import Dict, Optional
 3 | from dotenv import load_dotenv
 4 | 
 5 | load_dotenv(verbose=True, override=True)
 6 | 
 7 | 
 8 | class Config:
 9 |     """Centralized configuration management for CharmingEPG"""
10 | 
11 |     # Application settings
12 |     APP_NAME = "CharmingEPG"
13 |     APP_VERSION = "1.0.0"
14 | 
15 |     # Logging settings
16 |     LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO").upper()
17 |     LOG_FILE = os.getenv("LOG_FILE", "runtime.log")
18 |     LOG_ROTATION = os.getenv("LOG_ROTATION", "10 MB")
19 |     LOG_RETENTION = os.getenv("LOG_RETENTION", "7 days")
20 | 
21 |     # EPG settings
22 |     EPG_UPDATE_INTERVAL = int(os.getenv("EPG_UPDATE_INTERVAL", "10"))  # minutes
23 |     EPG_BASE_DIR = os.getenv("EPG_BASE_DIR", "epg_files")
24 |     EPG_TIMEZONE = os.getenv("EPG_TIMEZONE", "Asia/Shanghai")
25 |     EPG_CACHE_TTL = int(os.getenv("EPG_CACHE_TTL", "3600"))  # seconds (default: 1 hour)
26 | 
27 |     # HTTP client settings
28 |     HTTP_TIMEOUT = int(os.getenv("HTTP_TIMEOUT", "30"))
29 |     HTTP_MAX_RETRIES = int(os.getenv("HTTP_MAX_RETRIES", "3"))
30 |     HTTP_RETRY_BACKOFF = float(os.getenv("HTTP_RETRY_BACKOFF", "2.0"))
31 | 
32 |     # Proxy settings
33 |     PROXY_HTTP = os.getenv("PROXY_HTTP")
34 |     PROXY_HTTPS = os.getenv("PROXY_HTTPS")
35 | 
36 |     @classmethod
37 |     def get_proxies(cls) -> Optional[Dict[str, str]]:
38 |         """Get proxy configuration if available"""
39 |         if cls.PROXY_HTTP and cls.PROXY_HTTPS:
40 |             return {
41 |                 "http": cls.PROXY_HTTP,
42 |                 "https": cls.PROXY_HTTPS
43 |             }
44 |         return None
45 | 
46 |     @classmethod
47 |     def platform_enabled(cls, platform: str) -> bool:
48 |         """Check if a platform is enabled via environment variable"""
49 |         env_key = f"EPG_ENABLE_{platform.upper()}"
50 |         val = os.getenv(env_key, "true").strip().lower()
51 |         return val in {"1", "true", "yes", "on"}
52 | 
53 |     @classmethod
54 |     def get_epg_file_path(cls, platform: str, date_str: str) -> str:
55 |         """Get the file path for EPG data"""
56 |         return os.path.join(cls.EPG_BASE_DIR, platform, f"{platform}_{date_str}.xml")
57 | 
58 |     # Platform configuration
59 |     EPG_PLATFORMS = [
60 |         {"platform": "tvb", "name": "MyTV Super", "fetcher": "request_my_tv_super_epg"},
61 |         {"platform": "nowtv", "name": "NowTV", "fetcher": "request_now_tv_epg"},
62 |         {"platform": "hami", "name": "Hami", "fetcher": "request_hami_epg"},
63 |         {"platform": "astro", "name": "Astro Go", "fetcher": "request_astro_epg"},
64 |         {"platform": "rthk", "name": "RTHK", "fetcher": "request_rthk_epg"},
65 |         {"platform": "hoy", "name": "HOY", "fetcher": "request_hoy_epg"},
66 |         {"platform": "starhub", "name": "StarHub", "fetcher": "request_starhub_epg"},
67 |         {"platform": "cn", "name": "CN (epg.pw)", "fetcher": "request_cn_epg"},
68 |     ]
69 | 
70 |     @classmethod
71 |     def get_enabled_platforms(cls):
72 |         """Get list of enabled platforms"""
73 |         return [
74 |             platform for platform in cls.EPG_PLATFORMS
75 |             if cls.platform_enabled(platform["platform"])
76 |         ]
77 | 
78 |     # User-Agent for HTTP requests
79 |     DEFAULT_USER_AGENT = (
80 |         "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
81 |         "AppleWebKit/537.36 (KHTML, like Gecko) "
82 |         "Chrome/133.0.0.0 Safari/537.36"
83 |     )
--------------------------------------------------------------------------------
/app/epg/EpgGenerator.py:
--------------------------------------------------------------------------------
 1 | import html
 2 | import re
 3 | import xml.etree.ElementTree as ET
 4 | 
 5 | import pytz
 6 | 
 7 | 
 8 | async def generateEpg(channels, programs):
 9 |     tv = ET.Element("tv",
10 |                     {"generator-info-name": "Charming"})
11 | 
12 |     for channel in channels:
13 |         channelName: str = channel["channelName"]
14 |         # Create the <channel> element
15 |         channel_elem = ET.SubElement(tv, "channel", id=channelName)
16 |         display_name = ET.SubElement(channel_elem, "display-name", lang="zh")
17 |         display_name.text = channelName
18 | 
19 |     for program in programs:
20 |         channelName: str = program["channelName"]
21 |         start_time = time_stamp_to_timezone_str(program["start"])
22 |         end_time = time_stamp_to_timezone_str(program["end"])
23 |         programme = ET.SubElement(tv, "programme", channel=channelName, start=start_time, stop=end_time)
24 |         title = ET.SubElement(programme, "title", lang="zh")
25 |         title.text = program["programName"]
26 |         if program["description"]:
27 |             description_str = program["description"]
28 |             # Strip characters that are invalid in XML 1.0.
29 |             description_str = clean_invalid_xml_chars(description_str)
30 |             # Note: ElementTree escapes special characters during serialization,
31 |             #       so calling html.escape here would double-escape the text.
32 |             description = ET.SubElement(programme, "desc", lang="zh")
33 |             description.text = description_str
34 | 
35 |     xml_str = ET.tostring(tv, encoding='utf-8')
36 |     return xml_str
37 | 
38 | 
39 | def clean_invalid_xml_chars(text):
40 |     # Remove characters that are not valid in XML 1.0 using a regex
41 |     return re.sub(r'[^\x09\x0A\x0D\x20-\uD7FF\uE000-\uFFFD]', '', text)
42 | 
43 | 
44 | def time_stamp_to_timezone_str(timestamp_s):
45 |     target_tz = pytz.timezone('Asia/Shanghai')
46 |     local_dt = timestamp_s.astimezone(target_tz)
47 |     formatted_time = local_dt.strftime('%Y%m%d%H%M%S %z')
48 |     return formatted_time
49 | 
--------------------------------------------------------------------------------
/app/epg/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CharmingCheung/CharmingEPG/17c7d1f290062a7c3a7f39018302532208c81d1a/app/epg/__init__.py
--------------------------------------------------------------------------------
/app/epg_platform/Astro.py:
--------------------------------------------------------------------------------
  1 | import math
  2 | import pytz
  3 | from datetime import datetime, time, timezone, timedelta
  4 | from typing import List
  5 | from urllib.parse import urlparse
  6 | from zoneinfo import ZoneInfo
  7 | 
  8 | from ..logger import get_logger
  9 | from ..utils import has_chinese
 10 | from .base import BaseEPGPlatform, Channel, Program
 11 | 
 12 | logger = get_logger(__name__)
 13 | 
 14 | 
 15 | class AstroPlatform(BaseEPGPlatform):
 16 |     """Astro Go EPG platform implementation"""
 17 | 
 18 |     def __init__(self):
 19 |         super().__init__("astro")
 20 |         self.base_url = "https://sg-sg-sg.astro.com.my:9443"
 21 |         self.oauth_url = f"{self.base_url}/oauth2/authorize"
 22 |         self.channels_url = f"{self.base_url}/ctap/r1.6.0/shared/channels"
 23 |         self.grid_url = f"{self.base_url}/ctap/r1.6.0/shared/grid"
 24 | 
 25 |         self.referer = "https://astrogo.astro.com.my/"
 26 |         self.client_token = "v:1!r:80800!ur:GUEST_REGION!community:Malaysia%20Live!t:k!dt:PC!f:Astro_unmanaged!pd:CHROME-FF!pt:Adults"
 27 |         self.access_token = None
 28 | 
 29 |     async def fetch_channels(self) -> List[Channel]:
 30 |         """Fetch channel list from Astro Go API"""
 31 |         self.logger.info("📡 正在获取 Astro Go 频道列表")
 32 | 
 33 |         # Get access token first
 34 |         if not await self._get_access_token():
 35 |             raise Exception("❌ 获取 Astro 访问令牌失败")
 36 | 
 37 |         headers = self.get_default_headers({
 38 |             "Referer": self.referer,
 39 |             "Authorization": f"Bearer {self.access_token}",
 40 |             "Accept-Language": "zh"
 41 |         })
 42 | 
 43 |         params = {
 44 |             "clientToken": self.client_token
 45 |         }
 46 | 
 47 |         response = self.http_client.get(
 48 |             self.channels_url,
 49 |             headers=headers,
 50 |             params=params
 51 |         )
 52 | 
 53 |         data = response.json()
 54 |         channels = []
 55 | 
 56 |         for channel_data in data.get("channels", []):
 57 |             # Find logo URL
 58 |             logo = ""
 59 |             for media in channel_data.get("media", []):
 60 |                 if media.get("type") == "regular":
 61 |                     logo = media.get("url", "")
 62 |                     break
 63 | 
 64 |             if not logo and channel_data.get("media"):
 65 |                 logo = channel_data["media"][0].get("url", "")
 66 | 
 67 |             # Clean channel name
 68 |             channel_name = channel_data.get("name", "").replace(" HD", "").strip()
 69 | 
 70 |             channels.append(Channel(
 71 |                 channel_id=str(channel_data.get("id", "")),
 72 |                 name=channel_name,
 73 |                 logo=logo,
 74 |                 raw_data=channel_data
 75 |             ))
 76 | 
 77 |         self.logger.info(f"📺 发现 {len(channels)} 个 Astro Go 频道")
 78 |         return channels
 79 | 
 80 |     async def fetch_programs(self, channels: List[Channel]) -> List[Program]:
 81 |         """Fetch program data for all channels"""
 82 |         self.logger.info(f"📡 正在抓取 {len(channels)} 个频道的节目数据")
 83 | 
 84 |         if not channels:
 85 |             return []
 86 | 
 87 |         # Get channel count and first channel ID for API query
 88 |         channel_count = len(channels)
 89 |         first_id = channels[0].channel_id
 90 | 
 91 |         # Fetch EPG for 7 days and merge
 92 |         merged_channels = {}
 93 | 
 94 |         for day in range(7):
 95 |             try:
 96 |                 self.logger.info(f"🔍 【Astro】正在获取第 {day} 天的 EPG 数据")
 97 |                 date_str, duration = self._get_date_params(day)
 98 | 
 99 |                 raw_epg = await self._query_epg(date_str, duration, channel_count, first_id)
100 | 
101 |                 if raw_epg.get("channels"):
102 |                     for channel_data in raw_epg["channels"]:
103 |                         channel_id = str(channel_data.get("id", ""))
104 |                         schedule = channel_data.get("schedule", [])
105 | 
106 |                         if channel_id not in merged_channels:
107 |                             merged_channels[channel_id] = []
108 | 
109 |                         merged_channels[channel_id].extend(schedule)
110 | 
111 |             except Exception as e:
112 |                 self.logger.error(f"❌ 获取第 {day} 天的 EPG 数据失败: {e}")
113 |                 continue
114 | 
115 |         # Convert to Program objects
116 |         programs = []
117 |         channel_lookup = {ch.channel_id: ch.name for ch in channels}
118 | 
119 |         for channel_id, schedule in merged_channels.items():
120 |             channel_name = channel_lookup.get(channel_id, f"Channel {channel_id}")
121 | 
122 |             for program_data in schedule:
123 |                 try:
124 |                     start_time_str = program_data.get("startDateTime")
125 |                     duration = program_data.get("duration")
126 | 
127 |                     if start_time_str and duration:
128 |                         start_time, end_time = self._parse_program_time(start_time_str, duration)
129 | 
130 |                         title = program_data.get("title", "")
131 |                         description = program_data.get("synopsis", "")
132 |                         episode_number = program_data.get("episodeNumber")
133 | 
134 |                         # Add episode number based on language
135 |                         if episode_number:
136 |                             if has_chinese(title) or has_chinese(description):
137 |                                 title += f" 第{episode_number}集"
138 |                             else:
139 |                                 title += f" Ep{episode_number}"
140 | 
141 |                         programs.append(Program(
142 |                             channel_id=channel_id,
143 |                             title=title,
144 |                             start_time=start_time,
145 |                             end_time=end_time,
146 |                             description=description,
147 |                             raw_data=program_data
148 |                         ))
149 | 
150 |                 except Exception as e:
151 |                     self.logger.warning(f"⚠️ 解析节目数据失败: {e}")
152 |                     continue
153 | 
154 |         self.logger.info(f"📊 总共抓取了 {len(programs)} 个节目")
155 |         return programs
156 | 
157 |     async def _get_access_token(self) -> bool:
158 |         """Get OAuth access token for Astro API"""
159 |         try:
160 |             params = {
161 |                 "client_id": "browser",
162 |                 "state": "guestUserLogin",
163 |                 "response_type": "token",
164 |                 "redirect_uri": "https://astrogo.astro.com.my",
165 |                 "scope": "urn:synamedia:vcs:ovp:guest-user",
166 |                 "prompt": "none",
167 |             }
168 | 
169 |             headers = self.get_default_headers({
170 |                 "Referer": self.referer,
171 |             })
172 | 
173 |             response = self.http_client.get(
174 |                 self.oauth_url,
175 |                 headers=headers,
176 |                 params=params,
177 |                 allow_redirects=False  # We need to handle redirect manually
178 |             )
179 | 
180 |             location = response.headers.get("Location")
181 |             if not location:
182 |                 self.logger.error("❌ OAuth 响应中未找到 Location 头")
183 |                 return False
184 | 
185 |             # Extract access token from fragment
186 |             parsed = urlparse(location)
187 |             fragment = parsed.fragment
188 |             params = {}
189 | 
190 |             for item in fragment.split("&"):
191 |                 if "=" in item:
192 |                     key, value = item.split("=", 1)
193 |                     params[key] = value
194 | 
195 |             access_token = params.get("access_token")
196 |             if access_token:
197 |                 self.access_token = access_token
198 |                 self.logger.debug(f"✨ 获取 Astro 访问令牌成功: {access_token[:20]}...")
199 |                 return True
200 |             else:
201 |                 self.logger.error(f"❌ Location 片段中未找到访问令牌: {location}")
202 |                 return False
203 | 
204 |         except Exception as e:
205 |             self.logger.error(f"❌ 获取 Astro 访问令牌失败: {e}")
206 |             return False
207 | 
208 |     async def _query_epg(self, start_date: str, duration: int, channel_count: int, first_id: str) -> dict:
209 |         """Query EPG data from Astro API"""
210 |         headers = self.get_default_headers({
211 |             "Referer": self.referer,
212 |             "Authorization": f"Bearer {self.access_token}",
213 |             "Accept-Language": "zh"
214 |         })
215 | 
216 |         params = {
217 |             "startDateTime": start_date,
218 |             "channelId": first_id,
219 |             "limit": channel_count,
220 |             "genreId": "",
221 |             "isPlayable": "true",
222 |             "duration": duration,
223 |             "clientToken": self.client_token
224 |         }
225 | 
226 |         response = self.http_client.get(
227 |             self.grid_url,
228 |             headers=headers,
229 |             params=params
230 |         )
231 | 
232 |         if response.status_code == 200:
233 |             return response.json()
234 |         else:
235 |             self.logger.warning(f"⚠️ EPG 查询失败,状态码: {response.status_code}")
236 |             return {}
237 | 
238 |     def _get_date_params(self, date_delta: int) -> tuple:
239 |         """Get date string and duration for EPG query"""
240 |         now = datetime.now(ZoneInfo("Asia/Shanghai")) + timedelta(days=date_delta)
241 | 
242 |         if date_delta == 0:
243 |             # For today, round down to nearest half-hour
244 |             minute = now.minute
245 |             rounded_minute = 0 if minute < 30 else 30
246 |             target_time = now.replace(minute=rounded_minute, second=0, microsecond=0)
247 | 
248 |             # Until midnight tomorrow
249 |             next_day = (target_time + timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0)
250 |             dur_seconds = (next_day - target_time).total_seconds()
251 |             duration = math.ceil(dur_seconds / 3600)  # Round up to hours
252 |         else:
253 |             # For other days, start from midnight
254 |             target_time = datetime.combine(now.date(), time(0, 0), tzinfo=ZoneInfo("Asia/Shanghai"))
255 |             duration = 24
256 | 
257 |         # Convert to UTC ISO string
258 |         target_time_utc = target_time.astimezone(timezone.utc)
259 |         iso_str = target_time_utc.strftime('%Y-%m-%dT%H:%M:%S.000Z')
260 | 
261 |         return iso_str, duration
262 | 
263 |     def _parse_program_time(self, start_time_str: str, duration: int) -> tuple:
264 |         """Parse program start time and calculate end time"""
265 |         # Parse UTC start time
266 |         start_time_utc = datetime.strptime(start_time_str, "%Y-%m-%dT%H:%M:%S.000Z")
267 |         start_time_utc = pytz.utc.localize(start_time_utc)
268 | 
269 |         # Calculate end time
270 |         end_time_utc = start_time_utc + timedelta(seconds=duration)
271 | 
272 |         # Convert to Shanghai timezone
273 |         shanghai_tz = pytz.timezone('Asia/Shanghai')
274 |         start_time_local = start_time_utc.astimezone(shanghai_tz)
275 |         end_time_local = end_time_utc.astimezone(shanghai_tz)
276 | 
277 |         return start_time_local, end_time_local
278 | 
279 | 
280 | # Create platform instance
281 | astro_platform = AstroPlatform()
282 | 
283 | 
284 | # Legacy functions for backward compatibility
285 | async def get_astro_epg():
286 |     """Legacy function - fetch Astro EPG data"""
287 |     try:
288 |         channels = await astro_platform.fetch_channels()
289 |         programs = await astro_platform.fetch_programs(channels)
290 | 
291 |         # Convert to legacy format
292 |         raw_channels = []
293 |         raw_programs = []
294 | 
295 |         for channel in channels:
296 |             raw_channels.append({
297 |                 "channelName": channel.name,
298 |                 "channelId": channel.channel_id,
299 |                 "logo": channel.extra_data.get("logo", "")
300 |             })
301 | 
302 |         for program in programs:
303 |             channel_name = next((ch.name for ch in channels if ch.channel_id == program.channel_id), "")
304 |             raw_programs.append({
305 |                 "channelName": channel_name,
306 |                 "programName": program.title,
307 |                 "description": program.description,
308 |                 "start": program.start_time,
309 |                 "end": program.end_time
310 |             })
311 | 
312 |         return raw_channels, raw_programs
313 | 
314 |     except Exception as e:
315 |         logger.error(f"❌ 旧版 get_astro_epg 函数错误: {e}", exc_info=True)
316 |         return [], []
317 | 
318 | 
319 | def find_channel_name_by_id(channels, channel_id):
320 |     """Legacy utility function"""
321 |     for channel in channels:
322 |         if str(channel.get('channelId')) == str(channel_id):
323 |             return channel.get('channelName')
324 |     return f"Channel {channel_id}"
325 | 
326 | 
327 | def utc_to_local(start_time, duration):
328 |     """Legacy utility function"""
329 |     return astro_platform._parse_program_time(start_time, duration)
330 | 
331 | 
332 | def extract_fragment_params(location_url):
333 |     """Legacy utility function"""
334 |     parsed = urlparse(location_url)
335 |     fragment = parsed.fragment
336 |     params = dict()
337 |     for item in fragment.split("&"):
338 |         if "=" in item:
339 |             key, value = item.split("=", 1)
340 |             params[key] = value
341 |     return params
342 | 
343 | 
344 | def get_access_token():
345 |     """Legacy utility function - synchronous version (limited functionality)"""
346 |     logger.warning("⚠️ 调用了旧版 get_access_token - 请使用异步版本以获得更好的可靠性")
347 |     try:
348 |         import asyncio
349 |         loop = asyncio.get_event_loop()
350 |         if loop.is_running():
351 |             logger.warning("⚠️ 在异步上下文中无法运行同步令牌获取")
352 |             return None
353 |         else:
354 |             success = loop.run_until_complete(astro_platform._get_access_token())
355 |             return astro_platform.access_token if success else None
356 |     except Exception as e:
357 |         logger.error(f"❌ 旧版 get_access_token 错误: {e}")
358 |         return None
--------------------------------------------------------------------------------
/app/epg_platform/CN_epg_pw.py:
--------------------------------------------------------------------------------
  1 | import xml.etree.ElementTree as ET
  2 | from typing import List
  3 | 
  4 | from ..logger import get_logger
  5 | from .base import BaseEPGPlatform, Channel, Program
  6 | 
  7 | logger = get_logger(__name__)
  8 | 
  9 | 
 10 | class CNEpgPlatform(BaseEPGPlatform):
 11 |     """CN (epg.pw) EPG platform implementation"""
 12 | 
 13 |     def __init__(self):
 14 |         super().__init__("cn")
 15 |         self.epg_url = "https://epg.pw/xmltv/epg_CN.xml"
 16 | 
 17 |     async def fetch_channels(self) -> List[Channel]:
 18 |         """Fetch channel list from CN EPG XML"""
 19 |         self.logger.info("📡 正在从 EPG.PW获取CN频道列表")
 20 | 
 21 |         response = self.http_client.get(self.epg_url)
 22 | 
 23 |         # Parse XML to extract channels
 24 |         root = ET.fromstring(response.text)
 25 |         channels = []
 26 | 
 27 |         for channel_elem in root.findall('channel'):
 28 |             channel_id = channel_elem.get('id', '')
 29 |             display_name_elem = channel_elem.find('display-name')
 30 |             channel_name = display_name_elem.text if display_name_elem is not None else channel_id
 31 | 
 32 |             if channel_id:
 33 |                 channels.append(Channel(
 34 |                     channel_id=channel_id,
 35 |                     name=channel_name,
 36 |                     raw_data={'xml_element': channel_elem}
 37 |                 ))
 38 | 
 39 |         self.logger.info(f"📺 从 CN EPG 发现 {len(channels)} 个频道")
 40 |         return channels
 41 | 
 42 |     async def fetch_programs(self, channels: List[Channel]) -> List[Program]:
 43 |         """Fetch program data from CN EPG XML"""
 44 |         self.logger.info(f"📡 正在抓取 {len(channels)} 个频道的节目数据")
 45 | 
 46 |         response = self.http_client.get(self.epg_url)
 47 |         root = ET.fromstring(response.text)
 48 |         programs = []
 49 | 
 50 |         # Create a set of valid channel IDs for quick lookup
 51 |         valid_channel_ids = {ch.channel_id for ch in channels}
 52 | 
 53 |         for programme_elem in root.findall('programme'):
 54 |             try:
 55 |                 channel_id = programme_elem.get('channel', '')
 56 | 
 57 |                 # Only include programs for channels we have
 58 |                 if channel_id in valid_channel_ids:
 59 |                     start_time_str = programme_elem.get('start', '')
 60 |                     stop_time_str = programme_elem.get('stop', '')
 61 | 
 62 |                     title_elem = programme_elem.find('title')
 63 |                     title = title_elem.text if title_elem is not None else ''
 64 | 
 65 |                     desc_elem = programme_elem.find('desc')
 66 |                     description = desc_elem.text if desc_elem is not None else ''
 67 | 
 68 |                     # Parse time strings (format: YYYYMMDDHHMMSS +0800)
 69 |                     start_time = self._parse_epg_time(start_time_str)
 70 |                     stop_time = self._parse_epg_time(stop_time_str)
 71 | 
 72 |                     if start_time and stop_time:
 73 |                         programs.append(Program(
 74 |                             channel_id=channel_id,
 75 |                             title=title,
 76 |                             start_time=start_time,
 77 |                             end_time=stop_time,
 78 |                             description=description,
 79 |                             raw_data={'xml_element': programme_elem}
 80 |                         ))
 81 | 
 82 |             except Exception as e:
 83 |                 self.logger.warning(f"⚠️ 解析节目数据失败: {e}")
 84 |                 continue
 85 | 
 86 |         self.logger.info(f"📊 总共抓取了 {len(programs)} 个节目")
 87 |         return programs
 88 | 
 89 |     def _parse_epg_time(self, time_str: str):
 90 |         """Parse EPG time format (YYYYMMDDHHMMSS +0800)"""
 91 |         try:
 92 |             from datetime import datetime
 93 |             import pytz
 94 | 
 95 |             # Remove timezone info and parse base datetime
 96 |             time_part = time_str.split()[0] if ' ' in time_str else time_str
 97 |             dt = datetime.strptime(time_part, '%Y%m%d%H%M%S')
 98 | 
 99 |             # Add Shanghai timezone
100 |             shanghai_tz = pytz.timezone('Asia/Shanghai')
101 |             return shanghai_tz.localize(dt)
102 | 
103 |         except Exception as e:
104 |             self.logger.warning(f"⚠️ 解析时间字符串 '{time_str}' 失败: {e}")
105 |             return None
106 | 
107 |     async def get_raw_xml(self) -> str:
108 |         """Get the raw XML content from epg.pw"""
109 |         self.logger.info("📡 正在从 CN EPG 获取原始 XML")
110 | 
111 |         response = self.http_client.get(self.epg_url)
112 | 
113 |         # Parse and re-serialize to ensure clean XML
114 |         root = ET.fromstring(response.text)
115 |         return ET.tostring(root, encoding='utf-8').decode('utf-8')
116 | 
117 | 
118 | # Create platform instance
119 | cn_epg_platform = CNEpgPlatform()
120 | 
121 | 
122 | # Legacy function for backward compatibility
123 | async def get_cn_channels_epg():
124 |     """Legacy function - fetch CN EPG as raw XML"""
125 |     try:
126 |         return await cn_epg_platform.get_raw_xml()
127 |     except Exception as e:
128 |         logger.error(f"❌ 旧版 get_cn_channels_epg 函数错误: {e}", exc_info=True)
129 |         return ""
--------------------------------------------------------------------------------
/app/epg_platform/HOY.py:
--------------------------------------------------------------------------------
  1 | import xml.etree.ElementTree as ET
  2 | import pytz
  3 | from datetime import datetime
  4 | from typing import List
  5 | 
  6 | from ..logger import get_logger
  7 | from .base import BaseEPGPlatform, Channel, Program
  8 | 
  9 | logger = get_logger(__name__)
 10 | 
 11 | 
 12 | class HOYPlatform(BaseEPGPlatform):
 13 |     """HOY TV EPG platform implementation"""
 14 | 
 15 |     def __init__(self):
 16 |         super().__init__("hoy")
 17 |         self.channel_list_url = "https://api2.hoy.tv/api/v3/a/channel"
 18 | 
 19 |     async def fetch_channels(self) -> List[Channel]:
 20 |         """Fetch channel list from HOY TV API"""
 21 |         self.logger.info("📡 正在从 HOY TV 获取频道列表")
 22 | 
 23 |         response = self.http_client.get(self.channel_list_url)
 24 |         data = response.json()
 25 | 
 26 |         channels = []
 27 | 
 28 |         if data.get('code') == 200:
 29 |             for raw_channel in data.get('data', []):
 30 |                 name_info = raw_channel.get('name', {})
 31 |                 channel_name = name_info.get('zh_hk', name_info.get('en', 'Unknown'))
 32 | 
 33 |                 epg_url = raw_channel.get('epg')
 34 |                 logo = raw_channel.get('image')
 35 | 
 36 |                 if channel_name and epg_url:
 37 |                     channels.append(Channel(
 38 |                         channel_id=str(raw_channel.get('id', '')),
 39 |                         name=channel_name,
 40 |                         epg_url=epg_url,
 41 |                         logo=logo,
 42 |                         raw_data=raw_channel
 43 |                     ))
 44 | 
 45 |         self.logger.info(f"📺 从 HOY TV 发现 {len(channels)} 个频道")
 46 |         return channels
 47 | 
 48 |     async def fetch_programs(self, channels: List[Channel]) -> List[Program]:
 49 |         """Fetch program data for all HOY TV channels"""
 50 |         self.logger.info(f"📡 正在抓取 {len(channels)} 个 HOY TV 频道的节目数据")
 51 | 
 52 |         all_programs = []
 53 | 
 54 |         for channel in channels:
 55 |             try:
 56 |                 programs = await self._fetch_channel_programs(channel)
 57 |                 all_programs.extend(programs)
 58 |             except Exception as e:
 59 |                 self.logger.error(f"❌ 获取频道 {channel.name} 节目数据失败: {e}")
 60 |                 continue
 61 | 
 62 |         self.logger.info(f"📊 总共抓取了 {len(all_programs)} 个节目")
 63 |         return all_programs
 64 | 
 65 |     async def _fetch_channel_programs(self, channel: Channel) -> List[Program]:
 66 |         """Fetch program data for a specific HOY TV channel"""
 67 |         self.logger.info(f"🔍【HOY】 正在获取频道节目: {channel.name}")
 68 | 
 69 |         epg_url = channel.extra_data.get('epg_url')
 70 |         if not epg_url:
 71 |             self.logger.warning(f"⚠️ 频道 {channel.name} 未找到 EPG URL")
 72 |             return []
 73 | 
 74 |         response = self.http_client.get(epg_url)
 75 | 
 76 |         programs = self._parse_epg_xml(response.text, channel)
 77 | 
 78 |         self.logger.debug(f"📺 在 {channel.name} 中发现 {len(programs)} 个节目")
 79 |         return programs
 80 | 
 81 |     def _parse_epg_xml(self, xml_content: str, channel: Channel) -> List[Program]:
 82 |         """Parse EPG XML content for HOY TV"""
 83 |         try:
 84 |             root = ET.fromstring(xml_content)
 85 |         except ET.ParseError as e:
 86 |             self.logger.error(f"❌ 解析 {channel.name} 的 XML 失败: {e}")
 87 |             return []
 88 | 
 89 |         programs = []
 90 |         shanghai_tz = pytz.timezone('Asia/Shanghai')
 91 |         today = datetime.now(shanghai_tz).replace(hour=0, minute=0, second=0, microsecond=0)
 92 | 
 93 |         for channel_elem in root.findall('./Channel'):
 94 |             for epg_item in channel_elem.findall('./EpgItem'):
 95 |                 try:
 96 |                     # Parse time strings
 97 |                     start_time_elem = epg_item.find('./EpgStartDateTime')
 98 |                     end_time_elem = epg_item.find('./EpgEndDateTime')
 99 | 
100 |                     if start_time_elem is None or end_time_elem is None:
101 |                         continue
102 | 
103 |                     start_time_str = start_time_elem.text
104 |                     end_time_str = end_time_elem.text
105 | 
106 |                     # Parse and localize times
107 |                     start_time = shanghai_tz.localize(datetime.strptime(start_time_str, "%Y-%m-%d %H:%M:%S"))
108 |                     end_time = shanghai_tz.localize(datetime.strptime(end_time_str, "%Y-%m-%d %H:%M:%S"))
109 | 
110 |                     # Only include programs from today onwards
111 |                     if start_time.date() >= today.date():
112 |                         # Get episode information
113 |                         episode_info = epg_item.find('./EpisodeInfo')
114 |                         if episode_info is not None:
115 |                             short_desc_elem = episode_info.find('./EpisodeShortDescription')
116 |                             episode_index_elem = episode_info.find('./EpisodeIndex')
117 | 
118 |                             short_desc = short_desc_elem.text if short_desc_elem is not None else ""
119 |                             episode_index = episode_index_elem.text if episode_index_elem is not None else "0"
120 | 
121 |                             # Build program name
122 |                             program_name = short_desc
123 |                             if episode_index and int(episode_index) > 0:
124 |                                 program_name += f" 第{episode_index}集"
125 | 
126 |                             programs.append(Program(
127 |                                 channel_id=channel.channel_id,
128 |                                 title=program_name,
129 |                                 start_time=start_time,
130 |                                 end_time=end_time,
131 |                                 description="",
132 |                                 raw_data={
133 |                                     'short_desc': short_desc,
134 |                                     'episode_index': episode_index,
135 |                                     'start_time_str': start_time_str,
136 |                                     'end_time_str': end_time_str
137 |                                 }
138 |                             ))
139 | 
140 |                 except Exception as e:
141 |                     self.logger.warning(f"⚠️ 解析 EPG 项目失败: {e}")
142 |                     continue
143 | 
144 |         return programs
145 | 
146 | 
147 | # Create platform instance
148 | hoy_platform = HOYPlatform()
149 | 
150 | 
151 | # Legacy functions for backward compatibility
152 | def parse_epg_xml(xml_content, channel_name):
153 |     """Legacy function - parse EPG XML"""
154 |     try:
155 |         # Create a temporary channel object
156 |         channel = Channel(channel_id="temp", name=channel_name)
157 |         programs = hoy_platform._parse_epg_xml(xml_content, channel)
158 | 
159 |         # Convert to legacy format
160 |         results = []
161 |         for program in programs:
162 |             results.append({
163 |                 "channelName": channel_name,
164 |                 "programName": program.title,
165 |                 "description": program.description,
166 |                 "start": program.start_time,
167 |                 "end": program.end_time
168 |             })
169 | 
170 |         return results
171 |     except Exception as e:
172 |         logger.error(f"❌ 旧版 parse_epg_xml 错误: {e}")
173 |         return []
174 | 
175 | 
176 | async def get_hoy_lists():
177 |     """Legacy function - get HOY channel list"""
178 |     try:
179 |         channels = await hoy_platform.fetch_channels()
180 | 
181 |         # Convert to legacy format
182 |         channel_list = []
183 |         for channel in channels:
184 |             channel_list.append({
185 |                 "channelName": channel.name,
186 |                 "rawEpg": channel.extra_data.get('epg_url', ''),
187 |                 "logo": channel.extra_data.get('logo', '')
188 |             })
189 | 
190 |         return channel_list
191 |     except Exception as e:
192 |         logger.error(f"❌ 旧版 get_hoy_lists 错误: {e}")
193 |         return []
194 | 
195 | 
196 | async def get_hoy_epg():
197 |     """Legacy function - fetch HOY EPG data"""
198 |     try:
199 |         channels = await hoy_platform.fetch_channels()
200 |         programs = await hoy_platform.fetch_programs(channels)
201 | 
202 |         # Convert to legacy format
203 |         raw_channels = []
204 |         raw_programs = []
205 | 
206 |         for channel in channels:
207 |             raw_channels.append({
208 |                 "channelName": channel.name,
209 |                 "rawEpg": channel.extra_data.get('epg_url', ''),
210 |                 "logo": channel.extra_data.get('logo', '')
211 |             })
212 | 
213 |         for program in programs:
214 |             channel_name = next((ch.name for ch in channels if ch.channel_id == program.channel_id), "")
215 |             raw_programs.append({
216 |                 "channelName": channel_name,
217 |                 "programName": program.title,
218 |                 "description": program.description,
219 |                 "start": program.start_time,
220 |                 "end": program.end_time
221 |             })
222 | 
223 |         return raw_channels, raw_programs
224 | 
225 |     except Exception as e:
226 |         logger.error(f"❌ 旧版 get_hoy_epg 函数错误: {e}", exc_info=True)
227 |         return [], []
--------------------------------------------------------------------------------
/app/epg_platform/Hami.py:
--------------------------------------------------------------------------------
  1 | import pytz
  2 | from datetime import datetime, timedelta
  3 | from typing import List
  4 | 
  5 | from ..logger import get_logger
  6 | from .base import BaseEPGPlatform, Channel, Program
  7 | 
  8 | logger = get_logger(__name__)
  9 | 
 10 | 
 11 | class HamiPlatform(BaseEPGPlatform):
 12 |     """Hami Video EPG platform implementation"""
 13 | 
 14 |     def __init__(self):
 15 |         super().__init__("hami")
 16 |         self.base_url = "https://apl-hamivideo.cdn.hinet.net/HamiVideo"
 17 |         self.user_agent = "HamiVideo/7.12.806(Android 11;GM1910) OKHTTP/3.12.2"
 18 | 
 19 |     def get_platform_headers(self):
 20 |         """Get Hami-specific headers"""
 21 |         return {
 22 |             'X-ClientSupport-UserProfile': '1',
 23 |             'User-Agent': self.user_agent
 24 |         }
 25 | 
 26 |     async def fetch_channels(self) -> List[Channel]:
 27 |         """Fetch channel list from Hami Video API"""
 28 |         self.logger.info("📡 正在获取 Hami Video 频道列表")
 29 | 
 30 |         params = {
 31 |             "appVersion": "7.12.806",
 32 |             "deviceType": "1",
 33 |             "appOS": "android",
 34 |             "menuId": "162"
 35 |         }
 36 | 
 37 |         response = self.http_client.get(
 38 |             f"{self.base_url}/getUILayoutById.php",
 39 |             headers=self.get_platform_headers(),
 40 |             params=params
 41 |         )
 42 | 
 43 |         data = response.json()
 44 |         channels = []
 45 | 
 46 |         # Find the channel list category
 47 |         elements = []
 48 |         for info in data.get("UIInfo", []):
 49 |             if info.get("title") == "頻道一覽":
 50 |                 elements = info.get('elements', [])
 51 |                 break
 52 | 
 53 |         for element in elements:
 54 |             if element.get('title') and element.get('contentPk'):
 55 |                 channels.append(Channel(
 56 |                     channel_id=element['contentPk'],
 57 |                     name=element['title'],
 58 |                     content_pk=element['contentPk'],
 59 |                     raw_data=element
 60 |                 ))
 61 | 
 62 |         self.logger.info(f"📺 发现 {len(channels)} 个 Hami Video 频道")
 63 |         return channels
 64 | 
 65 |     async def fetch_programs(self, channels: List[Channel]) -> List[Program]:
 66 |         """Fetch program data for all channels"""
 67 |         self.logger.info(f"📡 正在抓取 {len(channels)} 个频道的节目数据")
 68 | 
 69 |         all_programs = []
 70 |         for channel in channels:
 71 |             try:
 72 |                 programs = await self._fetch_channel_programs(
 73 |                     channel.name,
 74 |                     channel.extra_data.get('content_pk')
 75 |                 )
 76 |                 all_programs.extend(programs)
 77 |             except Exception as e:
 78 |                 self.logger.error(f"❌ 获取频道 {channel.name} 节目数据失败: {e}")
 79 |                 continue
 80 | 
 81 |         self.logger.info(f"📊 总共抓取了 {len(all_programs)} 个节目")
 82 |         return all_programs
 83 | 
 84 |     async def _fetch_channel_programs(self, channel_name: str, content_pk: str) -> List[Program]:
 85 |         """Fetch program data for a specific channel"""
 86 |         self.logger.info(f"🔍【Hami】 正在获取频道节目: {channel_name}")
 87 | 
 88 |         programs = []
 89 | 
 90 |         # Fetch EPG data for 7 days
 91 |         for i in range(7):
 92 |             try:
 93 |                 date = datetime.now() + timedelta(days=i)
 94 |                 formatted_date = date.strftime('%Y-%m-%d')
 95 | 
 96 |                 params = {
 97 |                     "deviceType": "1",
 98 |                     "Date": formatted_date,
 99 |                     "contentPk": content_pk,
100 |                 }
101 | 
102 |                 response = self.http_client.get(
103 |                     f"{self.base_url}/getEpgByContentIdAndDate.php",
104 |                     headers=self.get_platform_headers(),
105 |                     params=params
106 |                 )
107 | 
108 |                 data = response.json()
109 |                 ui_info = data.get('UIInfo', [])
110 | 
111 |                 if ui_info and len(ui_info) > 0:
112 |                     elements = ui_info[0].get('elements', [])
113 | 
114 |                     for element in elements:
115 |                         program_info_list = element.get('programInfo', [])
116 |                         if program_info_list:
117 |                             program_info = program_info_list[0]
118 |                             hint_se = program_info.get('hintSE')
119 | 
120 |                             if hint_se:
121 |                                 try:
122 |                                     start_time, end_time = self._parse_hami_time(hint_se)
123 | 
124 |                                     programs.append(Program(
125 |                                         channel_id=content_pk,
126 |                                         title=program_info.get('programName', ''),
127 |                                         start_time=start_time,
128 |                                         end_time=end_time,
129 |                                         description="",
130 |                                         raw_data=program_info
131 |                                     ))
132 |                                 except Exception as e:
133 |                                     self.logger.warning(f"⚠️ 解析节目时间失败: {e}")
134 |                                     continue
135 | 
136 |             except Exception as e:
137 |                 self.logger.warning(f"⚠️ 获取 {channel_name} 第 {i} 天的 EPG 数据失败: {e}")
138 |                 continue
139 | 
140 |         self.logger.debug(f"📺 在 {channel_name} 中发现 {len(programs)} 个节目")
141 |         return programs
142 | 
143 |     def _parse_hami_time(self, time_range: str):
144 |         """Parse Hami time range string to datetime objects"""
145 |         start_time_str, end_time_str = time_range.split('~')
146 | 
147 |         start_time = datetime.strptime(start_time_str, "%Y-%m-%d %H:%M:%S")
148 |         end_time = datetime.strptime(end_time_str, "%Y-%m-%d %H:%M:%S")
149 | 
150 |         # Add Shanghai timezone
151 |         shanghai_tz = pytz.timezone('Asia/Shanghai')
152 |         start_time_shanghai = shanghai_tz.localize(start_time)
153 |         end_time_shanghai = shanghai_tz.localize(end_time)
154 | 
155 |         return start_time_shanghai, end_time_shanghai
156 | 
157 | 
158 | # Create platform instance
159 | hami_platform = HamiPlatform()
160 | 
161 | 
162 | # Legacy functions for backward compatibility
163 | async def request_channel_list():
164 |     """Legacy function - fetch channel list"""
165 |     try:
166 |         channels = await hami_platform.fetch_channels()
167 |         return [{"channelName": ch.name, "contentPk": ch.channel_id} for ch in channels]
168 |     except Exception as e:
169 |         logger.error(f"❌ 旧版 request_channel_list 函数错误: {e}", exc_info=True)
170 |         return []
171 | 
172 | 
173 | async def get_programs_with_retry(channel):
174 |     """Legacy function with retry logic - now handled by http_client"""
175 |     try:
176 |         programs = await request_epg(channel['channelName'], channel['contentPk'])
177 |         return programs
178 |     except Exception as e:
179 |         logger.error(f"❌ 请求 {channel['channelName']} EPG 数据错误: {e}")
180 |         return []
181 | 
182 | 
183 | async def request_all_epg():
184 |     """Legacy function - fetch all EPG data"""
185 |     try:
186 |         channels = await hami_platform.fetch_channels()
187 |         programs = await hami_platform.fetch_programs(channels)
188 | 
189 |         # Convert to legacy format
190 |         raw_channels = [{"channelName": ch.name} for ch in channels]
191 |         raw_programs = []
192 | 
193 |         for program in programs:
194 |             raw_programs.append({
195 |                 "channelName": next((ch.name for ch in channels if ch.channel_id == program.channel_id), ""),
196 |                 "programName": program.title,
197 |                 "description": program.description,
198 |                 "start": program.start_time,
199 |                 "end": program.end_time
200 |             })
201 | 
202 |         return raw_channels, raw_programs
203 | 
204 |     except Exception as e:
205 |         logger.error(f"❌ 旧版 request_all_epg 函数错误: {e}", exc_info=True)
206 |         return [], []
207 | 
208 | 
209 | async def request_epg(channel_name: str, content_pk: str):
210 |     """Legacy function - fetch EPG for specific channel"""
211 |     try:
212 |         programs = await hami_platform._fetch_channel_programs(channel_name, content_pk)
213 | 
214 |         # Convert to legacy format
215 |         result = []
216 |         for program in programs:
217 |             result.append({
218 |                 "channelName": channel_name,
219 |                 "programName": program.title,
220 |                 "description": program.description,
221 |                 "start": program.start_time,
222 |                 "end": program.end_time
223 |             })
224 | 
225 |         return result
226 |     except Exception as e:
227 |         logger.error(f"❌ 旧版 request_epg 函数错误: {e}", exc_info=True)
228 |         return []
229 | 
230 | 
231 | def hami_time_to_datetime(time_range: str):
232 |     """Legacy utility function"""
233 |     return hami_platform._parse_hami_time(time_range)
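
A minimal sketch of the hintSE format consumed by _parse_hami_time, via the legacy wrapper above (the timestamps are invented sample data; the import path assumes the package layout of this repository):

    from app.epg_platform.Hami import hami_time_to_datetime

    # hintSE is a "start~end" pair in "%Y-%m-%d %H:%M:%S" form (sample values)
    start, end = hami_time_to_datetime("2024-05-01 08:00:00~2024-05-01 09:00:00")
    print(start.isoformat(), end.isoformat())  # both localized to Asia/Shanghai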
--------------------------------------------------------------------------------
/app/epg_platform/MyTvSuper.py:
--------------------------------------------------------------------------------
  1 | import pytz
  2 | from datetime import datetime, timedelta
  3 | from typing import List, Tuple
  4 | 
  5 | from ..config import Config
  6 | from ..logger import get_logger
  7 | from ..http_client import get_http_client
  8 | from ..utils import remove_brackets
  9 | from .base import BaseEPGPlatform, Channel, Program
 10 | 
 11 | logger = get_logger(__name__)
 12 | 
 13 | 
 14 | class MyTvSuperPlatform(BaseEPGPlatform):
 15 |     """MyTV Super (TVB) EPG platform implementation"""
 16 | 
 17 |     def __init__(self):
 18 |         super().__init__("tvb")
 19 |         self.base_url = "https://content-api.mytvsuper.com"
 20 | 
 21 |     async def fetch_channels(self) -> List[Channel]:
 22 |         """Fetch channel list from MyTV Super API"""
 23 |         self.logger.info("📺 正在从 MyTV Super 获取频道列表")
 24 | 
 25 |         headers = self.get_default_headers({
 26 |             "Origin": "https://www.mytvsuper.com",
 27 |             "Referer": "https://www.mytvsuper.com/",
 28 |             "Sec-CH-UA": '"Not(A:Brand";v="99", "Google Chrome";v="133", "Chromium";v="133"',
 29 |             "Sec-CH-UA-Mobile": "?0",
 30 |             "Sec-CH-UA-Platform": '"macOS"',
 31 |             "Sec-Fetch-Dest": "empty",
 32 |             "Sec-Fetch-Mode": "cors",
 33 |             "Sec-Fetch-Site": "same-site",
 34 |         })
 35 | 
 36 |         params = {
 37 |             "platform": "web",
 38 |             "country_code": "HK",
 39 |             "profile_class": "general",
 40 |         }
 41 | 
 42 |         response = self.http_client.get(
 43 |             f"{self.base_url}/v1/channel/list",
 44 |             headers=headers,
 45 |             params=params
 46 |         )
 47 | 
 48 |         data = response.json()
 49 |         channels = []
 50 | 
 51 |         for channel_data in data.get('channels', []):
 52 |             channel_name = remove_brackets(channel_data.get('name_tc', ''))
 53 |             if channel_name:
 54 |                 channels.append(Channel(
 55 |                     channel_id=channel_data.get('network_code', ''),
 56 |                     name=channel_name,
 57 |                     network_code=channel_data.get('network_code', ''),
 58 |                     raw_data=channel_data
 59 |                 ))
 60 | 
 61 |         self.logger.info(f"🎆 从 MyTV Super 发现 {len(channels)} 个频道")
 62 |         return channels
 63 | 
 64 |     async def fetch_programs(self, channels: List[Channel]) -> List[Program]:
 65 |         """Fetch program data for all channels"""
 66 |         self.logger.info(f"📡 正在获取 {len(channels)} 个频道的节目数据")
 67 | 
 68 |         all_programs = []
 69 |         for channel in channels:
 70 |             try:
 71 |                 programs = await self._fetch_channel_programs(
 72 |                     channel.extra_data.get('network_code'),
 73 |                     channel.name
 74 |                 )
 75 |                 all_programs.extend(programs)
 76 |             except Exception as e:
 77 |                 self.logger.error(f"❌ 获取频道 {channel.name} 的节目数据失败: {e}")
 78 |                 continue
 79 | 
 80 |         self.logger.info(f"📊 共获取 {len(all_programs)} 个节目")
 81 |         return all_programs
 82 | 
 83 |     async def _fetch_channel_programs(self, network_code: str, channel_name: str) -> List[Program]:
 84 |         """Fetch program data for a specific channel"""
 85 |         self.logger.info(f"🔍 【myTV SUPER】 正在获取频道节目: {channel_name}")
 86 | 
 87 |         # Get date range (yesterday through 7 days ahead)
 88 |         start_date = (datetime.now() - timedelta(days=1)).strftime('%Y%m%d')
 89 |         end_date = (datetime.now() + timedelta(days=7)).strftime('%Y%m%d')
 90 | 
 91 |         headers = self.get_default_headers({
 92 |             "Origin": "https://www.mytvsuper.com",
 93 |             "Referer": "https://www.mytvsuper.com/",
 94 |         })
 95 | 
 96 |         params = {
 97 |             "epg_platform": "web",
 98 |             "country_code": "HK",
 99 |             "network_code": network_code,
100 |             "from": start_date,
101 |             "to": end_date,
102 |         }
103 | 
104 |         response = self.http_client.get(
105 |             f"{self.base_url}/v1/epg",
106 |             headers=headers,
107 |             params=params
108 |         )
109 | 
110 |         data = response.json()
111 |         programs = []
112 | 
113 |         # Flatten the EPG data structure
114 |         total_epg = []
115 |         for day_data in data:
116 |             for item in day_data.get('item', []):
117 |                 epgs = item.get('epg', [])
118 |                 for epg in epgs:
119 |                     total_epg.append(epg)
120 | 
121 |         # Process each program
122 |         for i, epg_program in enumerate(total_epg):
123 |             try:
124 |                 # Parse start time
125 |                 start_time_str = epg_program.get('start_datetime')
126 |                 if not start_time_str:
127 |                     continue
128 | 
129 |                 start_time = datetime.strptime(start_time_str, "%Y-%m-%d %H:%M:%S")
130 | 
131 |                 # Calculate end time based on next program or default duration
132 |                 if i < len(total_epg) - 1:
133 |                     next_epg = total_epg[i + 1]
134 |                     next_start_time_str = next_epg.get('start_datetime')
135 |                     if next_start_time_str:
136 |                         end_time = datetime.strptime(next_start_time_str, "%Y-%m-%d %H:%M:%S")
137 |                     else:
138 |                         end_time = start_time + timedelta(minutes=30)
139 |                 else:
140 |                     end_time = start_time + timedelta(minutes=30)
141 | 
142 |                 # Attach Asia/Shanghai (UTC+8) timezone info
143 |                 eastern_eight = pytz.timezone('Asia/Shanghai')
144 |                 start_time_with_tz = eastern_eight.localize(start_time)
145 |                 end_time_with_tz = eastern_eight.localize(end_time)
146 | 
147 |                 programs.append(Program(
148 |                     channel_id=network_code,
149 |                     title=epg_program.get('programme_title_tc', ''),
150 |                     start_time=start_time_with_tz,
151 |                     end_time=end_time_with_tz,
152 |                     description=epg_program.get('episode_synopsis_tc', ''),
153 |                     raw_data=epg_program
154 |                 ))
155 | 
156 |             except Exception as e:
157 |                 self.logger.warning(f"⚠️ 解析节目数据失败: {e}")
158 |                 continue
159 | 
160 |         self.logger.debug(f"🎯 为 {channel_name} 找到 {len(programs)} 个节目")
161 |         return programs
162 | 
163 | 
164 | # Create platform instance
165 | mytvsuper_platform = MyTvSuperPlatform()
166 | 
167 | 
168 | # Legacy function for backward compatibility
169 | async def get_channels(force: bool = False):
170 |     """Legacy function - fetch channels and programs from MyTV Super"""
171 |     try:
172 |         channels = await mytvsuper_platform.fetch_channels()
173 |         programs = await mytvsuper_platform.fetch_programs(channels)
174 | 
175 |         # Convert to legacy format
176 |         raw_channels = [{"channelName": ch.name} for ch in channels]
177 |         raw_programs = []
178 | 
179 |         for program in programs:
180 |             raw_programs.append({
181 |                 "channelName": next((ch.name for ch in channels if ch.channel_id == program.channel_id), ""),
182 |                 "programName": program.title,
183 |                 "description": program.description,
184 |                 "start": program.start_time,
185 |                 "end": program.end_time
186 |             })
187 | 
188 |         return raw_channels, raw_programs
189 | 
190 |     except Exception as e:
191 |         logger.error(f"💥 旧版 get_channels 函数错误: {e}", exc_info=True)
192 |         return [], []
193 | 
194 | 
195 | def utc8_to_utc(local_time: datetime):
196 |     """Convert UTC+8 time to UTC (legacy utility function)"""
197 |     eastern_eight = pytz.timezone('Asia/Shanghai')
198 |     local_time_with_tz = eastern_eight.localize(local_time)
199 |     utc_time = local_time_with_tz.astimezone(pytz.utc)
200 |     return utc_time
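
A usage sketch for the legacy get_channels helper above, run from a synchronous entry point (not part of the module itself):

    import asyncio
    from app.epg_platform.MyTvSuper import get_channels

    channels, programs = asyncio.run(get_channels())
    print(f"{len(channels)} channels, {len(programs)} programmes")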
--------------------------------------------------------------------------------
/app/epg_platform/NowTV.py:
--------------------------------------------------------------------------------
  1 | import json
  2 | import pytz
  3 | import xml.etree.ElementTree as ET
  4 | from datetime import datetime
  5 | from typing import List
  6 | from bs4 import BeautifulSoup
  7 | 
  8 | from ..config import Config
  9 | from ..logger import get_logger
 10 | from .base import BaseEPGPlatform, Channel, Program
 11 | 
 12 | logger = get_logger(__name__)
 13 | 
 14 | 
 15 | class NowTVPlatform(BaseEPGPlatform):
 16 |     """NowTV EPG platform implementation"""
 17 | 
 18 |     def __init__(self):
 19 |         super().__init__("nowtv")
 20 |         self.base_url = "https://nowplayer.now.com"
 21 |         self.channels_cache = []
 22 |         self.channel_nums_cache = []
 23 | 
 24 |     async def fetch_channels(self) -> List[Channel]:
 25 |         """Fetch channel list from NowTV website"""
 26 |         self.logger.info("📡 正在从 NowTV 获取频道列表")
 27 | 
 28 |         headers = self.get_default_headers({
 29 |             'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
 30 |             'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
 31 |             'Referer': f'{self.base_url}/channels',
 32 |             'accept-language': 'zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7,ar-EG;q=0.6,ar;q=0.5'
 33 |         })
 34 | 
 35 |         # Use cookies to set language to Chinese
 36 |         response = self.http_client.get(
 37 |             f"{self.base_url}/channels",
 38 |             headers=headers,
 39 |             cookies={'LANG': 'zh'}
 40 |         )
 41 | 
 42 |         soup = BeautifulSoup(response.text, 'html.parser')
 43 |         channels = []
 44 |         channel_nums = []
 45 | 
 46 |         # Find all channel items
 47 |         items = soup.find_all('div', class_='product-item')
 48 | 
 49 |         for item in items:
 50 |             # Get logo image URL
 51 |             img_tag = item.find('img')
 52 |             logo = img_tag['src'] if img_tag else None
 53 | 
 54 |             # Get channel name
 55 |             name_tag = item.find('p', class_='img-name')
 56 |             name = name_tag.text if name_tag else None
 57 | 
 58 |             # Get channel number
 59 |             channel_tag = item.find('p', class_='channel')
 60 |             channel_no = channel_tag.text.replace('CH', '') if channel_tag else None
 61 | 
 62 |             if name and channel_no:
 63 |                 channels.append(Channel(
 64 |                     channel_id=channel_no,
 65 |                     name=name,
 66 |                     channel_no=channel_no,
 67 |                     logo=logo,
 68 |                     channelNo=channel_no
 69 |                 ))
 70 |                 channel_nums.append(channel_no)
 71 | 
 72 |         # Cache for later use
 73 |         self.channels_cache = channels
 74 |         self.channel_nums_cache = channel_nums
 75 | 
 76 |         self.logger.info(f"📺 从 NowTV 发现 {len(channels)} 个频道")
 77 |         return channels
 78 | 
 79 |     async def fetch_programs(self, channels: List[Channel]) -> List[Program]:
 80 |         """Fetch program data for all channels"""
 81 |         self.logger.info(f"📡 正在抓取 {len(channels)} 个频道的节目数据")
 82 | 
 83 |         # Get channel numbers for EPG fetching
 84 |         channel_numbers = [ch.extra_data.get('channelNo') for ch in channels if ch.extra_data.get('channelNo')]
 85 | 
 86 |         if not channel_numbers:
 87 |             self.logger.warning("⚠️ 未找到用于 EPG 抓取的频道编号")
 88 |             return []
 89 | 
 90 |         # Fetch 7-day EPG data
 91 |         epg_data = await self._fetch_7day_epg(channel_numbers)
 92 |         programs = []
 93 | 
 94 |         # Process EPG data for each day
 95 |         for day in range(1, 8):  # Days 1-7
 96 |             day_epg = epg_data.get(day, [])
 97 | 
 98 |             for channel_index, channel_epg in enumerate(day_epg):
 99 |                 if channel_index < len(channel_numbers):
100 |                     channel_no = channel_numbers[channel_index]
101 |                     channel_name = self._find_channel_name(channels, channel_no)
102 | 
103 |                     for epg_item in channel_epg:
104 |                         try:
105 |                             start_timestamp = epg_item.get("start", 0) / 1000
106 |                             end_timestamp = epg_item.get("end", 0) / 1000
107 | 
108 |                             start_time = self._timestamp_to_datetime(start_timestamp)
109 |                             end_time = self._timestamp_to_datetime(end_timestamp)
110 | 
111 |                             programs.append(Program(
112 |                                 channel_id=channel_no,
113 |                                 title=epg_item.get("name", ""),
114 |                                 start_time=start_time,
115 |                                 end_time=end_time,
116 |                                 description="",
117 |                                 raw_data=epg_item
118 |                             ))
119 | 
120 |                         except Exception as e:
121 |                             self.logger.warning(f"⚠️ 解析节目数据失败: {e}")
122 |                             continue
123 | 
124 |         self.logger.info(f"📊 总共抓取了 {len(programs)} 个节目")
125 |         return programs
126 | 
127 |     async def _fetch_7day_epg(self, channel_numbers: List[str]) -> dict:
128 |         """Fetch 7-day EPG data from NowTV API"""
129 |         epg_cache = {}
130 | 
131 |         headers = self.get_default_headers({
132 |             'Accept': 'text/plain, */*; q=0.01',
133 |             'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
134 |             'Referer': f'{self.base_url}/tvguide',
135 |             'X-Requested-With': 'XMLHttpRequest',
136 |         })
137 | 
138 |         cookies = {'LANG': 'zh'}
139 | 
140 |         for day in range(1, 8):  # Days 1-7
141 |             try:
142 |                 # Build params manually to handle multiple values for same key
143 |                 params = []
144 |                 for channel_num in channel_numbers:
145 |                     params.append(('channelIdList[]', channel_num))
146 |                 params.append(('day', str(day)))
147 |                 self.logger.info(f"🔍【NowTV】 第 {day} 天的 EPG 请求")
148 |                 response = self.http_client.get(
149 |                     f'{self.base_url}/tvguide/epglist',
150 |                     headers=headers,
151 |                     cookies=cookies,
152 |                     params=params
153 |                 )
154 | 
155 |                 self.logger.debug(f"🔍 第 {day} 天的 EPG 请求: 状态码 {response.status_code}")
156 | 
157 |                 if response.status_code == 200:
158 |                     epg_cache[day] = response.json()
159 |                 else:
160 |                     self.logger.warning(f"⚠️ 获取第 {day} 天的 EPG 失败: 状态码 {response.status_code}")
161 |                     epg_cache[day] = []
162 | 
163 |             except Exception as e:
164 |                 self.logger.error(f"❌ 获取第 {day} 天的 EPG 错误: {e}")
165 |                 epg_cache[day] = []
166 | 
167 |         return epg_cache
168 | 
169 |     def _find_channel_name(self, channels: List[Channel], channel_no: str) -> str:
170 |         """Find channel name by channel number"""
171 |         for channel in channels:
172 |             if channel.extra_data.get('channelNo') == channel_no:
173 |                 return channel.name
174 |         return f"Channel {channel_no}"
175 | 
176 |     def _timestamp_to_datetime(self, timestamp: float) -> datetime:
177 |         """Convert timestamp to Shanghai timezone datetime"""
178 |         utc_dt = datetime.fromtimestamp(timestamp, tz=pytz.UTC)
179 |         target_tz = pytz.timezone('Asia/Shanghai')
180 |         local_dt = utc_dt.astimezone(target_tz)
181 |         return local_dt
182 | 
183 |     async def generate_epg_xml_direct(self, channel_numbers: List[str] = None) -> bytes:
184 |         """Generate EPG XML directly (legacy method)"""
185 |         if not channel_numbers:
186 |             channels = await self.fetch_channels()
187 |             channel_numbers = [ch.extra_data.get('channelNo') for ch in channels if ch.extra_data.get('channelNo')]
188 | 
189 |         epg_data = await self._fetch_7day_epg(channel_numbers)
190 |         channels = self.channels_cache if self.channels_cache else await self.fetch_channels()
191 | 
192 |         tv = ET.Element("tv", {"generator-info-name": f"{Config.APP_NAME} NowTV"})
193 | 
194 |         # Create channel elements
195 |         for channel_no in channel_numbers:
196 |             channel_name = self._find_channel_name(channels, channel_no)
197 |             channel_elem = ET.SubElement(tv, "channel", id=channel_name)
198 |             display_name = ET.SubElement(channel_elem, "display-name", lang="zh")
199 |             display_name.text = channel_name
200 | 
201 |         # Create programme elements
202 |         for day in range(1, 8):
203 |             day_epg = epg_data.get(day, [])
204 |             for channel_index, channel_epg in enumerate(day_epg):
205 |                 if channel_index < len(channel_numbers):
206 |                     channel_no = channel_numbers[channel_index]
207 |                     channel_name = self._find_channel_name(channels, channel_no)
208 | 
209 |                     for epg_item in channel_epg:
210 |                         try:
211 |                             start_timestamp = epg_item.get("start", 0) / 1000
212 |                             end_timestamp = epg_item.get("end", 0) / 1000
213 | 
214 |                             start_time_str = self._timestamp_to_timezone_str(start_timestamp)
215 |                             end_time_str = self._timestamp_to_timezone_str(end_timestamp)
216 | 
217 |                             programme = ET.SubElement(tv, "programme",
218 |                                                     channel=channel_name,
219 |                                                     start=start_time_str,
220 |                                                     stop=end_time_str)
221 |                             title = ET.SubElement(programme, "title", lang="zh")
222 |                             title.text = epg_item.get("name", "")
223 | 
224 |                         except Exception as e:
225 |                             self.logger.warning(f"⚠️ 创建节目元素失败: {e}")
226 |                             continue
227 | 
228 |         return ET.tostring(tv, encoding='utf-8')
229 | 
230 |     def _timestamp_to_timezone_str(self, timestamp: float) -> str:
231 |         """Convert timestamp to timezone string format"""
232 |         utc_dt = datetime.fromtimestamp(timestamp, tz=pytz.UTC)
233 |         target_tz = pytz.timezone('Asia/Shanghai')
234 |         local_dt = utc_dt.astimezone(target_tz)
235 |         return local_dt.strftime('%Y%m%d%H%M%S %z')
236 | 
237 | 
238 | # Create platform instance
239 | nowtv_platform = NowTVPlatform()
240 | 
241 | 
242 | # Legacy functions for backward compatibility
243 | def get_official_channel_list():
244 |     """Legacy function - get channel list (synchronous)"""
245 |     import asyncio
246 |     try:
247 |         loop = asyncio.get_event_loop()
248 |         if loop.is_running():
249 |             # If we're already in an async context, we can't use run()
250 |             # This is a limitation of the legacy sync function
251 |             logger.warning("⚠️ 在异步上下文中调用 get_official_channel_list - 返回空列表")
252 |             return []
253 |         else:
254 |             channels = loop.run_until_complete(nowtv_platform.fetch_channels())
255 |             nowtv_platform.channels_cache = channels
256 |             nowtv_platform.channel_nums_cache = [ch.extra_data.get('channelNo') for ch in channels]
257 |             return [{"name": ch.name, **ch.extra_data} for ch in channels]
258 |     except Exception as e:
259 |         logger.error(f"❌ 旧版 get_official_channel_list 错误: {e}")
260 |         return []
261 | 
262 | 
263 | async def request_nowtv_today_epg():
264 |     """Legacy function - fetch NowTV EPG as XML"""
265 |     try:
266 |         channels = await nowtv_platform.fetch_channels()
267 |         channel_numbers = [ch.extra_data.get('channelNo') for ch in channels if ch.extra_data.get('channelNo')]
268 |         xml_bytes = await nowtv_platform.generate_epg_xml_direct(channel_numbers)
269 |         return xml_bytes
270 |     except Exception as e:
271 |         logger.error(f"❌ 旧版 request_nowtv_today_epg 错误: {e}", exc_info=True)
272 |         return b""
273 | 
274 | 
275 | async def get_now_tv_guide_to_epg(channel_numbers, cache_keyword):
276 |     """Legacy function - generate EPG XML"""
277 |     try:
278 |         xml_bytes = await nowtv_platform.generate_epg_xml_direct(channel_numbers)
279 |         return xml_bytes
280 |     except Exception as e:
281 |         logger.error(f"❌ 旧版 get_now_tv_guide_to_epg 错误: {e}", exc_info=True)
282 |         return b""
283 | 
284 | 
285 | def time_stamp_to_timezone_str(timestamp_s):
286 |     """Legacy utility function"""
287 |     return nowtv_platform._timestamp_to_timezone_str(timestamp_s)
288 | 
289 | 
290 | def find_channel_name(channels, channel_no):
291 |     """Legacy utility function"""
292 |     for item in channels:
293 |         if item.get("channelNo") == channel_no:
294 |             return item.get("name")
295 |     return f"Channel {channel_no}"
296 | 
297 | 
298 | async def fetch_7day_epg(channel_numbers):
299 |     """Legacy function - fetch 7-day EPG"""
300 |     try:
301 |         return await nowtv_platform._fetch_7day_epg(channel_numbers)
302 |     except Exception as e:
303 |         logger.error(f"❌ 旧版 fetch_7day_epg 错误: {e}", exc_info=True)
304 |         return {}
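
A sketch of fetching the 7-day NowTV guide as XMLTV bytes through the legacy wrapper above and saving it locally (the output filename is arbitrary):

    import asyncio
    from app.epg_platform.NowTV import request_nowtv_today_epg

    xml_bytes = asyncio.run(request_nowtv_today_epg())
    with open("nowtv_epg.xml", "wb") as f:  # arbitrary local path
        f.write(xml_bytes)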
--------------------------------------------------------------------------------
/app/epg_platform/RTHK.py:
--------------------------------------------------------------------------------
  1 | import pytz
  2 | from datetime import datetime, timedelta
  3 | from typing import List, Optional
  4 | from bs4 import BeautifulSoup
  5 | 
  6 | from ..logger import get_logger
  7 | from .base import BaseEPGPlatform, Channel, Program
  8 | 
  9 | logger = get_logger(__name__)
 10 | 
 11 | 
 12 | class RTHKPlatform(BaseEPGPlatform):
 13 |     """RTHK EPG platform implementation"""
 14 | 
 15 |     def __init__(self):
 16 |         super().__init__("rthk")
 17 |         self.base_url = "https://www.rthk.hk/timetable"
 18 | 
 19 |         # RTHK channel definitions
 20 |         self.channel_definitions = [
 21 |             {"channelName": "RTHK31", "channelId": "tv31"},
 22 |             {"channelName": "RTHK32", "channelId": "tv32"},
 23 |             {"channelName": "RTHK33", "channelId": "tv33"},
 24 |             {"channelName": "RTHK34", "channelId": "tv34"},
 25 |             {"channelName": "RTHK35", "channelId": "tv35"},
 26 |         ]
 27 | 
 28 |     async def fetch_channels(self) -> List[Channel]:
 29 |         """Fetch channel list from RTHK (predefined channels)"""
 30 |         self.logger.info("📺 正在创建 RTHK 频道列表")
 31 | 
 32 |         channels = []
 33 |         for channel_def in self.channel_definitions:
 34 |             channels.append(Channel(
 35 |                 channel_id=channel_def["channelId"],
 36 |                 name=channel_def["channelName"],
 37 |                 raw_data=channel_def
 38 |             ))
 39 | 
 40 |         self.logger.info(f"📺 发现 {len(channels)} 个 RTHK 频道")
 41 |         return channels
 42 | 
 43 |     async def fetch_programs(self, channels: List[Channel]) -> List[Program]:
 44 |         """Fetch program data for all RTHK channels"""
 45 |         self.logger.info(f"📡 正在抓取 {len(channels)} 个 RTHK 频道的节目数据")
 46 | 
 47 |         all_programs = []
 48 | 
 49 |         for channel in channels:
 50 |             try:
 51 |                 programs = await self._fetch_channel_programs(channel)
 52 |                 all_programs.extend(programs)
 53 |             except Exception as e:
 54 |                 self.logger.error(f"❌ 获取频道 {channel.name} 节目数据失败: {e}")
 55 |                 continue
 56 | 
 57 |         self.logger.info(f"📊 总共抓取了 {len(all_programs)} 个节目")
 58 |         return all_programs
 59 | 
 60 |     async def _fetch_channel_programs(self, channel: Channel) -> List[Program]:
 61 |         """Fetch program data for a specific RTHK channel"""
 62 |         self.logger.info(f"🔍【RTHK】 正在获取频道节目: {channel.name}")
 63 | 
 64 |         url = f"{self.base_url}/{channel.channel_id}"
 65 | 
 66 |         response = self.http_client.get(url)
 67 | 
 68 |         programs = self._parse_epg_from_html(response.text, channel)
 69 | 
 70 |         self.logger.debug(f"📺 在 {channel.name} 中发现 {len(programs)} 个节目")
 71 |         return programs
 72 | 
 73 |     def _parse_epg_from_html(self, html_content: str, channel: Channel) -> List[Program]:
 74 |         """Parse EPG data from RTHK HTML page"""
 75 |         soup = BeautifulSoup(html_content, 'html.parser')
 76 |         programs = []
 77 | 
 78 |         # Today's date for filtering
 79 |         today = datetime.now().strftime("%Y%m%d")
 80 | 
 81 |         # Find all date blocks
 82 |         date_blocks = soup.find_all('div', class_='slideBlock')
 83 | 
 84 |         for block in date_blocks:
 85 |             date_str = block.get('date')
 86 | 
 87 |             # Only process today and future dates
 88 |             if date_str and date_str >= today:
 89 |                 try:
 90 |                     # Parse date
 91 |                     year = int(date_str[0:4])
 92 |                     month = int(date_str[4:6])
 93 |                     day = int(date_str[6:8])
 94 | 
 95 |                     # Find all programs for this date
 96 |                     program_blocks = block.find_all('div', class_='shdBlock')
 97 | 
 98 |                     for program_block in program_blocks:
 99 |                         try:
100 |                             program = self._parse_program_block(program_block, year, month, day, channel)
101 |                             if program:
102 |                                 programs.append(program)
103 |                         except Exception as e:
104 |                             self.logger.warning(f"⚠️ 解析节目块失败: {e}")
105 |                             continue
106 | 
107 |                 except Exception as e:
108 |                     self.logger.warning(f"⚠️ 解析日期块 {date_str} 失败: {e}")
109 |                     continue
110 | 
111 |         return programs
112 | 
113 |     def _parse_program_block(self, program_block, year: int, month: int, day: int, channel: Channel) -> Optional[Program]:
114 |         """Parse a single program block from HTML"""
115 |         # Get time information
116 |         time_block = program_block.find('div', class_='shTimeBlock')
117 |         if not time_block:
118 |             return None
119 | 
120 |         time_elements = time_block.find_all('p', class_='timeDis')
121 |         if not time_elements:
122 |             return None
123 | 
124 |         start_time_str = time_elements[0].text.strip()
125 |         end_time_str = time_elements[2].text.strip() if len(time_elements) > 2 else None
126 | 
127 |         # Parse start time
128 |         try:
129 |             start_hour, start_min = map(int, start_time_str.split(':'))
130 |             start_datetime = datetime(year, month, day, start_hour, start_min)
131 |             start_datetime = pytz.timezone('Asia/Shanghai').localize(start_datetime)
132 |         except Exception:
133 |             return None
134 | 
135 |         # Parse end time
136 |         if end_time_str:
137 |             try:
138 |                 end_hour, end_min = map(int, end_time_str.split(':'))
139 |                 end_datetime = datetime(year, month, day, end_hour, end_min)
140 | 
141 |                 # Handle day crossing
142 |                 if end_hour < start_hour or (end_hour == start_hour and end_min < start_min):
143 |                     end_datetime += timedelta(days=1)
144 | 
145 |                 end_datetime = pytz.timezone('Asia/Shanghai').localize(end_datetime)
146 |             except Exception:
147 |                 end_datetime = start_datetime + timedelta(minutes=30)
148 |         else:
149 |             # Default to 30 minutes if no end time
150 |             end_datetime = start_datetime + timedelta(minutes=30)
151 | 
152 |         # Get program title
153 |         title_block = program_block.find('div', class_='shTitle')
154 |         if not title_block or not title_block.find('a'):
155 |             return None
156 | 
157 |         program_name = title_block.find('a').text.strip()
158 | 
159 |         # Get program description
160 |         sub_title_block = program_block.find('div', class_='shSubTitle')
161 |         description = ""
162 |         if sub_title_block and sub_title_block.find('a'):
163 |             description = sub_title_block.find('a').text.strip()
164 | 
165 |         return Program(
166 |             channel_id=channel.channel_id,
167 |             title=program_name,
168 |             start_time=start_datetime,
169 |             end_time=end_datetime,
170 |             description=description,
171 |             raw_data={
172 |                 'date': f"{year:04d}{month:02d}{day:02d}",
173 |                 'start_time_str': start_time_str,
174 |                 'end_time_str': end_time_str
175 |             }
176 |         )
177 | 
178 | 
179 | # Create platform instance
180 | rthk_platform = RTHKPlatform()
181 | 
182 | 
183 | # Legacy functions for backward compatibility
184 | rthk_channels = [
185 |     {"channelName": "RTHK31", "channelId": "tv31"},
186 |     {"channelName": "RTHK32", "channelId": "tv32"},
187 |     {"channelName": "RTHK33", "channelId": "tv33"},
188 |     {"channelName": "RTHK34", "channelId": "tv34"},
189 |     {"channelName": "RTHK35", "channelId": "tv35"},
190 | ]
191 | 
192 | 
193 | def parse_epg_from_html(html_content, channel_name):
194 |     """Legacy function - parse EPG from HTML"""
195 |     try:
196 |         # Create a temporary channel object
197 |         channel = Channel(channel_id="temp", name=channel_name)
198 |         programs = rthk_platform._parse_epg_from_html(html_content, channel)
199 | 
200 |         # Convert to legacy format
201 |         results = []
202 |         for program in programs:
203 |             results.append({
204 |                 "channelName": channel_name,
205 |                 "programName": program.title,
206 |                 "description": program.description,
207 |                 "start": program.start_time,
208 |                 "end": program.end_time
209 |             })
210 | 
211 |         return results
212 |     except Exception as e:
213 |         logger.error(f"❌ 旧版 parse_epg_from_html 错误: {e}")
214 |         return []
215 | 
216 | 
217 | async def get_rthk_epg():
218 |     """Legacy function - fetch RTHK EPG data"""
219 |     try:
220 |         channels = await rthk_platform.fetch_channels()
221 |         programs = await rthk_platform.fetch_programs(channels)
222 | 
223 |         # Convert to legacy format
224 |         raw_channels = []
225 |         raw_programs = []
226 | 
227 |         for channel in channels:
228 |             raw_channels.append({
229 |                 "channelName": channel.name,
230 |                 "channelId": channel.channel_id
231 |             })
232 | 
233 |         for program in programs:
234 |             channel_name = next((ch.name for ch in channels if ch.channel_id == program.channel_id), "")
235 |             raw_programs.append({
236 |                 "channelName": channel_name,
237 |                 "programName": program.title,
238 |                 "description": program.description,
239 |                 "start": program.start_time,
240 |                 "end": program.end_time
241 |             })
242 | 
243 |         return raw_channels, raw_programs
244 | 
245 |     except Exception as e:
246 |         logger.error(f"❌ 旧版 get_rthk_epg 函数错误: {e}", exc_info=True)
247 |         return [], []
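
For reference, a sketch of the minimal HTML shape _parse_epg_from_html expects, fed through the legacy parse_epg_from_html wrapper (class and attribute names are taken from the parser above; the programme itself is made-up sample data):

    from datetime import datetime
    from app.epg_platform.RTHK import parse_epg_from_html

    today = datetime.now().strftime("%Y%m%d")
    html = f'''
    <div class="slideBlock" date="{today}">
      <div class="shdBlock">
        <div class="shTimeBlock">
          <p class="timeDis">08:00</p><p class="timeDis">-</p><p class="timeDis">09:00</p>
        </div>
        <div class="shTitle"><a>Sample Programme</a></div>
        <div class="shSubTitle"><a>Episode 1</a></div>
      </div>
    </div>
    '''
    print(parse_epg_from_html(html, "RTHK31"))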
--------------------------------------------------------------------------------
/app/epg_platform/Starhub.py:
--------------------------------------------------------------------------------
  1 | from datetime import datetime, timedelta
  2 | from typing import List
  3 | from zoneinfo import ZoneInfo
  4 | 
  5 | from ..logger import get_logger
  6 | from ..utils import has_chinese, utc_to_utc8_datetime
  7 | from .base import BaseEPGPlatform, Channel, Program
  8 | 
  9 | logger = get_logger(__name__)
 10 | 
 11 | 
 12 | class StarhubPlatform(BaseEPGPlatform):
 13 |     """StarHub EPG platform implementation"""
 14 | 
 15 |     def __init__(self):
 16 |         super().__init__("starhub")
 17 |         self.base_url = "https://waf-starhub-metadata-api-p001.ifs.vubiquity.com/v3.1/epg"
 18 |         self.channels_url = f"{self.base_url}/channels"
 19 |         self.schedules_url = f"{self.base_url}/schedules"
 20 | 
 21 |     async def fetch_channels(self) -> List[Channel]:
 22 |         """Fetch channel list from StarHub API"""
 23 |         self.logger.info("📡 正在从 StarHub 获取频道列表")
 24 | 
 25 |         headers = self.get_default_headers()
 26 | 
 27 |         params = {
 28 |             "locale": "zh",
 29 |             "locale_default": "en_US",
 30 |             "device": "1",
 31 |             "limit": "150",
 32 |             "page": "1"
 33 |         }
 34 | 
 35 |         response = self.http_client.get(
 36 |             self.channels_url,
 37 |             headers=headers,
 38 |             params=params
 39 |         )
 40 | 
 41 |         data = response.json()
 42 |         channels = []
 43 | 
 44 |         for resource in data.get('resources', []):
 45 |             if resource.get('metatype') == 'Channel':
 46 |                 channels.append(Channel(
 47 |                     channel_id=resource.get('id', ''),
 48 |                     name=resource.get('title', ''),
 49 |                     raw_data=resource
 50 |                 ))
 51 | 
 52 |         self.logger.info(f"📺 从 StarHub 发现 {len(channels)} 个频道")
 53 |         return channels
 54 | 
 55 |     async def fetch_programs(self, channels: List[Channel]) -> List[Program]:
 56 |         """Fetch program data for all StarHub channels"""
 57 |         self.logger.info(f"📡 正在抓取 {len(channels)} 个 StarHub 频道的节目数据")
 58 | 
 59 |         all_programs = []
 60 | 
 61 |         for channel in channels:
 62 |             try:
 63 |                 programs = await self._fetch_channel_programs(channel)
 64 |                 all_programs.extend(programs)
 65 |             except Exception as e:
 66 |                 self.logger.error(f"❌ 获取频道 {channel.name} 节目数据失败: {e}")
 67 |                 continue
 68 | 
 69 |         self.logger.info(f"📊 总共抓取了 {len(all_programs)} 个节目")
 70 |         return all_programs
 71 | 
 72 |     async def _fetch_channel_programs(self, channel: Channel) -> List[Program]:
 73 |         """Fetch program data for a specific StarHub channel"""
 74 |         self.logger.info(f"🔍【Starhub】 正在获取频道节目: {channel.name} (ID: {channel.channel_id})")
 75 | 
 76 |         # Calculate time range (today to 6 days later)
 77 |         tz = ZoneInfo('Asia/Shanghai')
 78 |         today_start = datetime.now(tz).replace(hour=0, minute=0, second=0, microsecond=0)
 79 |         six_days_later = today_start + timedelta(days=6)
 80 |         six_days_later_end = six_days_later.replace(hour=23, minute=59, second=59)
 81 | 
 82 |         today_timestamp = int(today_start.timestamp())
 83 |         six_days_later_timestamp = int(six_days_later_end.timestamp())
 84 | 
 85 |         headers = self.get_default_headers()
 86 | 
 87 |         params = {
 88 |             "locale": "zh",
 89 |             "locale_default": "en_US",
 90 |             "device": "1",
 91 |             "limit": "500",
 92 |             "page": "1",
 93 |             "in_channel_id": channel.channel_id,
 94 |             "gt_end": str(today_timestamp),      # window start: match schedules ending after this
 95 |             "lt_start": str(six_days_later_timestamp),  # window end: match schedules starting before this
 96 |         }
 97 | 
 98 |         response = self.http_client.get(
 99 |             self.schedules_url,
100 |             headers=headers,
101 |             params=params
102 |         )
103 | 
104 |         data = response.json()
105 |         programs = []
106 | 
107 |         for resource in data.get('resources', []):
108 |             if resource.get('metatype') == 'Schedule':
109 |                 try:
110 |                     title = resource.get('title', '')
111 |                     description = resource.get('description', '')
112 |                     episode_number = resource.get('episode_number')
113 | 
114 |                     # Add episode number based on language
115 |                     if episode_number:
116 |                         if has_chinese(title) or has_chinese(description):
117 |                             title += f" 第{episode_number}集"
118 |                         else:
119 |                             title += f" Ep{episode_number}"
120 | 
121 |                     # Convert timestamps to datetime objects
122 |                     start_time = utc_to_utc8_datetime(resource.get('start'))
123 |                     end_time = utc_to_utc8_datetime(resource.get('end'))
124 | 
125 |                     programs.append(Program(
126 |                         channel_id=channel.channel_id,
127 |                         title=title,
128 |                         start_time=start_time,
129 |                         end_time=end_time,
130 |                         description=description,
131 |                         raw_data=resource
132 |                     ))
133 | 
134 |                 except Exception as e:
135 |                     self.logger.warning(f"⚠️ 解析节目数据失败: {e}")
136 |                     continue
137 | 
138 |         self.logger.debug(f"📺 在 {channel.name} 中发现 {len(programs)} 个节目")
139 |         return programs
140 | 
141 | 
142 | # Create platform instance
143 | starhub_platform = StarhubPlatform()
144 | 
145 | 
146 | # Legacy functions for backward compatibility
147 | def request_channels():
148 |     """Legacy function - get StarHub channel list (synchronous)"""
149 |     try:
150 |         import asyncio
151 |         loop = asyncio.get_event_loop()
152 |         if loop.is_running():
153 |             logger.warning("⚠️ 在异步上下文中调用旧版 request_channels - 返回空列表")
154 |             return []
155 |         else:
156 |             channels = loop.run_until_complete(starhub_platform.fetch_channels())
157 |             # Convert to legacy format
158 |             return [{"channelName": ch.name, "channelId": ch.channel_id} for ch in channels]
159 |     except Exception as e:
160 |         logger.error(f"❌ 旧版 request_channels 错误: {e}")
161 |         return []
162 | 
163 | 
164 | def request_epg(channel_id, channel_name):
165 |     """Legacy function - get EPG for specific channel (synchronous)"""
166 |     try:
167 |         import asyncio
168 |         loop = asyncio.get_event_loop()
169 |         if loop.is_running():
170 |             logger.warning("⚠️ 在异步上下文中调用旧版 request_epg - 返回空列表")
171 |             return []
172 |         else:
173 |             # Create a temporary channel object
174 |             channel = Channel(channel_id=channel_id, name=channel_name)
175 |             programs = loop.run_until_complete(starhub_platform._fetch_channel_programs(channel))
176 | 
177 |             # Convert to legacy format
178 |             program_list = []
179 |             for program in programs:
180 |                 program_list.append({
181 |                     "channelName": channel_name,
182 |                     "programName": program.title,
183 |                     "description": program.description,
184 |                     "start": program.start_time,
185 |                     "end": program.end_time
186 |                 })
187 | 
188 |             return program_list
189 |     except Exception as e:
190 |         logger.error(f"❌ 旧版 request_epg 错误: {e}")
191 |         return []
192 | 
193 | 
194 | async def get_starhub_epg():
195 |     """Legacy function - fetch StarHub EPG data"""
196 |     try:
197 |         channels = await starhub_platform.fetch_channels()
198 |         programs = await starhub_platform.fetch_programs(channels)
199 | 
200 |         # Convert to legacy format
201 |         raw_channels = []
202 |         raw_programs = []
203 | 
204 |         for channel in channels:
205 |             raw_channels.append({
206 |                 "channelName": channel.name,
207 |                 "channelId": channel.channel_id
208 |             })
209 | 
210 |         for program in programs:
211 |             channel_name = next((ch.name for ch in channels if ch.channel_id == program.channel_id), "")
212 |             raw_programs.append({
213 |                 "channelName": channel_name,
214 |                 "programName": program.title,
215 |                 "description": program.description,
216 |                 "start": program.start_time,
217 |                 "end": program.end_time
218 |             })
219 | 
220 |         return raw_channels, raw_programs
221 | 
222 |     except Exception as e:
223 |         logger.error(f"❌ 旧版 get_starhub_epg 函数错误: {e}", exc_info=True)
224 |         return [], []
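
The schedules query above windows results with epoch seconds; a standalone sketch of that arithmetic (the printed values depend on when it is run):

    from datetime import datetime, timedelta
    from zoneinfo import ZoneInfo

    tz = ZoneInfo("Asia/Shanghai")
    window_start = datetime.now(tz).replace(hour=0, minute=0, second=0, microsecond=0)
    window_end = (window_start + timedelta(days=6)).replace(hour=23, minute=59, second=59)

    # gt_end keeps schedules ending after local midnight today;
    # lt_start keeps schedules starting before the end of the sixth day ahead
    params = {
        "gt_end": str(int(window_start.timestamp())),
        "lt_start": str(int(window_end.timestamp())),
    }
    print(params)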
--------------------------------------------------------------------------------
/app/epg_platform/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CharmingCheung/CharmingEPG/17c7d1f290062a7c3a7f39018302532208c81d1a/app/epg_platform/__init__.py
--------------------------------------------------------------------------------
/app/epg_platform/base.py:
--------------------------------------------------------------------------------
  1 | import os
  2 | from abc import ABC, abstractmethod
  3 | from datetime import datetime
  4 | from typing import List, Tuple, Dict, Any, Optional
  5 | 
  6 | from ..config import Config
  7 | from ..logger import get_logger
  8 | from ..http_client import get_http_client
  9 | from ..epg.EpgGenerator import generateEpg
 10 | 
 11 | logger = get_logger(__name__)
 12 | 
 13 | 
 14 | class Channel:
 15 |     """Data class for channel information"""
 16 |     def __init__(self, channel_id: str, name: str, **kwargs):
 17 |         self.channel_id = channel_id
 18 |         self.name = name
 19 |         self.extra_data = kwargs
 20 | 
 21 |     def __repr__(self):
 22 |         return f"Channel(id={self.channel_id}, name={self.name})"
 23 | 
 24 | 
 25 | class Program:
 26 |     """Data class for program information"""
 27 |     def __init__(self, channel_id: str, title: str, start_time: datetime,
 28 |                  end_time: datetime, description: str = "", **kwargs):
 29 |         self.channel_id = channel_id
 30 |         self.title = title
 31 |         self.start_time = start_time
 32 |         self.end_time = end_time
 33 |         self.description = description
 34 |         self.extra_data = kwargs
 35 | 
 36 |     def __repr__(self):
 37 |         return f"Program(channel={self.channel_id}, title={self.title}, start={self.start_time})"
 38 | 
 39 | 
 40 | class BaseEPGPlatform(ABC):
 41 |     """Base class for all EPG platforms with common functionality"""
 42 | 
 43 |     def __init__(self, platform_name: str):
 44 |         self.platform_name = platform_name
 45 |         self.logger = get_logger(f"platform.{platform_name}")
 46 |         self.http_client = get_http_client()
 47 | 
 48 |     @abstractmethod
 49 |     async def fetch_channels(self) -> List[Channel]:
 50 |         """Fetch channel list from the platform"""
 51 |         pass
 52 | 
 53 |     @abstractmethod
 54 |     async def fetch_programs(self, channels: List[Channel]) -> List[Program]:
 55 |         """Fetch program data for the given channels"""
 56 |         pass
 57 | 
 58 |     def get_epg_file_path(self, date_str: Optional[str] = None) -> str:
 59 |         """Get the file path for EPG data"""
 60 |         if date_str is None:
 61 |             date_str = self._get_date_str()
 62 |         return Config.get_epg_file_path(self.platform_name, date_str)
 63 | 
 64 |     def _get_date_str(self) -> str:
 65 |         """Get current date string in YYYYMMDD format"""
 66 |         return datetime.now().strftime('%Y%m%d')
 67 | 
 68 |     def _ensure_directory_exists(self, file_path: str):
 69 |         """Create directory if it doesn't exist"""
 70 |         directory = os.path.dirname(file_path)
 71 |         if not os.path.exists(directory):
 72 |             os.makedirs(directory, exist_ok=True)
 73 |             self.logger.info(f"📌 创建目录: {directory}")
 74 | 
 75 |     async def generate_epg_xml(self, channels: List[Channel], programs: List[Program]) -> bytes:
 76 |         """Generate EPG XML from channels and programs data"""
 77 |         self.logger.info(f"🛠️ 正在生成EPG XML:{len(channels)}个频道,{len(programs)}个节目")
 78 |         return await generateEpg(channels, programs)
 79 | 
 80 |     async def save_epg_to_file(self, xml_content: bytes, file_path: str):
 81 |         """Save EPG XML content to file"""
 82 |         self._ensure_directory_exists(file_path)
 83 | 
 84 |         with open(file_path, "wb") as file:
 85 |             file.write(xml_content)
 86 | 
 87 |         file_size = len(xml_content)
 88 |         self.logger.info(f"💾 保存EPG数据到 {file_path} ({file_size} 字节)")
 89 | 
 90 |     def _delete_old_epg_files(self):
 91 |         """Delete old EPG files, keeping only today's file"""
 92 |         try:
 93 |             today_file = os.path.basename(self.get_epg_file_path())
 94 |             epg_dir = os.path.dirname(self.get_epg_file_path())
 95 | 
 96 |             if not os.path.exists(epg_dir):
 97 |                 return
 98 | 
 99 |             deleted_count = 0
100 |             for file_name in os.listdir(epg_dir):
101 |                 if file_name.endswith(".xml") and file_name != today_file:
102 |                     file_path = os.path.join(epg_dir, file_name)
103 |                     os.remove(file_path)
104 |                     deleted_count += 1
105 |                     self.logger.debug(f"🗑️ 删除旧EPG文件: {file_name}")
106 | 
107 |             if deleted_count > 0:
108 |                 self.logger.info(f"🧹 清理{self.platform_name}的{deleted_count}个旧EPG文件")
109 | 
110 |         except Exception as e:
111 |             self.logger.error(f"❌ 删除{self.platform_name}的旧EPG文件失败: {e}")
112 | 
113 |     async def update_epg(self, force: bool = False) -> bool:
114 |         """
115 |         Main method to update EPG data for this platform
116 | 
117 |         Args:
118 |             force: Force update even if today's file already exists
119 | 
120 |         Returns:
121 |             bool: True if update was successful, False otherwise
122 |         """
123 |         file_path = self.get_epg_file_path()
124 | 
125 |         # Check if today's EPG already exists
126 |         if not force and os.path.exists(file_path):
127 |             self.logger.info(f"✅ {self.platform_name}的今日EPG数据已存在,跳过更新")
128 |             return True
129 | 
130 |         try:
131 |             self.logger.info(f"🚀 开始EPG更新:{self.platform_name}")
132 | 
133 |             # Fetch data
134 |             channels = await self.fetch_channels()
135 |             if not channels:
136 |                 self.logger.warning(f"⚠️ {self.platform_name}未找到频道数据")
137 |                 return False
138 | 
139 |             programs = await self.fetch_programs(channels)
140 |             if not programs:
141 |                 self.logger.warning(f"⚠️ {self.platform_name}未找到节目数据")
142 |                 return False
143 | 
144 |             # Generate XML
145 |             xml_content = await self.generate_epg_xml(channels, programs)
146 | 
147 |             # Save to file
148 |             await self.save_epg_to_file(xml_content, file_path)
149 | 
150 |             # Clean up old files
151 |             self._delete_old_epg_files()
152 | 
153 |             self.logger.info(f"✨ 成功更新{self.platform_name}的EPG数据")
154 |             return True
155 | 
156 |         except Exception as e:
157 |             self.logger.error(f"❌ 更新{self.platform_name}的EPG数据失败: {e}", exc_info=True)
158 |             return False
159 | 
160 |     def get_default_headers(self, additional_headers: Optional[Dict[str, str]] = None) -> Dict[str, str]:
161 |         """Get default headers for HTTP requests"""
162 |         headers = {
163 |             "User-Agent": Config.DEFAULT_USER_AGENT,
164 |             "Accept": "application/json, text/plain, */*",
165 |             "Accept-Language": "zh-CN,zh-TW;q=0.9,zh;q=0.8,en-US;q=0.7,en;q=0.6",
166 |             "Cache-Control": "no-cache",
167 |         }
168 | 
169 |         if additional_headers:
170 |             headers.update(additional_headers)
171 | 
172 |         return headers
--------------------------------------------------------------------------------
/app/epg_platform/platform_template.py:
--------------------------------------------------------------------------------
  1 | # Template for refactoring platform modules to new architecture
  2 | # This shows the pattern for converting old platform modules
  3 | 
  4 | from typing import List
  5 | from datetime import datetime
  6 | 
  7 | from ..logger import get_logger
  8 | from .base import BaseEPGPlatform, Channel, Program
  9 | 
 10 | logger = get_logger(__name__)
 11 | 
 12 | 
 13 | class TemplatePlatform(BaseEPGPlatform):
 14 |     """Template EPG platform implementation"""
 15 | 
 16 |     def __init__(self):
 17 |         super().__init__("template")  # Replace with actual platform name
 18 |         self.base_url = "https://api.example.com"  # Replace with actual API URL
 19 | 
 20 |     async def fetch_channels(self) -> List[Channel]:
 21 |         """Fetch channel list from platform API"""
 22 |         self.logger.info("Fetching channel list from Template Platform")
 23 | 
 24 |         headers = self.get_default_headers({
 25 |             # Add platform-specific headers here
 26 |         })
 27 | 
 28 |         # Make API call to fetch channels
 29 |         response = self.http_client.get(
 30 |             f"{self.base_url}/channels",  # Replace with actual endpoint
 31 |             headers=headers
 32 |         )
 33 | 
 34 |         data = response.json()
 35 |         channels = []
 36 | 
 37 |         # Parse response and create Channel objects
 38 |         for channel_data in data.get('channels', []):  # Adjust based on response structure
 39 |             channels.append(Channel(
 40 |                 channel_id=channel_data.get('id', ''),
 41 |                 name=channel_data.get('name', ''),
 42 |                 # Add other channel properties as needed
 43 |                 raw_data=channel_data
 44 |             ))
 45 | 
 46 |         self.logger.info(f"Found {len(channels)} channels from Template Platform")
 47 |         return channels
 48 | 
 49 |     async def fetch_programs(self, channels: List[Channel]) -> List[Program]:
 50 |         """Fetch program data for all channels"""
 51 |         self.logger.info(f"Fetching program data for {len(channels)} channels")
 52 | 
 53 |         all_programs = []
 54 |         for channel in channels:
 55 |             try:
 56 |                 programs = await self._fetch_channel_programs(channel)
 57 |                 all_programs.extend(programs)
 58 |             except Exception as e:
 59 |                 self.logger.error(f"Failed to fetch programs for channel {channel.name}: {e}")
 60 |                 continue
 61 | 
 62 |         self.logger.info(f"Fetched {len(all_programs)} programs total")
 63 |         return all_programs
 64 | 
 65 |     async def _fetch_channel_programs(self, channel: Channel) -> List[Program]:
 66 |         """Fetch program data for a specific channel"""
 67 |         self.logger.debug(f"Fetching programs for channel: {channel.name}")
 68 | 
 69 |         headers = self.get_default_headers()
 70 | 
 71 |         # Make API call to fetch programs for this channel
 72 |         response = self.http_client.get(
 73 |             f"{self.base_url}/programs/{channel.channel_id}",  # Replace with actual endpoint
 74 |             headers=headers
 75 |         )
 76 | 
 77 |         data = response.json()
 78 |         programs = []
 79 | 
 80 |         # Parse response and create Program objects
 81 |         for program_data in data.get('programs', []):  # Adjust based on response structure
 82 |             try:
 83 |                 # Parse time data (adjust format as needed)
 84 |                 start_time = self._parse_time(program_data.get('start_time'))
 85 |                 end_time = self._parse_time(program_data.get('end_time'))
 86 | 
 87 |                 programs.append(Program(
 88 |                     channel_id=channel.channel_id,
 89 |                     title=program_data.get('title', ''),
 90 |                     start_time=start_time,
 91 |                     end_time=end_time,
 92 |                     description=program_data.get('description', ''),
 93 |                     raw_data=program_data
 94 |                 ))
 95 | 
 96 |             except Exception as e:
 97 |                 self.logger.warning(f"Failed to parse program data: {e}")
 98 |                 continue
 99 | 
100 |         self.logger.debug(f"Found {len(programs)} programs for {channel.name}")
101 |         return programs
102 | 
103 |     def _parse_time(self, time_str: str) -> datetime:
104 |         """Parse time string to datetime object (customize based on platform format)"""
105 |         # This is a placeholder - adjust the parsing logic for each platform
106 |         try:
107 |             return datetime.strptime(time_str, "%Y-%m-%d %H:%M:%S")
108 |         except (ValueError, TypeError):
109 |             return datetime.now()
110 | 
111 | 
112 | # Create platform instance
113 | template_platform = TemplatePlatform()
114 | 
115 | 
116 | # Legacy functions for backward compatibility (customize as needed)
117 | async def legacy_function_name():
118 |     """Legacy function - maintain backward compatibility"""
119 |     try:
120 |         channels = await template_platform.fetch_channels()
121 |         programs = await template_platform.fetch_programs(channels)
122 | 
123 |         # Convert to legacy format if needed
124 |         # Return in the format expected by existing code
125 | 
126 |         return channels, programs
127 |     except Exception as e:
128 |         logger.error(f"Error in legacy function: {e}", exc_info=True)
129 |         return [], []
--------------------------------------------------------------------------------
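
Usage note (not part of the repository): a minimal sketch of how the template above might be specialized. The "acme" platform name, its endpoints, response fields, and the ISO-8601 timestamp format are made-up assumptions for illustration; only BaseEPGPlatform, Channel, Program, self.http_client and get_default_headers come from the modules shown here, and such a file would sit alongside the other modules in app/epg_platform/.

    # Hypothetical example: the "acme" platform, its endpoints and field names are made up.
    from datetime import datetime
    from typing import List

    from .base import BaseEPGPlatform, Channel, Program


    class AcmePlatform(BaseEPGPlatform):
        """Sketch of a concrete platform derived from the template above"""

        def __init__(self):
            super().__init__("acme")
            self.base_url = "https://epg.acme.example"

        async def fetch_channels(self) -> List[Channel]:
            response = self.http_client.get(
                f"{self.base_url}/channels",
                headers=self.get_default_headers()
            )
            return [
                Channel(channel_id=item["id"], name=item["name"], raw_data=item)
                for item in response.json().get("channels", [])
            ]

        async def fetch_programs(self, channels: List[Channel]) -> List[Program]:
            programs: List[Program] = []
            for channel in channels:
                response = self.http_client.get(
                    f"{self.base_url}/schedule/{channel.channel_id}",
                    headers=self.get_default_headers()
                )
                for item in response.json().get("programs", []):
                    programs.append(Program(
                        channel_id=channel.channel_id,
                        title=item.get("title", ""),
                        # Assumed ISO-8601 timestamps; adjust to the real feed format
                        start_time=datetime.fromisoformat(item["start"]),
                        end_time=datetime.fromisoformat(item["end"]),
                        description=item.get("description", ""),
                        raw_data=item,
                    ))
            return programs


    acme_platform = AcmePlatform()
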
/app/file_manager.py:
--------------------------------------------------------------------------------
  1 | import os
  2 | import xml.etree.ElementTree as ET
  3 | from datetime import datetime
  4 | from typing import List, Optional
  5 | from fastapi import HTTPException
  6 | from fastapi.responses import Response
  7 | 
  8 | from .config import Config
  9 | from .logger import get_logger
 10 | 
 11 | logger = get_logger(__name__)
 12 | 
 13 | 
 14 | class EPGFileManager:
 15 |     """Manages EPG file operations including reading, writing, and aggregation"""
 16 | 
 17 |     @staticmethod
 18 |     def get_epg_file_path(platform: str, date_str: Optional[str] = None) -> str:
 19 |         """Get the file path for EPG data"""
 20 |         if date_str is None:
 21 |             date_str = datetime.now().strftime('%Y%m%d')
 22 |         return Config.get_epg_file_path(platform, date_str)
 23 | 
 24 |     @staticmethod
 25 |     def ensure_directory_exists(file_path: str):
 26 |         """Create directory if it doesn't exist"""
 27 |         directory = os.path.dirname(file_path)
 28 |         if not os.path.exists(directory):
 29 |             os.makedirs(directory, exist_ok=True)
 30 |             logger.info(f"📌 创建目录: {directory}")
 31 | 
 32 |     @staticmethod
 33 |     def read_epg_file(platform: str, date_str: Optional[str] = None) -> Optional[bytes]:
 34 |         """
 35 |         Read EPG file content for a platform
 36 | 
 37 |         Args:
 38 |             platform: Platform name
 39 |             date_str: Date string (YYYYMMDD), defaults to today
 40 | 
 41 |         Returns:
 42 |             File content as bytes or None if file doesn't exist
 43 |         """
 44 |         file_path = EPGFileManager.get_epg_file_path(platform, date_str)
 45 | 
 46 |         if not os.path.exists(file_path):
 47 |             logger.warning(f"⚠️ EPG文件未找到: {file_path}")
 48 |             return None
 49 | 
 50 |         try:
 51 |             with open(file_path, "rb") as file:
 52 |                 content = file.read()
 53 |                 logger.debug(f"✅ 成功读取EPG文件: {file_path} ({len(content)} 字节)")
 54 |                 return content
 55 |         except Exception as e:
 56 |             logger.error(f"❌ 读取EPG文件失败 {file_path}: {e}")
 57 |             return None
 58 | 
 59 |     @staticmethod
 60 |     def save_epg_file(platform: str, content: bytes, date_str: Optional[str] = None) -> bool:
 61 |         """
 62 |         Save EPG content to file
 63 | 
 64 |         Args:
 65 |             platform: Platform name
 66 |             content: XML content as bytes
 67 |             date_str: Date string (YYYYMMDD), defaults to today
 68 | 
 69 |         Returns:
 70 |             True if successful, False otherwise
 71 |         """
 72 |         file_path = EPGFileManager.get_epg_file_path(platform, date_str)
 73 | 
 74 |         try:
 75 |             EPGFileManager.ensure_directory_exists(file_path)
 76 | 
 77 |             with open(file_path, "wb") as file:
 78 |                 file.write(content)
 79 | 
 80 |             logger.info(f"💾 保存EPG文件: {file_path} ({len(content)} 字节)")
 81 |             return True
 82 | 
 83 |         except Exception as e:
 84 |             logger.error(f"❌ 保存EPG文件失败 {file_path}: {e}")
 85 |             return False
 86 | 
 87 |     @staticmethod
 88 |     def delete_old_epg_files(platform: str, keep_current: bool = True) -> int:
 89 |         """
 90 |         Delete old EPG files for a platform
 91 | 
 92 |         Args:
 93 |             platform: Platform name
 94 |             keep_current: Whether to keep today's file
 95 | 
 96 |         Returns:
 97 |             Number of files deleted
 98 |         """
 99 |         try:
100 |             current_date = datetime.now().strftime('%Y%m%d')
101 |             current_file = f"{platform}_{current_date}.xml"
102 |             epg_dir = os.path.dirname(EPGFileManager.get_epg_file_path(platform))
103 | 
104 |             if not os.path.exists(epg_dir):
105 |                 return 0
106 | 
107 |             deleted_count = 0
108 |             for file_name in os.listdir(epg_dir):
109 |                 if file_name.endswith(".xml"):
110 |                     if not keep_current or file_name != current_file:
111 |                         file_path = os.path.join(epg_dir, file_name)
112 |                         os.remove(file_path)
113 |                         deleted_count += 1
114 |                         logger.debug(f"🗑️ 删除旧EPG文件: {file_name}")
115 | 
116 |             if deleted_count > 0:
117 |                 logger.info(f"🧹 清理{platform}的{deleted_count}个旧EPG文件")
118 | 
119 |             return deleted_count
120 | 
121 |         except Exception as e:
122 |             logger.error(f"❌ 删除{platform}的旧EPG文件失败: {e}")
123 |             return 0
124 | 
125 |     @staticmethod
126 |     def aggregate_epg_files(platforms: List[str]) -> Response:
127 |         """
128 |         Aggregate EPG files from multiple platforms
129 | 
130 |         Args:
131 |             platforms: List of platform names to aggregate
132 | 
133 |         Returns:
134 |             FastAPI Response with aggregated XML content
135 | 
136 |         Raises:
137 |             HTTPException: If no EPG data is available
138 |         """
139 |         logger.info(f"🔄 正在聚合平台EPG数据: {platforms}")
140 | 
141 |         merged_root = ET.Element("tv")
142 |         merged_root.set("generator-info-name", f"{Config.APP_NAME} v{Config.APP_VERSION}")
143 |         merged_root.set("generator-info-url", "https://github.com/your-repo/CharmingEPG")
144 | 
145 |         channels_seen = set()
146 |         total_channels = 0
147 |         total_programs = 0
148 | 
149 |         for platform in platforms:
150 |             content = EPGFileManager.read_epg_file(platform)
151 |             if not content:
152 |                 logger.warning(f"⚠️ 未找到平台的EPG数据: {platform}")
153 |                 continue
154 | 
155 |             try:
156 |                 platform_root = ET.fromstring(content)
157 | 
158 |                 # Process channels (first-come-first-served for duplicates)
159 |                 platform_channels = 0
160 |                 platform_programs = 0
161 | 
162 |                 for channel in platform_root.findall("./channel"):
163 |                     channel_id = channel.get("id")
164 |                     if channel_id and channel_id not in channels_seen:
165 |                         channels_seen.add(channel_id)
166 |                         merged_root.append(channel)
167 |                         platform_channels += 1
168 | 
169 |                         # Add all programs for this channel
170 |                         for programme in platform_root.findall(f"./programme[@channel='{channel_id}']"):
171 |                             merged_root.append(programme)
172 |                             platform_programs += 1
173 | 
174 |                 total_channels += platform_channels
175 |                 total_programs += platform_programs
176 | 
177 |                 logger.debug(
178 |                     f"🔀 从{platform}合并{platform_channels}个频道和{platform_programs}个节目"
179 |                 )
180 | 
181 |             except ET.ParseError as e:
182 |                 logger.error(f"❌ 解析平台{platform}的XML失败: {e}")
183 |                 continue
184 | 
185 |         if total_channels == 0:
186 |             logger.error("❌ 任何平台都未找到有效的EPG数据")
187 |             raise HTTPException(status_code=404, detail="No EPG data available")
188 | 
189 |         # Convert merged XML to string
190 |         merged_xml = ET.tostring(merged_root, encoding="utf-8", xml_declaration=True)
191 | 
192 |         logger.info(f"✨ 成功聚合{total_channels}个频道和{total_programs}个节目")
193 | 
194 |         return Response(
195 |             content=merged_xml,
196 |             media_type="application/xml",
197 |             headers={
198 |                 "Content-Disposition": "attachment; filename=epg.xml",
199 |                 "Cache-Control": f"public, max-age={Config.EPG_CACHE_TTL}, s-maxage={Config.EPG_CACHE_TTL}",
200 |                 "ETag": f'"epg-{datetime.now().strftime("%Y%m%d")}-{",".join(platforms)}"',
201 |                 "X-Total-Channels": str(total_channels),
202 |                 "X-Total-Programs": str(total_programs),
203 |                 "X-Platforms": ",".join(platforms)
204 |             }
205 |         )
206 | 
207 |     @staticmethod
208 |     def get_single_platform_epg(platform: str) -> Response:
209 |         """
210 |         Get EPG data for a single platform
211 | 
212 |         Args:
213 |             platform: Platform name
214 | 
215 |         Returns:
216 |             FastAPI Response with XML content
217 | 
218 |         Raises:
219 |             HTTPException: If EPG file is not found
220 |         """
221 |         content = EPGFileManager.read_epg_file(platform)
222 | 
223 |         if content is None:
224 |             logger.error(f"❌ 未找到平台{platform}的EPG文件")
225 |             raise HTTPException(
226 |                 status_code=404,
227 |                 detail=f"EPG data not available for platform: {platform}"
228 |             )
229 | 
230 |         # Parse XML to get channel and program counts for headers
231 |         try:
232 |             root = ET.fromstring(content)
233 |             channel_count = len(root.findall("./channel"))
234 |             program_count = len(root.findall("./programme"))
235 | 
236 |             logger.info(f"📡 为{platform}提供EPG服务: {channel_count}个频道,{program_count}个节目")
237 | 
238 |             return Response(
239 |                 content=content,
240 |                 media_type="application/xml",
241 |                 headers={
242 |                     "Content-Disposition": f"attachment; filename={platform}_epg.xml",
243 |                     "Cache-Control": f"public, max-age={Config.EPG_CACHE_TTL}, s-maxage={Config.EPG_CACHE_TTL}",
244 |                     "ETag": f'"epg-{platform}-{datetime.now().strftime("%Y%m%d")}"',
245 |                     "X-Platform": platform,
246 |                     "X-Total-Channels": str(channel_count),
247 |                     "X-Total-Programs": str(program_count)
248 |                 }
249 |             )
250 | 
251 |         except ET.ParseError:
252 |             logger.warning(f"⚠️ 平台{platform}的XML内容无效,按原样提供服务")
253 |             return Response(
254 |                 content=content,
255 |                 media_type="application/xml",
256 |                 headers={
257 |                     "Content-Disposition": f"attachment; filename={platform}_epg.xml",
258 |                     "Cache-Control": f"public, max-age={Config.EPG_CACHE_TTL}, s-maxage={Config.EPG_CACHE_TTL}",
259 |                     "ETag": f'"epg-{platform}-{datetime.now().strftime("%Y%m%d")}"',
260 |                     "X-Platform": platform
261 |                 }
262 |             )
--------------------------------------------------------------------------------
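
Usage note (not part of the repository): a minimal sketch of EPGFileManager used in isolation, assuming the package is importable as app. The "demo" platform name and the tiny XMLTV payload are made up; the on-disk path and file name are whatever Config.get_epg_file_path returns.

    # Illustrative only: "demo" is not a real platform in this project.
    from app.file_manager import EPGFileManager

    xml_payload = (
        b'<?xml version="1.0" encoding="utf-8"?>'
        b'<tv>'
        b'<channel id="demo-1"><display-name>Demo One</display-name></channel>'
        b'<programme channel="demo-1" start="20240101120000 +0800" stop="20240101130000 +0800">'
        b'<title>Placeholder show</title>'
        b'</programme>'
        b'</tv>'
    )

    # Writes today's file for the platform (path and name come from Config.get_epg_file_path)
    assert EPGFileManager.save_epg_file("demo", xml_payload)

    # Reads today's file back; returns None when it does not exist
    content = EPGFileManager.read_epg_file("demo")
    print(len(content), "bytes on disk")

    # Merges one or more per-platform files into a single XMLTV response,
    # keeping the first occurrence of each channel id
    response = EPGFileManager.aggregate_epg_files(["demo"])
    print(response.headers["X-Total-Channels"], response.headers["X-Total-Programs"])
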
/app/http_client.py:
--------------------------------------------------------------------------------
  1 | import asyncio
  2 | from typing import Dict, Optional, Any
  3 | import aiohttp
  4 | import requests
  5 | from tenacity import retry, stop_after_attempt, wait_exponential, retry_if_exception_type
  6 | 
  7 | from .config import Config
  8 | from .logger import get_logger
  9 | 
 10 | logger = get_logger(__name__)
 11 | 
 12 | 
 13 | class HTTPError(Exception):
 14 |     """Custom HTTP error for better error handling"""
 15 |     def __init__(self, status_code: int, message: str, url: str):
 16 |         self.status_code = status_code
 17 |         self.message = message
 18 |         self.url = url
 19 |         super().__init__(f"HTTP {status_code}: {message} (URL: {url})")
 20 | 
 21 | 
 22 | class HTTPClient:
 23 |     """Shared HTTP client with retry logic and proper error handling"""
 24 | 
 25 |     def __init__(self):
 26 |         self.timeout = Config.HTTP_TIMEOUT
 27 |         self.proxies = Config.get_proxies()
 28 |         self.default_headers = {
 29 |             "User-Agent": Config.DEFAULT_USER_AGENT,
 30 |             "Accept": "application/json, text/plain, */*",
 31 |             "Accept-Language": "zh-CN,zh-TW;q=0.9,zh;q=0.8,en-US;q=0.7,en;q=0.6",
 32 |             "Cache-Control": "no-cache",
 33 |         }
 34 | 
 35 |     @retry(
 36 |         stop=stop_after_attempt(Config.HTTP_MAX_RETRIES),
 37 |         wait=wait_exponential(multiplier=Config.HTTP_RETRY_BACKOFF),
 38 |         retry=retry_if_exception_type((requests.RequestException, HTTPError)),
 39 |         reraise=True
 40 |     )
 41 |     def get(self, url: str, headers: Optional[Dict[str, str]] = None,
 42 |             params: Optional[Dict[str, Any]] = None, **kwargs) -> requests.Response:
 43 |         """
 44 |         Synchronous GET request with retry logic
 45 | 
 46 |         Args:
 47 |             url: Request URL
 48 |             headers: Additional headers
 49 |             params: Query parameters
 50 |             **kwargs: Additional requests arguments
 51 | 
 52 |         Returns:
 53 |             requests.Response object
 54 | 
 55 |         Raises:
 56 |             HTTPError: For HTTP errors (4xx, 5xx)
 57 |             requests.RequestException: For connection errors
 58 |         """
 59 |         request_headers = self.default_headers.copy()
 60 |         if headers:
 61 |             request_headers.update(headers)
 62 | 
 63 |         try:
 64 |             logger.debug(f"🌐 发起GET请求: {url}, params: {params} ,headers: {request_headers}")
 65 | 
 66 |             response = requests.get(
 67 |                 url,
 68 |                 headers=request_headers,
 69 |                 params=params,
 70 |                 proxies=self.proxies,
 71 |                 timeout=self.timeout,
 72 |                 **kwargs
 73 |             )
 74 | 
 75 |             # Check for HTTP errors
 76 |             if not response.ok:
 77 |                 raise HTTPError(
 78 |                     status_code=response.status_code,
 79 |                     message=response.reason or "Unknown error",
 80 |                     url=url
 81 |                 )
 82 | 
 83 |             logger.debug(f"✅ 成功获取: {url} (状态: {response.status_code})")
 84 |             return response
 85 | 
 86 |         except requests.RequestException as e:
 87 |             logger.error(f"❌ 请求失败 {url}: {e}")
 88 |             raise
 89 |         except HTTPError as e:
 90 |             logger.error(f"🚨 HTTP错误 {url}: {e}")
 91 |             raise
 92 | 
 93 |     @retry(
 94 |         stop=stop_after_attempt(Config.HTTP_MAX_RETRIES),
 95 |         wait=wait_exponential(multiplier=Config.HTTP_RETRY_BACKOFF),
 96 |         retry=retry_if_exception_type((aiohttp.ClientError, HTTPError)),
 97 |         reraise=True
 98 |     )
 99 |     async def async_get(self, url: str, headers: Optional[Dict[str, str]] = None,
100 |                        params: Optional[Dict[str, Any]] = None, **kwargs) -> aiohttp.ClientResponse:
101 |         """
102 |         Asynchronous GET request with retry logic
103 | 
104 |         Args:
105 |             url: Request URL
106 |             headers: Additional headers
107 |             params: Query parameters
108 |             **kwargs: Additional aiohttp arguments
109 | 
110 |         Returns:
111 |             aiohttp.ClientResponse object
112 | 
113 |         Raises:
114 |             HTTPError: For HTTP errors (4xx, 5xx)
115 |             aiohttp.ClientError: For connection errors
116 |         """
117 |         request_headers = self.default_headers.copy()
118 |         if headers:
119 |             request_headers.update(headers)
120 | 
121 |         connector_kwargs = {}
122 |         if self.proxies:
123 |             # Note: requests-style proxies are not applied here; aiohttp would need a per-request proxy= argument
124 |             logger.debug(f"🔌 使用代理: {self.proxies}")
125 | 
126 |         timeout = aiohttp.ClientTimeout(total=self.timeout)
127 | 
128 |         try:
129 |             logger.info(f"🌐 发起异步GET请求: {url}")
130 | 
131 |             async with aiohttp.ClientSession(
132 |                 timeout=timeout,
133 |                 headers=request_headers,
134 |                 **connector_kwargs
135 |             ) as session:
136 |                 async with session.get(url, params=params, **kwargs) as response:
137 |                     # Check for HTTP errors
138 |                     if not response.ok:
139 |                         raise HTTPError(
140 |                             status_code=response.status,
141 |                             message=response.reason or "Unknown error",
142 |                             url=url
143 |                         )
144 |                     await response.read()  # cache the body so callers can use it after the session closes
145 |                     logger.debug(f"✅ 成功获取: {url} (状态: {response.status})")
146 |                     return response
147 | 
148 |         except aiohttp.ClientError as e:
149 |             logger.error(f"❌ 异步请求失败 {url}: {e}")
150 |             raise
151 |         except HTTPError as e:
152 |             logger.error(f"🚨 HTTP错误 {url}: {e}")
153 |             raise
154 | 
155 |     def post(self, url: str, data: Optional[Any] = None, json: Optional[Dict] = None,
156 |              headers: Optional[Dict[str, str]] = None, **kwargs) -> requests.Response:
157 |         """Synchronous POST request with retry logic"""
158 |         return self._request_with_retry("POST", url, data=data, json=json, headers=headers, **kwargs)
159 | 
160 |     async def async_post(self, url: str, data: Optional[Any] = None, json: Optional[Dict] = None,
161 |                         headers: Optional[Dict[str, str]] = None, **kwargs) -> aiohttp.ClientResponse:
162 |         """Asynchronous POST request with retry logic"""
163 |         return await self._async_request_with_retry("POST", url, data=data, json=json, headers=headers, **kwargs)
164 | 
165 |     @retry(
166 |         stop=stop_after_attempt(Config.HTTP_MAX_RETRIES),
167 |         wait=wait_exponential(multiplier=Config.HTTP_RETRY_BACKOFF),
168 |         retry=retry_if_exception_type((requests.RequestException, HTTPError)),
169 |         reraise=True
170 |     )
171 |     def _request_with_retry(self, method: str, url: str, **kwargs) -> requests.Response:
172 |         """Generic request method with retry logic"""
173 |         request_headers = self.default_headers.copy()
174 |         if kwargs.get('headers'):
175 |             request_headers.update(kwargs.pop('headers'))
176 | 
177 |         try:
178 |             logger.info(f"🌐 发起{method}请求: {url}")
179 | 
180 |             response = requests.request(
181 |                 method,
182 |                 url,
183 |                 headers=request_headers,
184 |                 proxies=self.proxies,
185 |                 timeout=self.timeout,
186 |                 **kwargs
187 |             )
188 | 
189 |             if not response.ok:
190 |                 raise HTTPError(
191 |                     status_code=response.status_code,
192 |                     message=response.reason or "Unknown error",
193 |                     url=url
194 |                 )
195 | 
196 |             logger.debug(f"✅ 成功完成{method}请求: {url} (状态: {response.status_code})")
197 |             return response
198 | 
199 |         except requests.RequestException as e:
200 |             logger.error(f"❌ {method}请求失败 {url}: {e}")
201 |             raise
202 | 
203 |     @retry(
204 |         stop=stop_after_attempt(Config.HTTP_MAX_RETRIES),
205 |         wait=wait_exponential(multiplier=Config.HTTP_RETRY_BACKOFF),
206 |         retry=retry_if_exception_type((aiohttp.ClientError, HTTPError)),
207 |         reraise=True
208 |     )
209 |     async def _async_request_with_retry(self, method: str, url: str, **kwargs) -> aiohttp.ClientResponse:
210 |         """Generic async request method with retry logic"""
211 |         request_headers = self.default_headers.copy()
212 |         if kwargs.get('headers'):
213 |             request_headers.update(kwargs.pop('headers'))
214 | 
215 |         timeout = aiohttp.ClientTimeout(total=self.timeout)
216 | 
217 |         try:
218 |             logger.info(f"🌐 发起异步{method}请求: {url}")
219 | 
220 |             async with aiohttp.ClientSession(timeout=timeout, headers=request_headers) as session:
221 |                 async with session.request(method, url, **kwargs) as response:
222 |                     if not response.ok:
223 |                         raise HTTPError(
224 |                             status_code=response.status,
225 |                             message=response.reason or "Unknown error",
226 |                             url=url
227 |                         )
228 |                     await response.read()  # cache the body so callers can use it after the session closes
229 |                     logger.debug(f"✅ 成功完成异步{method}请求: {url} (状态: {response.status})")
230 |                     return response
231 | 
232 |         except aiohttp.ClientError as e:
233 |             logger.error(f"❌ 异步{method}请求失败 {url}: {e}")
234 |             raise
235 | 
236 | 
237 | # Global HTTP client instance
238 | http_client = HTTPClient()
239 | 
240 | 
241 | def get_http_client() -> HTTPClient:
242 |     """Get the global HTTP client instance"""
243 |     return http_client
--------------------------------------------------------------------------------
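
Usage note (not part of the repository): a small sketch of the shared client. The URL and extra header are placeholders; what the module provides is that failed requests are retried up to Config.HTTP_MAX_RETRIES times with exponential backoff before the last exception is re-raised, and that 4xx/5xx responses surface as HTTPError.

    # Illustrative only: the URL and extra header are placeholders.
    from app.http_client import get_http_client, HTTPError

    client = get_http_client()

    try:
        # Retried with exponential backoff by tenacity, then re-raised (reraise=True)
        response = client.get(
            "https://httpbin.org/json",
            headers={"X-Demo": "1"},
            params={"q": "epg"},
        )
        print(response.status_code, len(response.content))
    except HTTPError as exc:
        print("HTTP error:", exc.status_code, exc.url)
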
/app/logger.py:
--------------------------------------------------------------------------------
 1 | import sys
 2 | from loguru import logger
 3 | from .config import Config
 4 | 
 5 | 
 6 | def setup_logger():
 7 |     """Configure and setup application-wide logging"""
 8 | 
 9 |     # Remove default logger
10 |     logger.remove()
11 | 
12 |     # Console logging with colored output
13 |     logger.add(
14 |         sys.stderr,
15 |         level=Config.LOG_LEVEL,
16 |         format="{time:YYYY-MM-DD HH:mm:ss} | "
17 |                "{level: <8} | "
18 |                "{name}:{function}:{line} | "
19 |                "{message}",
20 |         colorize=True
21 |     )
22 | 
23 |     # File logging with rotation
24 |     logger.add(
25 |         Config.LOG_FILE,
26 |         level=Config.LOG_LEVEL,
27 |         format="{time:YYYY-MM-DD HH:mm:ss} | {level: <8} | {name}:{function}:{line} | {message}",
28 |         rotation=Config.LOG_ROTATION,
29 |         retention=Config.LOG_RETENTION,
30 |         compression="zip",
31 |         encoding="utf-8"
32 |     )
33 | 
34 |     return logger
35 | 
36 | 
37 | # Create global logger instance
38 | app_logger = setup_logger()
39 | 
40 | 
41 | def get_logger(name: str = None):
42 |     """Get a logger instance with optional name binding"""
43 |     if name:
44 |         return app_logger.bind(name=name)
45 |     return app_logger
--------------------------------------------------------------------------------
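
Usage note (not part of the repository): the sketch below shows how modules obtain a logger and how loguru attaches tracebacks. Note that get_logger binds the given name into record["extra"], while the formats above print loguru's own {name} field (the calling module), so the bound value only shows up if a format references {extra[name]}.

    from app.logger import get_logger

    logger = get_logger(__name__)

    logger.info("service starting")

    try:
        1 / 0
    except ZeroDivisionError:
        # loguru captures the active traceback for both of these calls
        logger.exception("division failed")
        logger.opt(exception=True).error("division failed")
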
/app/main.py:
--------------------------------------------------------------------------------
  1 | import asyncio
  2 | from datetime import datetime
  3 | from typing import List
  4 | 
  5 | from apscheduler.schedulers.asyncio import AsyncIOScheduler
  6 | from fastapi import FastAPI, Query
  7 | 
  8 | from .config import Config
  9 | from .logger import get_logger
 10 | from .file_manager import EPGFileManager
 11 | from .epg_platform import MyTvSuper, Hami
 12 | from .epg_platform.Astro import get_astro_epg
 13 | from .epg_platform.CN_epg_pw import get_cn_channels_epg
 14 | from .epg_platform.HOY import get_hoy_epg
 15 | from .epg_platform.NowTV import request_nowtv_today_epg
 16 | from .epg_platform.RTHK import get_rthk_epg
 17 | from .epg_platform.Starhub import get_starhub_epg
 18 | 
 19 | logger = get_logger(__name__)
 20 | 
 21 | app = FastAPI(
 22 |     title=Config.APP_NAME,
 23 |     version=Config.APP_VERSION,
 24 |     description="Electronic Program Guide (EPG) aggregation service for Asian streaming platforms",
 25 |     openapi_url=None
 26 | )
 27 | 
 28 | 
 29 | @app.get("/")
 30 | async def root():
 31 |     """Health check endpoint"""
 32 |     enabled_platforms = [p["platform"] for p in Config.get_enabled_platforms()]
 33 |     return {
 34 |         "service": Config.APP_NAME,
 35 |         "version": Config.APP_VERSION,
 36 |         "status": "healthy",
 37 |         "enabled_platforms": enabled_platforms,
 38 |         "update_interval_minutes": Config.EPG_UPDATE_INTERVAL
 39 |     }
 40 | 
 41 | 
 42 | # Create scheduler instance
 43 | scheduler = AsyncIOScheduler()
 44 | 
 45 | 
 46 | @scheduler.scheduled_job('interval', minutes=Config.EPG_UPDATE_INTERVAL)
 47 | async def scheduled_epg_update():
 48 |     """Scheduled task to update EPG data from all enabled platforms"""
 49 |     logger.info(f"🚀 开始定时更新EPG数据 - {datetime.now()}")
 50 |     await update_all_enabled_platforms()
 51 | 
 52 | 
 53 | async def request_my_tv_super_epg():
 54 |     """Update MyTV Super EPG data"""
 55 |     platform = "tvb"
 56 |     logger.info(f"📺 正在更新平台EPG数据: {platform}")
 57 | 
 58 |     try:
 59 |         if EPGFileManager.read_epg_file(platform) is not None:
 60 |             logger.info(f"✅ 今日{platform}的EPG数据已存在,跳过更新")
 61 |             return
 62 | 
 63 |         channels, programs = await MyTvSuper.get_channels(force=True)
 64 |         if not channels:
 65 |             logger.warning(f"⚠️ 未找到{platform}的频道数据")
 66 |             return
 67 | 
 68 |         response_xml = await gen_channel(channels, programs)
 69 | 
 70 |         if EPGFileManager.save_epg_file(platform, response_xml):
 71 |             EPGFileManager.delete_old_epg_files(platform)
 72 |             logger.info(f"✨ 成功更新{platform}的EPG数据")
 73 |         else:
 74 |             logger.error(f"❌ 保存{platform}的EPG文件失败")
 75 | 
 76 |     except Exception as e:
 77 |         logger.error(f"💥 更新{platform}的EPG数据时发生错误: {e}", exc_info=True)
 78 | 
 79 | 
 80 | async def request_hami_epg():
 81 |     """Update Hami EPG data"""
 82 |     platform = "hami"
 83 |     logger.info(f"📺 正在更新平台EPG数据: {platform}")
 84 | 
 85 |     try:
 86 |         if EPGFileManager.read_epg_file(platform) is not None:
 87 |             logger.info(f"✅ 今日{platform}的EPG数据已存在,跳过更新")
 88 |             return
 89 | 
 90 |         channels, programs = await Hami.request_all_epg()
 91 |         if not channels:
 92 |             logger.warning(f"⚠️ 未找到{platform}的频道数据")
 93 |             return
 94 | 
 95 |         response_xml = await gen_channel(channels, programs)
 96 | 
 97 |         if EPGFileManager.save_epg_file(platform, response_xml):
 98 |             EPGFileManager.delete_old_epg_files(platform)
 99 |             logger.info(f"✨ 成功更新{platform}的EPG数据")
100 |         else:
101 |             logger.error(f"❌ 保存{platform}的EPG文件失败")
102 | 
103 |     except Exception as e:
104 |         logger.error(f"💥 更新{platform}的EPG数据时发生错误: {e}", exc_info=True)
105 | 
106 | 
107 | async def request_cn_epg():
108 |     """Update CN (epg.pw) EPG data"""
109 |     platform = "cn"
110 |     logger.info(f"📺 正在更新平台EPG数据: {platform}")
111 | 
112 |     try:
113 |         if EPGFileManager.read_epg_file(platform) is not None:
114 |             logger.info(f"✅ 今日{platform}的EPG数据已存在,跳过更新")
115 |             return
116 | 
117 |         response_xml = await get_cn_channels_epg()
118 |         if not response_xml:
119 |             logger.warning(f"⚠️ 未收到{platform}的EPG数据")
120 |             return
121 | 
122 |         # Convert string to bytes for consistent handling
123 |         xml_bytes = response_xml.encode('utf-8') if isinstance(response_xml, str) else response_xml
124 | 
125 |         if EPGFileManager.save_epg_file(platform, xml_bytes):
126 |             EPGFileManager.delete_old_epg_files(platform)
127 |             logger.info(f"✨ 成功更新{platform}的EPG数据")
128 |         else:
129 |             logger.error(f"❌ 保存{platform}的EPG文件失败")
130 | 
131 |     except Exception as e:
132 |         logger.error(f"💥 更新{platform}的EPG数据时发生错误: {e}", exc_info=True)
133 | 
134 | 
135 | async def request_astro_epg():
136 |     """Update Astro Go EPG data"""
137 |     platform = "astro"
138 |     logger.info(f"📺 正在更新平台EPG数据: {platform}")
139 | 
140 |     try:
141 |         if EPGFileManager.read_epg_file(platform) is not None:
142 |             logger.info(f"✅ 今日{platform}的EPG数据已存在,跳过更新")
143 |             return
144 | 
145 |         channels, programs = await get_astro_epg()
146 |         if not channels:
147 |             logger.warning(f"⚠️ 未找到{platform}的频道数据")
148 |             return
149 | 
150 |         response_xml = await gen_channel(channels, programs)
151 | 
152 |         if EPGFileManager.save_epg_file(platform, response_xml):
153 |             EPGFileManager.delete_old_epg_files(platform)
154 |             logger.info(f"✨ 成功更新{platform}的EPG数据")
155 |         else:
156 |             logger.error(f"❌ 保存{platform}的EPG文件失败")
157 | 
158 |     except Exception as e:
159 |         logger.error(f"💥 更新{platform}的EPG数据时发生错误: {e}", exc_info=True)
160 | 
161 | 
162 | async def request_rthk_epg():
163 |     """Update RTHK EPG data"""
164 |     platform = "rthk"
165 |     logger.info(f"📺 正在更新平台EPG数据: {platform}")
166 | 
167 |     try:
168 |         if EPGFileManager.read_epg_file(platform) is not None:
169 |             logger.info(f"✅ 今日{platform}的EPG数据已存在,跳过更新")
170 |             return
171 | 
172 |         channels, programs = await get_rthk_epg()
173 |         if not channels:
174 |             logger.warning(f"⚠️ 未找到{platform}的频道数据")
175 |             return
176 | 
177 |         response_xml = await gen_channel(channels, programs)
178 | 
179 |         if EPGFileManager.save_epg_file(platform, response_xml):
180 |             EPGFileManager.delete_old_epg_files(platform)
181 |             logger.info(f"✨ 成功更新{platform}的EPG数据")
182 |         else:
183 |             logger.error(f"❌ 保存{platform}的EPG文件失败")
184 | 
185 |     except Exception as e:
186 |         logger.error(f"💥 更新{platform}的EPG数据时发生错误: {e}", exc_info=True)
187 | 
188 | 
189 | async def request_hoy_epg():
190 |     """Update HOY EPG data"""
191 |     platform = "hoy"
192 |     logger.info(f"📺 正在更新平台EPG数据: {platform}")
193 | 
194 |     try:
195 |         if EPGFileManager.read_epg_file(platform) is not None:
196 |             logger.info(f"✅ 今日{platform}的EPG数据已存在,跳过更新")
197 |             return
198 | 
199 |         channels, programs = await get_hoy_epg()
200 |         if not channels:
201 |             logger.warning(f"⚠️ 未找到{platform}的频道数据")
202 |             return
203 | 
204 |         response_xml = await gen_channel(channels, programs)
205 | 
206 |         if EPGFileManager.save_epg_file(platform, response_xml):
207 |             EPGFileManager.delete_old_epg_files(platform)
208 |             logger.info(f"✨ 成功更新{platform}的EPG数据")
209 |         else:
210 |             logger.error(f"❌ 保存{platform}的EPG文件失败")
211 | 
212 |     except Exception as e:
213 |         logger.error(f"💥 更新{platform}的EPG数据时发生错误: {e}", exc_info=True)
214 | 
215 | 
216 | async def request_now_tv_epg():
217 |     """Update NowTV EPG data"""
218 |     platform = "nowtv"
219 |     logger.info(f"📺 正在更新平台EPG数据: {platform}")
220 | 
221 |     try:
222 |         if EPGFileManager.read_epg_file(platform) is not None:
223 |             logger.info(f"✅ 今日{platform}的EPG数据已存在,跳过更新")
224 |             return
225 | 
226 |         response_xml = await request_nowtv_today_epg()
227 |         if not response_xml:
228 |             logger.warning(f"⚠️ 未收到{platform}的EPG数据")
229 |             return
230 | 
231 |         if EPGFileManager.save_epg_file(platform, response_xml):
232 |             EPGFileManager.delete_old_epg_files(platform)
233 |             logger.info(f"✨ 成功更新{platform}的EPG数据")
234 |         else:
235 |             logger.error(f"❌ 保存{platform}的EPG文件失败")
236 | 
237 |     except Exception as e:
238 |         logger.error(f"💥 更新{platform}的EPG数据时发生错误: {e}", exc_info=True)
239 | 
240 | 
241 | async def request_starhub_epg():
242 |     """Update StarHub EPG data"""
243 |     platform = "starhub"
244 |     logger.info(f"📺 正在更新平台EPG数据: {platform}")
245 | 
246 |     try:
247 |         if EPGFileManager.read_epg_file(platform) is not None:
248 |             logger.info(f"✅ 今日{platform}的EPG数据已存在,跳过更新")
249 |             return
250 | 
251 |         channels, programs = await get_starhub_epg()
252 |         if not channels:
253 |             logger.warning(f"⚠️ 未找到{platform}的频道数据")
254 |             return
255 | 
256 |         response_xml = await gen_channel(channels, programs)
257 | 
258 |         if EPGFileManager.save_epg_file(platform, response_xml):
259 |             EPGFileManager.delete_old_epg_files(platform)
260 |             logger.info(f"✨ 成功更新{platform}的EPG数据")
261 |         else:
262 |             logger.error(f"❌ 保存{platform}的EPG文件失败")
263 | 
264 |     except Exception as e:
265 |         logger.error(f"💥 更新{platform}的EPG数据时发生错误: {e}", exc_info=True)
266 | 
267 | 
268 | @app.get("/epg/{platform}")
269 | async def get_platform_epg(platform: str):
270 |     """Get EPG data for a specific platform"""
271 |     logger.info(f"📡 提供平台EPG数据服务: {platform}")
272 |     return EPGFileManager.get_single_platform_epg(platform)
273 | 
274 | 
275 | @app.get("/epg")
276 | async def get_custom_aggregate_epg(platforms: str = Query(..., description="Comma-separated platform list in priority order")):
277 |     """
278 |     Get aggregated EPG data from custom platform selection
279 | 
280 |     Example: ?platforms=tvb,nowtv,hami
281 |     """
282 |     platform_list = [p.strip() for p in platforms.split(',') if p.strip()]
283 |     logger.info(f"📊 提供自定义聚合EPG数据服务: {platform_list}")
284 |     return EPGFileManager.aggregate_epg_files(platform_list)
285 | 
286 | 
287 | @app.get("/all")
288 | async def get_all_enabled_platforms_epg():
289 |     """Get aggregated EPG data from all enabled platforms (cached)"""
290 |     logger.info(f"🌐 提供all平台的缓存EPG数据服务")
291 |     return EPGFileManager.get_single_platform_epg("all")
292 | 
293 | 
294 | @app.get("/all.xml.gz")
295 | async def get_all_enabled_platforms_epg_gz():
296 |     """Get aggregated EPG data from all enabled platforms (cached, gzip compressed)"""
297 |     from fastapi.responses import FileResponse
298 |     import os
299 | 
300 |     logger.info(f"📦 提供all平台的gz压缩缓存EPG数据服务")
301 | 
302 |     gz_file_path = EPGFileManager.get_epg_file_path("all").replace(".xml", ".xml.gz")
303 | 
304 |     if not os.path.exists(gz_file_path):
305 |         logger.error(f"❌ 未找到all.gz压缩文件: {gz_file_path}")
306 |         from fastapi import HTTPException
307 |         raise HTTPException(
308 |             status_code=404,
309 |             detail="Compressed EPG data not available. Please wait for next update cycle."
310 |         )
311 | 
312 |     return FileResponse(
313 |         path=gz_file_path,
314 |         media_type="application/gzip",
315 |         headers={
316 |             "Content-Disposition": "attachment; filename=all.xml.gz",
317 |             "Cache-Control": f"public, max-age={Config.EPG_CACHE_TTL}, s-maxage={Config.EPG_CACHE_TTL}",
318 |             "ETag": f'"epg-all-gz-{datetime.now().strftime("%Y%m%d")}"'
319 |         },
320 |         filename="all.xml.gz"
321 |     )
322 | 
323 | 
324 | async def gen_channel(channels, programs):
325 |     """Generate EPG XML from channels and programs data"""
326 |     from .epg.EpgGenerator import generateEpg
327 |     return await generateEpg(channels, programs)
328 | 
329 | 
330 | async def generate_all_platforms_cache():
331 |     """Generate and cache merged EPG for all enabled platforms"""
332 |     enabled_platforms = [p["platform"] for p in Config.get_enabled_platforms()]
333 | 
334 |     if not enabled_platforms:
335 |         logger.warning("⚠️ 没有启用任何平台,无法生成all缓存")
336 |         return
337 | 
338 |     logger.info(f"🔄 开始生成all平台合并缓存: {enabled_platforms}")
339 | 
340 |     try:
341 |         # Use existing aggregate logic to merge all platforms
342 |         import xml.etree.ElementTree as ET
343 |         import gzip
344 | 
345 |         merged_root = ET.Element("tv")
346 |         merged_root.set("generator-info-name", f"{Config.APP_NAME} v{Config.APP_VERSION}")
347 |         merged_root.set("generator-info-url", "https://github.com/your-repo/CharmingEPG")
348 | 
349 |         channels_seen = set()
350 |         total_channels = 0
351 |         total_programs = 0
352 | 
353 |         for platform in enabled_platforms:
354 |             content = EPGFileManager.read_epg_file(platform)
355 |             if not content:
356 |                 logger.warning(f"⚠️ 未找到平台的EPG数据: {platform}")
357 |                 continue
358 | 
359 |             try:
360 |                 platform_root = ET.fromstring(content)
361 | 
362 |                 # Process channels (first-come-first-served for duplicates)
363 |                 platform_channels = 0
364 |                 platform_programs = 0
365 | 
366 |                 for channel in platform_root.findall("./channel"):
367 |                     channel_id = channel.get("id")
368 |                     if channel_id and channel_id not in channels_seen:
369 |                         channels_seen.add(channel_id)
370 |                         merged_root.append(channel)
371 |                         platform_channels += 1
372 | 
373 |                         # Add all programs for this channel
374 |                         for programme in platform_root.findall(f"./programme[@channel='{channel_id}']"):
375 |                             merged_root.append(programme)
376 |                             platform_programs += 1
377 | 
378 |                 total_channels += platform_channels
379 |                 total_programs += platform_programs
380 | 
381 |                 logger.debug(f"🔀 从{platform}合并{platform_channels}个频道和{platform_programs}个节目")
382 | 
383 |             except ET.ParseError as e:
384 |                 logger.error(f"❌ 解析平台{platform}的XML失败: {e}")
385 |                 continue
386 | 
387 |         if total_channels == 0:
388 |             logger.error("❌ 任何平台都未找到有效的EPG数据,无法生成all缓存")
389 |             return
390 | 
391 |         # Convert merged XML to bytes
392 |         merged_xml = ET.tostring(merged_root, encoding="utf-8", xml_declaration=True)
393 | 
394 |         # Save to cache file using "all" as platform name
395 |         if EPGFileManager.save_epg_file("all", merged_xml):
396 |             logger.info(f"✨ 成功生成all缓存: {total_channels}个频道和{total_programs}个节目")
397 |         else:
398 |             logger.error("❌ 保存all缓存文件失败")
399 | 
400 |         # Generate gzip compressed version
401 |         compressed_xml = gzip.compress(merged_xml, compresslevel=9)
402 |         gz_file_path = EPGFileManager.get_epg_file_path("all").replace(".xml", ".xml.gz")
403 | 
404 |         try:
405 |             EPGFileManager.ensure_directory_exists(gz_file_path)
406 |             with open(gz_file_path, "wb") as gz_file:
407 |                 gz_file.write(compressed_xml)
408 | 
409 |             compression_ratio = len(compressed_xml) / len(merged_xml) * 100
410 |             saved_ratio = 100 - compression_ratio
411 |             logger.info(f"📦 成功生成all.gz压缩缓存: {len(compressed_xml)} 字节 (压缩至原来的 {compression_ratio:.1f}%,节省 {saved_ratio:.1f}%)")
412 |         except Exception as gz_error:
413 |             logger.error(f"❌ 保存all.gz压缩文件失败: {gz_error}")
414 | 
415 |     except Exception as e:
416 |         logger.error(f"💥 生成all缓存时发生错误: {e}", exc_info=True)
417 | 
418 | 
419 | async def update_all_enabled_platforms():
420 |     """Update EPG data for all enabled platforms"""
421 |     enabled_platforms = Config.get_enabled_platforms()
422 | 
423 |     if not enabled_platforms:
424 |         logger.warning("⚠️ 没有启用任何平台")
425 |         return
426 | 
427 |     logger.info(f"🔄 开始更新{len(enabled_platforms)}个启用平台的EPG数据")
428 | 
429 |     tasks = [
430 |         globals()[conf["fetcher"]]()
431 |         for conf in enabled_platforms
432 |     ]
433 | 
434 |     # Execute all tasks in parallel
435 |     results = await asyncio.gather(*tasks, return_exceptions=True)
436 | 
437 |     # Process results and log any exceptions
438 |     success_count = 0
439 |     error_count = 0
440 |     # Note: the per-platform fetchers return None, so whether a platform actually refreshed its file today is not tracked here
441 | 
442 |     for i, result in enumerate(results):
443 |         platform_config = enabled_platforms[i]
444 |         platform_name = platform_config["name"]
445 | 
446 |         if isinstance(result, Exception):
447 |             error_count += 1
448 |             logger.error(f"❌ 更新{platform_name}的EPG数据失败: {result}", exc_info=True)
449 |         else:
450 |             success_count += 1
451 |             logger.debug(f"✅ 成功更新{platform_name}的EPG数据")
452 | 
453 |     logger.info(f"🎯 EPG数据更新完成: {success_count}个成功,{error_count}个失败")
454 | 
455 |     # Check whether today's "all" cache already exists; if not, generate it
456 |     all_cache_exists = EPGFileManager.read_epg_file("all") is not None
457 | 
458 |     # Regenerate the merged cache for the /all endpoint only when today's
459 |     # cache file is missing (first run or a new day); the per-platform
460 |     # fetchers already skip platforms whose file for today exists
461 |     if not all_cache_exists:
462 |         logger.info("📝 all缓存不存在,开始生成")
463 |         await generate_all_platforms_cache()
464 |     else:
465 |         logger.info("✅ all缓存已存在,跳过重新生成")
466 | 
467 | 
468 | @app.on_event("startup")
469 | async def startup():
470 |     """Application startup event"""
471 |     logger.info(f"🚀 启动 {Config.APP_NAME} v{Config.APP_VERSION}")
472 |     logger.info(f"⏰ EPG更新间隔: {Config.EPG_UPDATE_INTERVAL} 分钟")
473 | 
474 |     enabled_platforms = [p["name"] for p in Config.get_enabled_platforms()]
475 |     logger.info(f"📺 已启用平台: {', '.join(enabled_platforms)}")
476 | 
477 |     # Start the scheduler
478 |     scheduler.start()
479 |     logger.info("⚡ 调度器已启动")
480 | 
481 |     # Trigger initial EPG update
482 |     asyncio.create_task(update_all_enabled_platforms())
483 |     logger.info("🎬 初始EPG数据更新已触发")
--------------------------------------------------------------------------------
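
Usage note (not part of the repository): a sketch of querying the HTTP endpoints defined above, assuming the service is exposed on the host port from docker-compose.example.yml (30008 -> 80).

    # Assumes the host port mapping from docker-compose.example.yml (30008 -> 80).
    import gzip
    import requests

    BASE = "http://localhost:30008"

    # Health check: enabled platforms and the update interval
    print(requests.get(f"{BASE}/").json())

    # Single-platform XMLTV file
    tvb = requests.get(f"{BASE}/epg/tvb")
    print(tvb.headers.get("X-Total-Channels"), tvb.headers.get("X-Total-Programs"))

    # Custom aggregation in priority order (the first platform wins on duplicate channel ids)
    merged = requests.get(f"{BASE}/epg", params={"platforms": "tvb,nowtv,hami"})
    print(len(merged.content), "bytes of merged XMLTV")

    # Pre-merged cache of all enabled platforms, plain and gzip-compressed
    plain = requests.get(f"{BASE}/all")
    compressed = requests.get(f"{BASE}/all.xml.gz")
    print(len(plain.content), "vs", len(gzip.decompress(compressed.content)))
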
/app/utils.py:
--------------------------------------------------------------------------------
 1 | import re
 2 | from datetime import datetime, timedelta
 3 | from zoneinfo import ZoneInfo
 4 | 
 5 | import pytz
 6 | 
 7 | 
 8 | def has_chinese(text):
 9 |     # 包含更多中文字符范围
10 |     pattern = r'[\u4e00-\u9fff\u3400-\u4dbf\U00020000-\U0002a6df\U0002a700-\U0002b73f\U0002b740-\U0002b81f\U0002b820-\U0002ceaf]'
11 |     return bool(re.search(pattern, text))
12 | 
13 | 
14 | def remove_brackets(text):
15 |     pattern = r'[((][^(())]*[))]'
16 |     while re.search(pattern, text):
17 |         text = re.sub(pattern, '', text)
18 |     return text.strip()
19 | 
20 | 
21 | def utc_and_duration_to_local(start_time, duration):
22 |     # 1. 字符串转UTC时间对象
23 |     start_time_utc = datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.000Z")
24 |     start_time_utc = pytz.utc.localize(start_time_utc)
25 | 
26 |     # 2. 计算结束时间(UTC)
27 |     end_time_utc = start_time_utc + timedelta(seconds=duration)
28 | 
29 |     # 3. 转成上海时间
30 |     shanghai_tz = pytz.timezone('Asia/Shanghai')
31 |     start_time_shanghai = start_time_utc.astimezone(shanghai_tz)
32 |     end_time_shanghai = end_time_utc.astimezone(shanghai_tz)
33 |     return start_time_shanghai, end_time_shanghai
34 | 
35 | 
36 | def utc_to_utc8_datetime(utc_timestamp):
37 |     utc8_dt = datetime.fromtimestamp(utc_timestamp, tz=ZoneInfo('Asia/Shanghai'))
38 |     return utc8_dt
39 | 
--------------------------------------------------------------------------------
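
Usage note (not part of the repository): a quick check of the helpers above with arbitrary example values.

    from app.utils import has_chinese, remove_brackets, utc_and_duration_to_local, utc_to_utc8_datetime

    print(has_chinese("TVB 翡翠台"))                 # True
    print(remove_brackets("新聞報道 (重播) (粵語)"))  # 新聞報道

    # 12:00 UTC plus a one-hour duration -> 20:00 to 21:00 Asia/Shanghai (UTC+8)
    start, end = utc_and_duration_to_local("2024-01-01T12:00:00.000Z", 3600)
    print(start.isoformat(), end.isoformat())

    # Unix timestamp -> timezone-aware Asia/Shanghai datetime
    print(utc_to_utc8_datetime(1704096000).isoformat())  # 2024-01-01T16:00:00+08:00
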
/build_and_push.sh:
--------------------------------------------------------------------------------
  1 | #!/bin/bash
  2 | 
  3 | # 多架构Docker镜像构建和推送脚本
  4 | # 支持AMD64和ARM64架构
  5 | 
  6 | set -e  # 遇到错误时退出
  7 | 
  8 | # 配置变量
  9 | DOCKER_USERNAME="charmingcheung000"  # 请填写你的Docker Hub用户名
 10 | IMAGE_NAME="charming-epg"
 11 | TAG="latest"
 12 | DOCKERFILE="Dockerfile"  # 统一的Dockerfile
 13 | 
 14 | # 颜色输出
 15 | RED='\033[0;31m'
 16 | GREEN='\033[0;32m'
 17 | YELLOW='\033[1;33m'
 18 | BLUE='\033[0;34m'
 19 | NC='\033[0m' # No Color
 20 | 
 21 | # 输出函数
 22 | log_info() {
 23 |     echo -e "${BLUE}[INFO]${NC} $1"
 24 | }
 25 | 
 26 | log_success() {
 27 |     echo -e "${GREEN}[SUCCESS]${NC} $1"
 28 | }
 29 | 
 30 | log_warning() {
 31 |     echo -e "${YELLOW}[WARNING]${NC} $1"
 32 | }
 33 | 
 34 | log_error() {
 35 |     echo -e "${RED}[ERROR]${NC} $1"
 36 | }
 37 | 
 38 | # 检查必要的工具
 39 | check_requirements() {
 40 |     log_info "检查必要的工具..."
 41 | 
 42 |     if ! command -v docker &> /dev/null; then
 43 |         log_error "Docker 未安装或不在PATH中"
 44 |         exit 1
 45 |     fi
 46 | 
 47 |     if ! docker buildx version &> /dev/null; then
 48 |         log_error "Docker buildx 未安装"
 49 |         exit 1
 50 |     fi
 51 | 
 52 |     log_success "工具检查完成"
 53 | }
 54 | 
 55 | # 获取用户输入
 56 | get_user_input() {
 57 |     if [ -z "$DOCKER_USERNAME" ]; then
 58 |         read -p "请输入你的Docker Hub用户名: " DOCKER_USERNAME
 59 |         if [ -z "$DOCKER_USERNAME" ]; then
 60 |             log_error "用户名不能为空"
 61 |             exit 1
 62 |         fi
 63 |     fi
 64 | 
 65 |     FULL_IMAGE_NAME="$DOCKER_USERNAME/$IMAGE_NAME:$TAG"
 66 |     log_info "将构建镜像: $FULL_IMAGE_NAME"
 67 | }
 68 | 
 69 | # 检查Dockerfile是否存在
 70 | check_dockerfile() {
 71 |     log_info "检查Dockerfile文件..."
 72 | 
 73 |     if [ ! -f "$DOCKERFILE" ]; then
 74 |         log_error "Dockerfile ($DOCKERFILE) 不存在"
 75 |         exit 1
 76 |     fi
 77 | 
 78 |     log_success "Dockerfile文件检查完成"
 79 | }
 80 | 
 81 | # 获取当前登录用户名的辅助函数
 82 | get_logged_user() {
 83 |     # 方法1: 从 docker info 获取
 84 |     local user_from_info=$(docker info 2>/dev/null | grep "Username:" | awk '{print $2}' 2>/dev/null || echo "")
 85 | 
 86 |     # 方法2: 从配置文件获取
 87 |     local user_from_config=""
 88 |     if [ -f "$HOME/.docker/config.json" ]; then
 89 |         # 尝试从配置文件中提取用户名(如果auths中有registry信息)
 90 |         user_from_config=$(cat "$HOME/.docker/config.json" 2>/dev/null | grep -o '"https://index.docker.io/v1/"' 2>/dev/null || echo "")
 91 |     fi
 92 | 
 93 |     # 方法3: 尝试通过简单的API调用验证
 94 |     local test_result=""
 95 |     if curl -s --max-time 5 -f "https://hub.docker.com/v2/users/$DOCKER_USERNAME/" >/dev/null 2>&1; then
 96 |         test_result="api_accessible"
 97 |     fi
 98 | 
 99 |     echo "$user_from_info"
100 | }
101 | 
102 | # 验证登录状态
103 | verify_login() {
104 |     log_info "验证登录状态..."
105 | 
106 |     # 尝试执行一个需要认证的操作来验证登录
107 |     local test_repo="$DOCKER_USERNAME/login-test-$(date +%s)"
108 | 
109 |     # 尝试推送一个简单的测试标签(不会真的推送,只是验证权限)
110 |     log_info "测试Docker Hub连接和权限..."
111 | 
112 |     # 创建一个临时的最小镜像来测试推送权限
113 |     echo "FROM scratch" > /tmp/test.dockerfile
114 | 
115 |     if docker buildx build --platform linux/amd64 -f /tmp/test.dockerfile -t "$test_repo" --dry-run . >/dev/null 2>&1; then
116 |         log_success "Docker登录验证成功"
117 |         rm -f /tmp/test.dockerfile
118 |         return 0
119 |     else
120 |         # 如果dry-run不支持,尝试其他方法
121 |         log_info "使用备用验证方法..."
122 | 
123 |         # 简单地检查是否能执行docker命令且显示Login Succeeded
124 |         if docker system info >/dev/null 2>&1; then
125 |             log_success "Docker登录验证成功 (用户: $DOCKER_USERNAME)"
126 |             rm -f /tmp/test.dockerfile 2>/dev/null || true
127 |             return 0
128 |         else
129 |             log_error "Docker登录验证失败"
130 |             rm -f /tmp/test.dockerfile 2>/dev/null || true
131 |             return 1
132 |         fi
133 |     fi
134 | }
135 | 
136 | # 强制重新登录Docker Hub
137 | docker_login() {
138 |     log_info "准备登录Docker Hub..."
139 | 
140 |     # 显示当前登录用户(如果有的话)
141 |     local current_user=$(get_logged_user)
142 |     if [ -n "$current_user" ]; then
143 |         log_warning "当前登录用户: $current_user"
144 |     fi
145 | 
146 |     # 强制登出,清除所有缓存的凭据
147 |     log_info "正在清除Docker登录状态..."
148 |     docker logout > /dev/null 2>&1 || true
149 | 
150 |     # 清除Docker配置文件中的凭据(如果存在)
151 |     if [ -f "$HOME/.docker/config.json" ]; then
152 |         log_info "备份并清理Docker配置文件..."
153 |         # 备份原配置文件
154 |         cp "$HOME/.docker/config.json" "$HOME/.docker/config.json.backup.$(date +%s)" 2>/dev/null || true
155 |         # 清空auths部分,但保留其他配置
156 |         echo '{"auths": {}}' > "$HOME/.docker/config.json" 2>/dev/null || true
157 |     fi
158 | 
159 |     echo ""
160 |     log_warning "=== 强制手动登录模式 ==="
161 |     log_info "目标用户名: $DOCKER_USERNAME"
162 |     log_warning "请确保输入正确的用户名和密码!"
163 |     echo ""
164 | 
165 |     # 使用指定用户名强制手动登录
166 |     MAX_ATTEMPTS=3
167 |     ATTEMPT=1
168 | 
169 |     while [ $ATTEMPT -le $MAX_ATTEMPTS ]; do
170 |         echo "登录尝试 $ATTEMPT/$MAX_ATTEMPTS"
171 |         echo "请输入Docker Hub凭据:"
172 | 
173 |         # 强制交互式登录
174 |         if docker login -u $DOCKER_USERNAME; then
175 |             log_success "登录命令执行成功"
176 | 
177 |             # 验证登录状态
178 |             if verify_login; then
179 |                 log_success "Docker Hub 登录验证成功 - 用户: $DOCKER_USERNAME"
180 |                 return 0
181 |             else
182 |                 log_error "登录验证失败"
183 |             fi
184 |         else
185 |             log_error "登录失败"
186 |         fi
187 | 
188 |         if [ $ATTEMPT -eq $MAX_ATTEMPTS ]; then
189 |             log_error "达到最大尝试次数,登录失败"
190 |             exit 1
191 |         fi
192 | 
193 |         ATTEMPT=$((ATTEMPT + 1))
194 |         echo ""
195 |         log_warning "请重试..."
196 |         sleep 2
197 |     done
198 | }
199 | 
200 | # 设置buildx构建器
201 | setup_buildx() {
202 |     log_info "设置Docker buildx构建器..."
203 | 
204 |     BUILDER_NAME="multiarch-builder"
205 | 
206 |     # 检查构建器是否已存在
207 |     if docker buildx inspect "$BUILDER_NAME" &> /dev/null; then
208 |         log_info "构建器 $BUILDER_NAME 已存在,正在使用..."
209 |         docker buildx use "$BUILDER_NAME"
210 |     else
211 |         log_info "创建新的构建器: $BUILDER_NAME"
212 |         docker buildx create --name "$BUILDER_NAME" --use
213 |     fi
214 | 
215 |     log_info "启动构建器..."
216 |     docker buildx inspect --bootstrap
217 | 
218 |     log_success "构建器设置完成"
219 | }
220 | 
221 | # 统一构建多架构镜像
222 | build_multiarch() {
223 |     log_info "开始构建多架构镜像..."
224 |     log_info "支持架构: linux/amd64, linux/arm64"
225 |     echo ""
226 | 
227 |     log_info "构建参数:"
228 |     log_info "  Docker用户: $DOCKER_USERNAME"
229 |     log_info "  Dockerfile: $DOCKERFILE"
230 |     log_info "  镜像名称: $FULL_IMAGE_NAME"
231 |     log_info "  平台架构: linux/amd64,linux/arm64"
232 |     echo ""
233 | 
234 |     # 再次确认推送的镜像名称
235 |     log_warning "即将推送到: $FULL_IMAGE_NAME"
236 |     read -p "确认继续?(y/N): " confirm
237 |     if [[ ! $confirm =~ ^[Yy]$ ]]; then
238 |         log_info "用户取消操作"
239 |         exit 0
240 |     fi
241 | 
242 |     docker buildx build \
243 |         --platform linux/amd64,linux/arm64 \
244 |         --file "$DOCKERFILE" \
245 |         --tag "$FULL_IMAGE_NAME" \
246 |         --push \
247 |         .
248 | 
249 |     log_success "多架构镜像构建和推送完成!"
250 | }
251 | 
252 | # 验证镜像
253 | verify_image() {
254 |     log_info "验证多架构镜像..."
255 |     echo ""
256 | 
257 |     echo "=== 镜像架构信息 ==="
258 |     docker buildx imagetools inspect "$FULL_IMAGE_NAME"
259 |     echo ""
260 | 
261 |     log_success "镜像验证完成"
262 | }
263 | 
264 | # 清理函数
265 | cleanup() {
266 |     # 清理临时文件
267 |     rm -f /tmp/test.dockerfile 2>/dev/null || true
268 |     log_info "清理完成"
269 | }
270 | 
271 | # 主函数
272 | main() {
273 |     echo "=================================="
274 |     echo "  多架构Docker镜像构建脚本"
275 |     echo "  支持架构: AMD64, ARM64"
276 |     echo "  统一Dockerfile构建"
277 |     echo "  强制手动登录模式"
278 |     echo "=================================="
279 |     echo ""
280 | 
281 |     # 设置错误时的清理
282 |     trap cleanup EXIT
283 | 
284 |     check_requirements
285 |     get_user_input
286 |     check_dockerfile
287 |     docker_login  # 强制重新登录
288 |     setup_buildx
289 |     build_multiarch
290 |     verify_image
291 | 
292 |     echo ""
293 |     log_success "所有操作完成!"
294 |     echo "Docker用户: $DOCKER_USERNAME"
295 |     echo "镜像名称: $FULL_IMAGE_NAME"
296 |     echo "支持架构: linux/amd64, linux/arm64"
297 |     echo ""
298 |     echo "使用方法:"
299 |     echo "  docker pull $FULL_IMAGE_NAME"
300 |     echo "  docker run $FULL_IMAGE_NAME"
301 |     echo ""
302 |     echo "查看镜像架构:"
303 |     echo "  docker buildx imagetools inspect $FULL_IMAGE_NAME"
304 | }
305 | 
306 | # 运行主函数
307 | main "$@"
308 | 
--------------------------------------------------------------------------------
/docker-compose.example.yml:
--------------------------------------------------------------------------------
 1 | version: '3.3'
 2 | services:
 3 |   charming_epg:
 4 |     build: .
 5 |     ports:
 6 |       - "30008:80"
 7 |     environment:
 8 |       - EPG_ENABLE_CN=true
 9 |       - EPG_ENABLE_TVB=true
10 |       - EPG_ENABLE_NOWTV=false
11 |       - EPG_ENABLE_HAMI=true
12 |       - EPG_ENABLE_ASTRO=false
13 |       - EPG_ENABLE_RTHK=false
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
 1 | protobuf>=4.25.8
 2 | requests==2.32.4
 3 | fastapi~=0.115.8
 4 | uvicorn~=0.34.0
 5 | pydantic~=2.10.6
 6 | python-dotenv~=1.0.1
 7 | pytz~=2025.1
 8 | asyncpg~=0.30.0
 9 | APScheduler~=3.11.0
10 | loguru~=0.7.3
11 | beautifulsoup4~=4.13.3
12 | tenacity~=9.0.0
13 | aiohttp~=3.11.0
--------------------------------------------------------------------------------