├── docs ├── _static │ └── empty.txt ├── requirements.txt ├── fdroid_dl │ ├── model.rst │ ├── json.rst │ ├── update.rst │ ├── download.rst │ └── processor.rst ├── Makefile ├── index.rst ├── cli.rst └── conf.py ├── fdroid_dl ├── VERSION ├── __init__.py ├── json │ ├── __init__.py │ └── encoder.py ├── processor │ ├── __init__.py │ └── index.py ├── model │ ├── __init__.py │ ├── metadata.py │ ├── repoconfig.py │ ├── appmetadata.py │ ├── config.py │ └── index.py ├── update │ ├── __init__.py │ ├── src.py │ ├── selector.py │ ├── update.py │ ├── apk.py │ ├── index.py │ └── metadata.py ├── download │ ├── __init__.py │ ├── verifieddownload.py │ └── futuressession.py └── __main__.py ├── test ├── testdata │ └── empty.json ├── __init__.py └── test_config.py ├── __init__.py ├── .travis.yml ├── pyproject.toml ├── .bumpversion.cfg ├── tox.ini ├── LICENSE ├── setup.py ├── .gitignore ├── README.md └── .pylintrc /docs/_static/empty.txt: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /fdroid_dl/VERSION: -------------------------------------------------------------------------------- 1 | 0.1.0 2 | -------------------------------------------------------------------------------- /test/testdata/empty.json: -------------------------------------------------------------------------------- 1 | { 2 | 3 | 4 | } 5 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- 1 | """.""" 2 | __ALL__ = ['fdroid_dl'] 3 | -------------------------------------------------------------------------------- /fdroid_dl/__init__.py: -------------------------------------------------------------------------------- 1 | """.""" 2 | 3 | __version__ = "0.1.0" 4 | 5 | __ALL__ = ['__version__'] 6 | -------------------------------------------------------------------------------- 
/docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx 2 | sphinxcontrib-napoleon 3 | sphinx_rtd_theme 4 | requests-futures 5 | PyYAML 6 | click 7 | -------------------------------------------------------------------------------- /docs/fdroid_dl/model.rst: -------------------------------------------------------------------------------- 1 | Model's 2 | =================================== 3 | 4 | .. automodule:: fdroid_dl.model 5 | :members: 6 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: false 2 | language: python 3 | python: 4 | - 2.7 5 | - 3.5 6 | - 3.6 7 | install: pip install tox-travis 8 | script: tox 9 | -------------------------------------------------------------------------------- /docs/fdroid_dl/json.rst: -------------------------------------------------------------------------------- 1 | Generic JSON Encoder 2 | =================================== 3 | 4 | .. automodule:: fdroid_dl.json 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/fdroid_dl/update.rst: -------------------------------------------------------------------------------- 1 | Update Interface 2 | =================================== 3 | 4 | .. automodule:: fdroid_dl.update 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/fdroid_dl/download.rst: -------------------------------------------------------------------------------- 1 | Download Interface 2 | =================================== 3 | 4 | .. 
automodule:: fdroid_dl.download 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/fdroid_dl/processor.rst: -------------------------------------------------------------------------------- 1 | Index Postprocessor 2 | =================================== 3 | 4 | .. automodule:: fdroid_dl.processor 5 | :members: 6 | -------------------------------------------------------------------------------- /fdroid_dl/json/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | from .encoder import GenericJSONEncoder 5 | 6 | __all__ = ['GenericJSONEncoder'] 7 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | "setuptools >= 35.0.2", 4 | "setuptools_scm >= 2.0.0, <3" 5 | ] 6 | build-backend = "setuptools.build_meta" 7 | -------------------------------------------------------------------------------- /fdroid_dl/processor/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | from .index import IndexFileProcessor 5 | 6 | __all__ = ['IndexFileProcessor'] 7 | -------------------------------------------------------------------------------- /test/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | # Allow direct execution 4 | import os 5 | import sys 6 | sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) 7 | -------------------------------------------------------------------------------- /.bumpversion.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 0.1.0 3 | commit = 
LOGGER = logging.getLogger('update.SrcUpdate')


class SrcUpdate(object):
    """Placeholder updater for application source downloads.

    Mirrors the constructor signature of the sibling ``*Update`` classes so
    that ``Update`` can drive it uniformly, even though source downloading
    is not implemented yet.
    """

    def __init__(self, config, download_timeout=600, max_workers=10):
        # stored only for parity with the other updaters; unused until
        # source downloading is actually implemented
        self.__config = config
        self.__download_timeout = download_timeout
        self.__max_workers = max_workers

    def update(self):
        """No-op: source downloading is not implemented yet."""
        pass
LOGGER = logging.getLogger('json.GenericJSONEncoder')


class GenericJSONEncoder(JSONEncoder):
    """JSON encoder for objects that expose a ``__json__`` attribute.

    Model objects (such as ``Config``) publish their serializable form via
    a ``__json__`` property; anything without it falls through to the stock
    :class:`json.JSONEncoder`, which raises ``TypeError`` for unsupported
    types.
    """

    def default(self, obj):
        """Return the JSON-compatible form of *obj*, or defer to the base class."""
        if not hasattr(obj, '__json__'):
            return JSONEncoder.default(self, obj)
        return getattr(obj, '__json__')
12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | install: 18 | pip install -r requirements.txt 19 | 20 | all: 21 | make clean && make html 22 | 23 | # Catch-all target: route all unknown targets to Sphinx using the new 24 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 25 | %: Makefile 26 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 27 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | # tox (https://tox.readthedocs.io/) is a tool for running tests 2 | # in multiple virtualenvs. This configuration file will run the 3 | # test suite on all supported python versions. To use it, "pip install tox" 4 | # and then run "tox" from this directory. 5 | 6 | [tox] 7 | envlist = py27, py35, py36, py37 8 | 9 | [tox:.package] 10 | basepython = python3 11 | 12 | [pytest] 13 | junit_suite_name = fdroid-dl 14 | log_format = %(asctime)s %(levelname)s %(message)s 15 | log_date_format = %Y-%m-%d %H:%M:%S 16 | python_classes = *Suite 17 | python_functions = test_* 18 | testpaths = test 19 | 20 | [testenv] 21 | deps = 22 | toml 23 | pytest 24 | mock 25 | requests-mock 26 | hypothesis 27 | pylint 28 | requests-futures 29 | PyYAML 30 | commands = 31 | pylint fdroid_dl 32 | pytest 33 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. fdroid-dl documentation master file, created by 2 | sphinx-quickstart on Fri Jul 20 15:18:50 2018. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to fdroid-dl's documentation! 
7 | ===================================== 8 | 9 | fdroid-dl is a python based f-droid mirror generation and update utility. 10 | Point at one or more existing f-droid repositories and the utility will 11 | download the metadata (pictures, descriptions,..) for you and place it in your 12 | local system. 13 | 14 | .. code-block:: none 15 | 16 | Usage: fdroid-dl [OPTIONS] COMMAND [ARGS]... 17 | 18 | # fdroid-dl update && fdroid update 19 | 20 | .. toctree:: 21 | :maxdepth: 2 22 | :caption: Contents: 23 | 24 | cli 25 | fdroid_dl/model 26 | fdroid_dl/update 27 | fdroid_dl/download 28 | fdroid_dl/json 29 | fdroid_dl/processor 30 | 31 | Indices and tables 32 | ================== 33 | 34 | * :ref:`genindex` 35 | * :ref:`modindex` 36 | * :ref:`search` 37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 t4skforce 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
import os
import unittest
from fdroid_dl.model import Config
try:
    from unittest.mock import patch
except ImportError:
    from mock import patch


class ConfigTestSuite(unittest.TestCase):
    """Tests for ``fdroid_dl.model.Config`` directory preparation and defaults."""

    @patch('os.makedirs')
    @patch('os.path.exists')
    @patch.object(Config, 'save')
    def test_prepare_dirs(self, save, exists, makedirs):
        """Config must create missing dirs on enter and persist itself on exit."""
        exists.return_value = False
        with Config() as c:
            exists.assert_any_call('./repo')
            exists.assert_any_call('./metadata')
            exists.assert_any_call('.cache')
            makedirs.assert_any_call('./repo')
            makedirs.assert_any_call('./metadata')
            makedirs.assert_any_call('.cache')
            self.assertEqual(c.filename, 'fdroid-dl.json')
            self.assertEqual(c.repo_dir, './repo')
            self.assertEqual(c.metadata_dir, './metadata')
            self.assertEqual(c.cache_dir, '.cache')
            self.assertIsNot(c.get('f-droid'), None)
            self.assertEqual([x for x in iter(c)],
                             [x for x in iter(Config.DEFAULTS)])
            # save() must not have run while the context is still open ...
            assert not save.called
        # BUGFIX: this assertion previously sat inside the ``with`` block,
        # directly contradicting the line above.  ``Config.__exit__`` is the
        # thing expected to call save(), so the check belongs after exit.
        assert save.called
for repo in self.__config.repos: 15 | if not 'error' in repo: # ignore repos with download error 16 | yield repo 17 | 18 | def all_apps(self, dupes=False, session=None): 19 | yielded = set() 20 | for repo in self.__meta_repos(): 21 | Selector.apply_session_settings(repo, session) 22 | for selector in repo.apps: 23 | if not repo.index is None: 24 | for appid in repo.index.find_appids(selector): 25 | if not appid in yielded or dupes: 26 | yielded.add(appid) 27 | yield (repo, appid) 28 | 29 | @staticmethod 30 | def apply_session_settings(repo, session): 31 | ''' apply security settings for basic auth and ssl verification ''' 32 | if not session is None: 33 | if not repo.auth is None or repo.verify is False: 34 | thread_session = requests.Session() 35 | thread_session.auth = repo.auth 36 | thread_session.verify = repo.verify 37 | session.map(repo.url, thread_session) 38 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | import warnings 4 | import sys 5 | import setuptools 6 | 7 | with open("README.md", "r") as fh: 8 | long_description = fh.read() 9 | 10 | setuptools.setup( 11 | name="fdroid-dl", 12 | version="0.1.0", 13 | author="t4skforce", 14 | author_email="7422037+t4skforce@users.noreply.github.com", 15 | description="fdroid-dl is a f-droid (offline) mirror generation and update utility", 16 | long_description=long_description, 17 | long_description_content_type="text/markdown", 18 | url="https://github.com/t4skforce/fdroid-dl", 19 | entry_points={'console_scripts': ['fdroid-dl=fdroid_dl.__main__:main']}, 20 | packages=setuptools.find_packages(), 21 | install_requires=[ 22 | 'requests-futures>=0.9.7', 23 | 'PyYAML>=3.13', 24 | 'click>=6.7' 25 | ], 26 | python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4', 27 | classifiers=( 28 | "Development Status :: 2 - Pre-Alpha", 29 | 
LOGGER = logging.getLogger('update.Update')


class Update(object):
    """Facade that handles downloading of all repo related data.

    Bundles the index, metadata, apk and src updaters behind a small fluent
    interface: each step method returns ``self`` so calls can be chained.
    """

    def __init__(self, config, max_workers=10, head_timeout=10, index_timeout=60, download_timeout=60):
        self.__config = config
        self.__max_workers = max_workers
        self.__head_timeout = head_timeout
        self.__index_timeout = index_timeout
        self.__download_timeout = download_timeout
        # the metadata updater is constructed lazily inside metadata()
        self.__meta = None
        self.__index = IndexUpdate(config, head_timeout=head_timeout, index_timeout=index_timeout, max_workers=max_workers)
        self.__apk = ApkUpdate(config, download_timeout=download_timeout, max_workers=max_workers)
        self.__src = SrcUpdate(config, download_timeout=download_timeout, max_workers=max_workers)

    def index(self):
        """Refresh the repository index files that HEAD checks deem stale."""
        required = self.__index.required(self.__config.repos, timeout=self.__head_timeout)
        self.__index.download(*required, timeout=self.__index_timeout)
        return self

    def metadata(self):
        """Download yaml metadata and the associated asset files."""
        self.__meta = MetadataUpdate(self.__config, download_timeout=self.__download_timeout, max_workers=self.__max_workers)
        self.__meta.update_yaml()
        self.__meta.update_assets()
        return self

    def apk(self):
        """Download apk files."""
        self.__apk.update()
        return self

    def src(self):
        """Download source archives (currently a no-op, see SrcUpdate)."""
        self.__src.update()
        return self
LOGGER = logging.getLogger('processor.IndexFileProcessor')


class IndexFileProcessor(ThreadPoolExecutor):
    """Thread pool that parses downloaded index files into ``Index`` objects."""

    def __init__(self, *args, **kwargs):
        super(IndexFileProcessor, self).__init__(*args, **kwargs)
        # futures of all submitted parse jobs, drained by completed()
        self.__futures = []

    @staticmethod
    def __persist_format(file, repo):
        """Detect the index format by sniffing the first byte of *file*.

        Stores ``'xml'`` or ``'json'`` in ``repo['format']`` and rewinds the
        file afterwards.  Assumes *file* is opened in binary mode -- the
        first byte is decoded as UTF-8 (TODO confirm with callers).
        """
        file.seek(0)  # reset fp to beginning
        startchar = file.read(1).decode("utf-8")
        if startchar == '<':
            repo['format'] = 'xml'
        else:
            repo['format'] = 'json'
        file.seek(0)

    @staticmethod
    def __process(file, repo, *args):
        """Parse *file* into an Index, persist it, and return timing info.

        Returns ``(index, elapsed_timedelta) + args`` so callers can thread
        arbitrary context through the future.
        """
        start = time.time()
        index = None
        IndexFileProcessor.__persist_format(file, repo)
        with file as file_handle:  # ensures the file is closed once parsed
            if repo['format'] == 'json':
                index = Index.from_json(file_handle, key=repo.url).monkeypatch().save(filename=repo.filename)
            else:
                index = Index.from_xml(file_handle, key=repo.url).monkeypatch().save(filename=repo.filename)
        elapsed = time.time() - start
        return (index, timedelta(seconds=elapsed)) + args

    def process(self, file, repo, *args):
        """Submit *file*/*repo* for background parsing.

        Raises:
            IOError: if *file* is not a readable file-like object.
            AttributeError: if *repo* is None.
        """
        if not hasattr(file, 'read'):
            # FIX: the old message was just 'file error!' -- say what is wrong
            raise IOError('file must be a readable file-like object, got %r' % type(file))
        if repo is None:
            raise AttributeError('repo is missing')
        args = (file, repo) + args
        self.__futures.append(self.submit(IndexFileProcessor.__process, *args))

    def completed(self):
        """Yield futures of submitted parse jobs as they finish."""
        for future in as_completed(self.__futures):
            yield future

    #######################
    # implement "with"
    #######################
    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        # FIX: parameter was named ``type``, shadowing the builtin
        self.shutdown()
LOGGER = logging.getLogger('update.ApkUpdate')


class ApkUpdate(Selector):
    """Downloads and hash-verifies apk files for all configured repos/apps."""

    def __init__(self, config, download_timeout=600, max_workers=10):
        super(ApkUpdate, self).__init__(config)
        self.__config = config
        self.__download_timeout = download_timeout
        self.__max_workers = max_workers

    def all_packages(self, session=None):
        """Yield ``(url, filepath, hash, hash_type)`` tuples for apks to fetch.

        Only packages carrying a verifiable hash are considered, and only the
        newest ``config.apk_versions`` versions per app are yielded.
        """
        downloads = {}
        # FIX: use the module LOGGER instead of the root logger (was logging.info)
        LOGGER.info("collecting apps to download")
        start = time.time()
        apkcnt = 0
        # search pkgs
        for repo, appid in self.all_apps(dupes=True):
            Selector.apply_session_settings(repo, session)
            packages = repo.index.get('packages', {}).get(appid, [])
            if appid not in downloads:
                downloads[appid] = []
            for pkg in packages:
                # only packages with a verifiable hash can be downloaded
                if not pkg.get('apkName') is None and not pkg.get('hash') is None and not pkg.get('hashType') is None:
                    downloads[appid].append(pkg)
                    apkcnt += 1
        elapsed = time.time() - start
        LOGGER.info("found (%s) apk files to download (%s)", apkcnt, timedelta(seconds=elapsed))

        for appid in downloads:
            # sort newest first
            packages = sorted(downloads[appid], key=lambda e: e.get('versionCode', 0), reverse=True)
            for idx, pkg in enumerate(packages):
                if idx >= self.__config.apk_versions:
                    break  # only download the configured number of versions
                url = pkg.get('apkName')
                filename = os.path.basename(str(urlparse(url).path))
                filepath = os.path.join(self.__config.repo_dir, filename)
                yield (url, filepath, pkg.get('hash'), pkg.get('hashType'))

    def update(self):
        """Download all pending apk files, re-verifying hashes of existing ones."""
        # FIX: removed unused local ``meta = self.__config.metadata``
        LOGGER.info("UPDATING apk files")
        start = time.time()
        cnt = 0
        ecnt = 0
        with FuturesSessionVerifiedDownload(max_workers=self.__max_workers) as session:
            for url, filename, fhash, hash_type in self.all_packages(session=session):
                if os.path.exists(filename):
                    start2 = time.time()
                    if FuturesSessionVerifiedDownload.verify(filename, hash_type, fhash):
                        elapsed = time.time() - start2
                        LOGGER.info("hash verified %s [%s] (%s) ✔", os.path.basename(filename), timedelta(seconds=elapsed), FuturesSessionFlex.h_size(os.stat(filename).st_size))
                    else:
                        session.download(url, filename, timeout=self.__download_timeout, hash=fhash, hash_type=hash_type)
                else:
                    session.download(url, filename, timeout=self.__download_timeout, hash=fhash, hash_type=hash_type)

            dlsum = 0
            for success, filename, dbytes, hbytes, elapsed in session.completed():
                if success:
                    cnt += 1
                    dlsum += dbytes
                else:
                    ecnt += 1
        elapsed = time.time() - start
        LOGGER.info("UPDATED apk files, files(%s) errors(%s) [%s] (%s)", cnt, ecnt, FuturesSessionFlex.h_size(dlsum), timedelta(seconds=elapsed))
| self.add(AppMetadata(appid, app, default_locale=self.__default_locale)) 50 | LOGGER.info("loaded index: %s apps: %s", index.key, len(apps)) 51 | return self 52 | 53 | def add(self, appmetadata): 54 | if appmetadata is None: 55 | return 56 | if appmetadata.id is None: 57 | KeyError('cant add metadata without id! %s'%appmetadata) 58 | if appmetadata.id in self.__store: 59 | self.__store[appmetadata.id].merge(appmetadata) 60 | else: 61 | self.__store[appmetadata.id] = appmetadata 62 | return self 63 | 64 | def find_all(self, key): 65 | if key is None: 66 | raise KeyError("key must not be empty") 67 | ret_val = set() 68 | if key.startswith('regex:'): 69 | regc = re.compile(key[6:], re.I|re.S) 70 | for k in self.__store: 71 | match = regc.match(k) 72 | if not match is None: 73 | ret_val.add(self.__store[k]) 74 | elif key in ['*', '.*', 'all']: 75 | for k in self.__store: 76 | ret_val.add(self.__store[k]) 77 | elif key in self: 78 | ret_val.add(self[key]) 79 | return list(ret_val) 80 | 81 | def __repr__(self): 82 | return ""%str(json.dumps(self.__store, indent=4, cls=GenericJSONEncoder)) 83 | 84 | @property 85 | def __json__(self): 86 | ''' make Metadata json serializable ''' 87 | return self.__config 88 | 89 | ####################### 90 | # implement "dict" 91 | ####################### 92 | def __getitem__(self, key): 93 | return self.__store[key] 94 | def __setitem__(self, key, value): 95 | self.__store[key] = value 96 | def __delitem__(self, key): 97 | del self.__store[key] 98 | def __iter__(self): 99 | return iter(self.__store) 100 | def __len__(self): 101 | return len(self.__store) 102 | -------------------------------------------------------------------------------- /fdroid_dl/__main__.py: -------------------------------------------------------------------------------- 1 | """main entrypoint into fdroid-dl.""" 2 | 3 | import logging 4 | import click 5 | from .model import Config 6 | from .update import Update 7 | 8 | logging.basicConfig(level=logging.INFO, 9 | 
LOGGER = logging.getLogger("fdroid-dl")

@click.group(invoke_without_command=True)
@click.option('-d', '--debug', is_flag=True, default=False, help='enable debug level logging')
@click.option('-c', '--config', default='fdroid-dl.json', type=click.Path(dir_okay=False, writable=True, resolve_path=True), show_default=True, help='location of your fdroid-dl.json configuration file')
@click.option('-r', '--repo', default='./repo', type=click.Path(file_okay=False, writable=True, resolve_path=True), show_default=True, help='location of your fdroid repository to store the apk files')
@click.option('-m', '--metadata', default='./metadata', type=click.Path(file_okay=False, writable=True, resolve_path=True), show_default=True, help='location of your fdroid metadata to store the asset files')
@click.option('--cache', default='./.cache', type=click.Path(file_okay=False, writable=True, resolve_path=True), show_default=True, help='location for fdroid-dl to store cached data')
@click.pass_context
def main(ctx, debug, config, repo, metadata, cache):
    # NOTE: this docstring doubles as the click help text shown to users.
    """
    Is a python based f-droid mirror generation and update utility.
    Point at one or more existing f-droid repositories and the utility will download the metadata (pictures, descriptions,..)
    for you and place it in your local system.

    Simply run "fdroid-dl update && fdroid update" in your folder with repo and you are set.
    """
    # stash the global CLI options on the click context so subcommands see them
    if ctx.obj is None:
        ctx.obj = {}
    ctx.obj['debug'] = debug
    ctx.obj['config'] = config
    ctx.obj['repo'] = repo
    ctx.obj['metadata'] = metadata
    ctx.obj['cache_dir'] = cache
    if debug:
        LOGGER.setLevel(logging.DEBUG)
    LOGGER.info('Debug mode is %s', ('on' if debug else 'off'))
    # invoked without a subcommand -> print the top-level help instead of doing nothing
    if ctx.invoked_subcommand is None:
        with click.Context(main) as cctx:
            click.echo(main.get_help(cctx))


@main.group(name='update', invoke_without_command=True, short_help='starts updating process')
@click.option('--index/--no-index', default=True, show_default=True, help='download repository index files')
@click.option('--metadata/--no-metadata', default=True, show_default=True, help='download metadata assset files')
@click.option('--apk/--no-apk', default=True, show_default=True, help='download apk files')
@click.option('--apk-versions', default=1, type=int, show_default=True, help='how many versions of apk to download')
@click.option('--src/--no-src', default=True, show_default=True, help='download src files')
@click.option('--threads', default=10, type=int, show_default=True, help='configure number of parallel threads used for download')
@click.option('--head-timeout', default=10, type=int, show_default=True, help='maximum time in seconds a HEAD request is allowed to take')
@click.option('--index-timeout', default=60, type=int, show_default=True, help='maximum time in seconds index file download is allowed to take')
@click.option('--download-timeout', default=60, type=int, show_default=True, help='maximum time in seconds file download is allowed to take')
@click.pass_context
def update(ctx, index, metadata, apk, apk_versions, src, threads, head_timeout, index_timeout, download_timeout):
    """Run the update phases (index, metadata, apk, src) with the given limits."""
    # always download at least one version of every apk
    if apk_versions <= 0:
        apk_versions = 1
    # Config is a context manager -- changes are written back on exit
    with Config(ctx.obj['config'], cache_dir=ctx.obj['cache_dir'], apk_versions=apk_versions) as cfg:
        update = Update(cfg, max_workers=threads, head_timeout=head_timeout, index_timeout=index_timeout, download_timeout=download_timeout)
        # the phases build on each other but each can be disabled individually
        if index:
            update.index()
        if metadata:
            update.metadata()
        if apk:
            update.apk()
        if src:
            update.src()

if __name__ == '__main__':
    main()
Update(cfg, max_workers=threads, head_timeout=head_timeout, index_timeout=index_timeout, download_timeout=download_timeout) 58 | if index: 59 | update.index() 60 | if metadata: 61 | update.metadata() 62 | if apk: 63 | update.apk() 64 | if src: 65 | update.src() 66 | 67 | if __name__ == '__main__': 68 | main() 69 | -------------------------------------------------------------------------------- /fdroid_dl/model/repoconfig.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import logging 5 | try: 6 | from collections.abc import MutableMapping 7 | except ImportError: 8 | from collections import MutableMapping 9 | import copy 10 | import json 11 | import os, os.path 12 | import hashlib 13 | try: 14 | from urllib.parse import urlparse, urljoin 15 | except ImportError: 16 | from urlparse import urlparse, urljoin 17 | import shutil 18 | from tempfile import NamedTemporaryFile 19 | from .metadata import Metadata 20 | from .index import Index 21 | from ..json import GenericJSONEncoder 22 | 23 | 24 | LOGGER = logging.getLogger('model.RepoConfig') 25 | class RepoConfig(MutableMapping): 26 | EMPTY = {"apps":[]} 27 | 28 | def __init__(self, url, cfg, config): 29 | self.__config = config 30 | self.__url = self.__clean_url(url) 31 | # py3.5 only :( -> self.__store = {**RepoConfig.EMPTY, **cfg} 32 | self.__store = RepoConfig.EMPTY.copy() 33 | self.__store.update(cfg) 34 | self.__store['id'] = hashlib.sha1(str(self.__url).encode('UTF-8')).hexdigest() 35 | 36 | def __clean_url(self, url): 37 | purl = urlparse(url) 38 | url = "%s://%s%s"%(purl.scheme, purl.netloc, purl.path) 39 | if url.endswith('index.xml'): 40 | url = url[:-9] 41 | if url.endswith('index.jar'): 42 | url = url[:-9] 43 | if url.endswith('index-v1.jar'): 44 | url = url[:-12] 45 | if url.endswith('index-v1.json'): 46 | url = url[:-13] 47 | if not url.endswith('/'): 48 | url += '/' 49 | return url 50 | 51 | @property 52 | 
def index(self): 53 | try: 54 | return self.__config.index(self.url) 55 | except KeyError as keyerr: 56 | LOGGER.error(str(keyerr)) 57 | return None 58 | 59 | @property 60 | def auth(self): 61 | auth = self.__store.get('auth', None) 62 | if auth is None or len(auth) != 2: 63 | return None 64 | return (auth[0], auth[1]) 65 | 66 | @property 67 | def verify(self): 68 | return self.__store.get('ssl_verify', True) 69 | 70 | # pylint: disable=C0103 71 | @property 72 | def id(self): 73 | return self.__store.get('id', None) 74 | 75 | @property 76 | def key(self): 77 | return self.url 78 | 79 | @property 80 | def url(self): 81 | return str(self.__url) 82 | 83 | @property 84 | def hash(self): 85 | return self.__store.get('hash', None) 86 | 87 | @property 88 | def filename(self): 89 | return os.path.join(self.__config.cache_dir, self.id+".cache") 90 | 91 | @property 92 | def format(self): 93 | if 'format' in self.__store: 94 | return self.__store['format'] 95 | return None 96 | 97 | @property 98 | def url_index(self): 99 | return urljoin(self.__url, 'index.jar') 100 | 101 | @property 102 | def url_index_v1(self): 103 | return urljoin(self.__url, 'index-v1.jar') 104 | 105 | @property 106 | def src_download(self): 107 | if 'src_download' in self.__store: 108 | return self.__store['src_download'] is True 109 | return self.__config.src_download is True 110 | 111 | @property 112 | def metadata_download(self): 113 | if 'metadata_download' in self.__store: 114 | return self.__store['metadata_download'] is True 115 | return self.__config.metadata_download is True 116 | 117 | @property 118 | def default_locale(self): 119 | if 'default_locale' in self.__store: 120 | return str(self.__store['default_locale']) 121 | return self.__config.default_locale 122 | 123 | @property 124 | def apps(self): 125 | if 'apps' in self.__store: 126 | if isinstance(self.__store['apps'], str): 127 | yield self.__store['apps'] 128 | elif isinstance(self.__store['apps'], list): 129 | for app in 
LOGGER = logging.getLogger('download.FuturesSessionVerifiedDownload')


class FuturesSessionVerifiedDownload(FuturesSessionFlex):
    """Parallel file downloader with optional checksum verification.

    Files are streamed into a temporary file first and only copied to their
    final destination after the (optional) hash check succeeded.
    """

    BLOCKSIZE = 65536  # read/write chunk size in bytes

    def __init__(self, *args, **kwargs):
        super(FuturesSessionVerifiedDownload, self).__init__(*args, **kwargs)
        self.__futures = []  # pending download futures
        self.__files = {}    # bookkeeping placeholder, reset on __exit__

    def download(self, url, filename, timeout=600, hash_type=None, hash=None):
        """Queue *url* for asynchronous download to *filename*.

        :param hash_type: hashlib algorithm name (e.g. 'sha256') or None
        :param hash: expected hex digest; verified when hash_type is set
        """
        request = self.get(url, stream=True, timeout=timeout)
        # piggy-back the download target onto the future for completed()
        request.filename = filename
        request.hash_type = hash_type
        request.hash = hash
        request.request_url = url
        self.__futures.append(request)

    @staticmethod
    def verify(filename, hash_type, hash):
        """Return True when *filename* hashes (via *hash_type*) to *hash*."""
        tmphash = hashlib.new(hash_type)
        with open(filename, 'rb') as file:
            while True:
                byte = file.read(FuturesSessionVerifiedDownload.BLOCKSIZE)
                if not byte:
                    break
                tmphash.update(byte)
        return tmphash.hexdigest() == hash

    def completed(self):
        """Yield (ok, filename, nbytes, human_size, elapsed) per finished download."""
        for future in as_completed(self.__futures):
            url = future.request_url
            filename = future.filename
            foldername = os.path.dirname(filename)
            hash_type = future.hash_type
            expected_hash = future.hash
            # BUG FIX: initialise timing/size *before* anything can raise --
            # previously an early failure (e.g. raise_for_status) caused a
            # NameError on 'start'/'bytes'/'hbytes' inside the except block,
            # masking the real download error.  'bytes' also shadowed the
            # builtin; renamed to nbytes.
            start = time.time()
            nbytes = 0
            hbytes = FuturesSessionVerifiedDownload.h_size(nbytes)
            try:
                response = future.result()
                response.raise_for_status()
                start = time.time()
                with NamedTemporaryFile(mode='wb') as tmp:
                    for chunk in response.iter_content(chunk_size=FuturesSessionVerifiedDownload.BLOCKSIZE):
                        if chunk:
                            tmp.write(chunk)
                    tmp.flush()
                    nbytes = os.stat(tmp.name).st_size
                    hbytes = FuturesSessionVerifiedDownload.h_size(nbytes)
                    if hash_type is not None:
                        elapsed = time.time() - start
                        LOGGER.info("downloaded %s [%s] (%s) ✔", response.request.url, timedelta(seconds=elapsed), hbytes)
                        tmp.seek(0)
                        if FuturesSessionVerifiedDownload.verify(tmp.name, hash_type, expected_hash):
                            if not os.path.exists(foldername):
                                os.makedirs(foldername)
                            shutil.copy(tmp.name, filename)
                            elapsed = time.time() - start
                            LOGGER.info("hash verified %s [%s] (%s) ✔", response.request.url, timedelta(seconds=elapsed), hbytes)
                            yield (True, filename, nbytes, hbytes, timedelta(seconds=elapsed))
                        else:
                            elapsed = time.time() - start
                            LOGGER.warning("hash verification failed %s [%s] (%s) ❌", response.request.url, timedelta(seconds=elapsed), hbytes)
                            yield (False, filename, nbytes, hbytes, timedelta(seconds=elapsed))
                    else:
                        # no expected hash -> plain download
                        if not os.path.exists(foldername):
                            os.makedirs(foldername)
                        shutil.copy(tmp.name, filename)
                        elapsed = time.time() - start
                        LOGGER.info("downloaded %s [%s] (%s) ✔", response.request.url, timedelta(seconds=elapsed), hbytes)
                        yield (True, filename, nbytes, hbytes, timedelta(seconds=elapsed))
            except Exception as ex:
                if logging.getLogger().isEnabledFor(logging.DEBUG):
                    LOGGER.exception("Error downloading %s to file %s", url, filename)
                else:
                    LOGGER.warning("Error downloading %s to file %s: %s", url, filename, str(ex))
                elapsed = time.time() - start
                yield (False, filename, nbytes, hbytes, timedelta(seconds=elapsed))

    #######################
    # implement "with"
    #######################
    def __enter__(self):
        super(FuturesSessionVerifiedDownload, self).__enter__()
        return self

    def __exit__(self, type, value, traceback):
        super(FuturesSessionVerifiedDownload, self).__exit__(type, value, traceback)
        self.__futures = []
        self.__files = {}
LOGGER = logging.getLogger('model.AppMetadata')


class AppMetadata(MutableMapping):
    """Dict-like metadata of a single application.

    Keeps two views: ``__cfg`` holds only values that came from (and are
    written back to) the configuration file, while ``__store`` is the merged
    working copy that additionally contains data from repository indices.
    """

    def __init__(self, appid, cfg=None, json=None, default_locale='en-US'):
        """
        :param appid: application/package id (required)
        :param cfg: configuration values.  BUG FIX: the former ``cfg=dict()``
            default was a single dict shared by *all* instances -- because
            __setitem__ writes into ``__cfg``, one instance's changes leaked
            into every AppMetadata created afterwards.  ``None`` now means
            "fresh dict per instance" while keeping the call signature.
        :param json: extra values, e.g. from an index.  The name shadows the
            json module inside __init__ only; kept for backwards compatibility.
        :param default_locale: remembered fallback locale
        :raises KeyError: when appid is None
        """
        if appid is None:
            raise KeyError("no appid defined")
        self.__id = appid
        cfg = {} if cfg is None else cfg
        self.__cfg = cfg
        if json is not None:
            # same result as before: merging cfg into a copy of json
            self.__store = self.__merge(copy.deepcopy(cfg), copy.deepcopy(json))
        else:
            self.__store = copy.deepcopy(cfg)
        self.__default_locale = default_locale

    def __merge(self, source, destination):
        """Recursively copy *source* into *destination* (source values win)."""
        for key, value in source.items():
            if isinstance(value, dict):
                node = destination.setdefault(key, {})
                self.__merge(value, node)
            else:
                destination[key] = value
        return destination

    # pylint: disable=C0103
    @property
    def id(self):
        return str(self.__id)

    @property
    def appid(self):
        """Alias of :attr:`id`."""
        return str(self.__id)

    def merge(self, appmetadata):
        ''' used for adding additional data from e.g. index; existing values win '''
        if not isinstance(appmetadata, (AppMetadata, dict)):
            raise NotImplementedError("I can only merge two instances of AppMetadata at the moment")
        # NOTE: store is merged INTO a copy of the new data, i.e. keys already
        # present locally take precedence over the incoming index data.
        self.__store = self.__merge(self.__store, copy.deepcopy(appmetadata))
        return self

    def update(self, appmetadata):
        ''' apply manual changes (new values win) and persist them in the config part '''
        if not isinstance(appmetadata, (AppMetadata, dict)):
            raise NotImplementedError("I can only merge two instances of AppMetadata at the moment")
        self.__store = self.__merge(copy.deepcopy(appmetadata), self.__store)
        self.__cfg = self.__merge(copy.deepcopy(appmetadata), self.__cfg)
        return self

    def localized(self, locale=None):
        """Return the localized sub-dict for *locale* ({} when absent).

        NOTE(review): __default_locale is never consulted here -- callers must
        pass the locale explicitly; confirm this is intended.
        """
        return self.__store.get('localized', {}).get(locale, {})

    def full_description(self, locale=None):
        return self.localized(locale).get('description', None)

    def short_description(self, locale=None):
        return self.localized(locale).get('summary', None)

    def title(self, locale=None):
        return self.localized(locale).get('name', None)

    def feature_graphic(self, locale=None):
        return self.localized(locale).get('featureGraphic', None)

    def icon(self, locale=None):
        return self.localized(locale).get('icon', None)

    def promo_graphic(self, locale=None):
        return self.localized(locale).get('promoGraphic', None)

    def tv_banner(self, locale=None):
        return self.localized(locale).get('tvBanner', None)

    def phone_screenshots(self, locale=None):
        return self.localized(locale).get('phoneScreenshots', [])

    def seven_inch_screenshots(self, locale=None):
        return self.localized(locale).get('sevenInchScreenshots', [])

    def ten_inch_screenshots(self, locale=None):
        return self.localized(locale).get('tenInchScreenshots', [])

    def tv_screenshots(self, locale=None):
        return self.localized(locale).get('tvScreenshots', [])

    def wear_screenshots(self, locale=None):
        return self.localized(locale).get('wearScreenshots', [])

    @property
    def locales(self):
        """All locales for which localized data exists."""
        if 'localized' in self:
            return list(self['localized'].keys())
        return []

    def __repr__(self):
        # BUG FIX: the empty format string ("" % ...) raised TypeError.
        return "<AppMetadata %s>" % json.dumps(self.__store, indent=4, cls=GenericJSONEncoder)

    @property
    def __json__(self):
        ''' make Metadata json serializable -- only the config part persists '''
        return self.__cfg

    #######################
    # implement hash
    #######################
    def __hash__(self):
        return self.__id.__hash__()

    #######################
    # implement "dict"
    #######################
    def __getitem__(self, key):
        return self.__store[key]

    def __setitem__(self, key, value):
        # manual assignments are persisted in the config part as well
        self.__cfg[key] = value
        self.__store[key] = value

    def __delitem__(self, key):
        if key in self.__cfg:
            del self.__cfg[key]
        del self.__store[key]

    def __iter__(self):
        return iter(self.__store)

    def __len__(self):
        return len(self.__store)
LOGGER = logging.getLogger('download.FuturesSessionFlex')
class FuturesSessionFlex(FuturesSession):
    """FuturesSession with per-URL-pattern sub-sessions and index helpers.

    map() registers a dedicated session for a URL prefix (e.g. one carrying
    auth credentials); request() then transparently routes each request to the
    session whose prefix matches.  Static helpers post-process responses
    (size, cache hash, jar/index extraction) via requests hooks.
    """

    BLOCKSIZE = 65536  # streaming chunk size in bytes
    SUFFIXES = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']  # units used by h_size()

    def __init__(self, max_workers=1, user_agent='Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)', *args, **kwargs):
        """
        :param max_workers: thread-pool size, also used for the HTTP pools
        :param user_agent: User-Agent header sent with every request
        """
        # FuturesSession expects max_workers as a keyword argument
        kwargs.update({'max_workers': max_workers})
        super(FuturesSessionFlex, self).__init__(*args, **kwargs)
        # URL-prefix -> dedicated FuturesSessionFlex (see map())
        self.__sessions = {}
        # prefixes sorted longest-first so the most specific pattern wins
        self.__sessions_keys = []

        # remember ctor kwargs so map() can clone this session's settings
        self.__fs_kwargs = {}
        self.__fs_kwargs.update(kwargs)

        # bounded, blocking connection pools sized to the worker count
        _adapter_kwargs = {'pool_connections': max_workers, 'pool_maxsize': max_workers, 'pool_block': True}
        self.mount('https://', HTTPAdapter(**_adapter_kwargs))
        self.mount('http://', HTTPAdapter(**_adapter_kwargs))
        if self.headers is None:
            self.headers = {}
        self.headers.update({'User-Agent': user_agent})

    def map(self, pattern='http://', session=None):
        ''' if called with session None -> default session for ctor is used '''
        # clone this session's ctor settings, optionally wrapping the given
        # requests.Session (for per-repo auth / TLS settings)
        kwargs = copy.deepcopy(self.__fs_kwargs)
        kwargs['session'] = session
        if pattern not in self.__sessions:
            self.__sessions_keys.append(pattern)
            self.__sessions[pattern] = FuturesSessionFlex(*(), **kwargs)
            # keep longest prefixes first for most-specific matching
            self.__sessions_keys = sorted(self.__sessions_keys, key=len, reverse=True)

    def set_headers(self, headers):
        """Merge *headers* into the default headers of this session."""
        self.headers.update(headers)

    @staticmethod
    def h_size(nbytes):
        """Format a byte count as a human readable string (e.g. '1.5 MB')."""
        i = 0
        while nbytes >= 1024 and i < len(FuturesSessionFlex.SUFFIXES)-1:
            nbytes /= 1024.
            i += 1
        fsize = ('%.2f' % nbytes).rstrip('0').rstrip('.')
        return '%s %s' % (fsize, FuturesSessionFlex.SUFFIXES[i])

    # pylint: disable=W0613
    @staticmethod
    def add_size(response, *args, **kwargs):
        """Response hook: attach .size / .h_size attributes."""
        if 'Content-Length' in response.headers:
            response.size = int(response.headers.get('Content-Length', 0))
        else:
            # falls back to reading the whole body to determine the size
            LOGGER.warning("Content-Length Header not provided by %s", response.url)
            response.size = len(response.content)
        response.h_size = FuturesSessionFlex.h_size(response.size)
        return response

    # pylint: disable=W0613
    @staticmethod
    def add_hash(response, *args, **kwargs):
        """Response hook: attach a cache hash built from Last-Modified/ETag.

        When neither header is present the current timestamp is used, which
        makes the hash unique per request (i.e. never a cache hit).
        """
        response.hash = None
        if response.ok:
            timestamp = str(time.time())
            cache_var = response.headers.get('Last-Modified', timestamp) + response.headers.get('ETag', timestamp)
            response.hash = hashlib.sha1(str(cache_var).encode('UTF-8')).hexdigest()
        return response

    @staticmethod
    def extract_jar(response, *args, **kwargs):
        """Response hook: stream a .jar download and unpack the index file.

        Attaches the extracted index as a NamedTemporaryFile at
        response.index; prefers index-v1.json, falls back to index.xml.
        """
        if response.ok:
            start = time.time()
            response = FuturesSessionFlex.add_size(response, *args, **kwargs)
            response.index = NamedTemporaryFile()
            with NamedTemporaryFile() as temp_file:
                for chunk in response.iter_content(chunk_size=FuturesSessionFlex.BLOCKSIZE):
                    if chunk:
                        temp_file.write(chunk)
                zip_file = ZipFile(temp_file)
                idxfile = 'index-v1.json' if 'index-v1.json' in zip_file.namelist() else 'index.xml'
                with zip_file.open(idxfile) as file:
                    while True:
                        byte = file.read(FuturesSessionFlex.BLOCKSIZE)
                        if not byte:
                            break
                        response.index.write(byte)
            LOGGER.debug("%s - %s - (%s)", response.index.name, response.url, FuturesSessionFlex.h_size(os.stat(response.index.name).st_size))
            elapsed = time.time() - start
            # include the extraction time in the reported request duration
            response.elapsed += timedelta(seconds=elapsed)
        return response

    def __lookup_fs_session(self, url):
        """Return the mapped sub-session whose prefix matches *url*, or None."""
        # fast direct matches
        if url in self.__sessions:
            return self.__sessions[url]
        # slower pattern search depends on pattern count and size
        for k in self.__sessions_keys:
            if url.find(k) == 0:
                return self.__sessions[k]
        return None

    def request(self, *args, **kwargs):
        # args[1] is the URL (args[0] is the HTTP method); route to a mapped
        # sub-session when one is registered for this URL prefix
        session = self.__lookup_fs_session(args[1])
        if not session is None:
            return session.request(*args, **kwargs)
        return super(FuturesSessionFlex, self).request(*args, **kwargs)

    def close(self):
        """Close all mapped sub-sessions.

        NOTE(review): super().close() is not called here, so this session's
        own adapters/executor are left open -- confirm whether that is
        intentional.
        """
        try:
            for key, session in self.__sessions.items():
                session.close()
        except Exception:
            LOGGER.exception("Error closing sessions")
LOGGER = logging.getLogger('update.IndexUpdate')


class IndexUpdate(object):
    """Determines which repository indices changed and downloads the new ones."""

    def __init__(self, config, head_timeout=10, index_timeout=60, max_workers=10):
        """
        :param config: global Config holding the repositories to update
        :param head_timeout: timeout for HEAD probes in seconds
        :param index_timeout: timeout for index downloads in seconds
        :param max_workers: number of parallel download threads
        """
        self.config = config
        self.head_timeout = head_timeout
        self.index_timeout = index_timeout
        self.max_workers = max_workers

    def required(self, repos, timeout=60):
        """Split *repos* into (needs index-v1.jar, needs legacy index.jar)."""
        # probe the modern index first ...
        head_futures = self.__future(repos=repos, timeout=timeout)
        (new_index, notfound) = self.__head_response(head_futures)
        # ... and fall back to the legacy index for the 404s
        head_futures = self.__future(repos=notfound, attr='url_index', timeout=timeout)
        (old_index, notfound) = self.__head_response(head_futures)
        # new_index[] needs to be fetched with index-v1.jar
        # old_index[] needs to be fetched with index.jar
        return (new_index, old_index)

    def download(self, new_index, old_index, timeout=60):
        """Download and unpack the index files determined by required()."""
        if new_index is None:
            raise AttributeError('new_index missing')
        if old_index is None:
            raise AttributeError('old_index missing')
        new_futures = self.__future(repos=new_index, timeout=timeout, http_method=FuturesSessionFlex.get, hooks={'response': [FuturesSessionFlex.extract_jar]}, stream=True)
        old_futures = self.__future(repos=old_index, timeout=timeout, http_method=FuturesSessionFlex.get, hooks={'response': [FuturesSessionFlex.extract_jar]}, attr='url_index', stream=True)
        self.__download_response(new_futures)
        self.__download_response(old_futures)

    def __future(self, repos=None, attr='url_index_v1', http_method=FuturesSessionFlex.head, hooks=None, timeout=60, **kwargs):
        """Fire one request per repo and return the list of futures."""
        if repos is None:
            raise AttributeError('repos missing %s' % repos)
        if hooks is None:
            hooks = {'response': [FuturesSessionFlex.add_hash]}
        futures = []
        with FuturesSessionFlex(max_workers=self.max_workers) as session:
            for repo in repos:
                # repos with credentials or disabled TLS verification get a
                # dedicated mapped session so those settings don't leak
                if repo.auth is not None or repo.verify is False:
                    thread_session = requests.Session()
                    thread_session.auth = repo.auth
                    thread_session.verify = repo.verify
                    session.map(getattr(repo, attr), thread_session)
                request = http_method(session, getattr(repo, attr), hooks=hooks, timeout=timeout, **kwargs)
                request.repo = repo  # pass repo ref to future processing
                futures.append(request)
        return futures

    def __as_completed(self, futures):
        """Yield (repo, response, ok) as futures finish; a 404 yields ok=False."""
        for future in as_completed(futures):
            repo = future.repo
            try:
                response = future.result()
                response.raise_for_status()
                yield (repo, response, True)
            except requests.exceptions.HTTPError as ex:
                if ex.response.status_code == 404:  # try old index file
                    # BUG FIX: Logger.exception() requires a message argument;
                    # the previous bare call raised TypeError and hid the
                    # actual download error.
                    if logging.getLogger().isEnabledFor(logging.DEBUG):
                        LOGGER.exception("index not found (404): %s", repo.key)
                    else:
                        LOGGER.warning(str(ex))
                    yield (repo, response, False)
                else:
                    # remember the failure on the repo config for persistence
                    repo['error'] = {
                        'code': ex.response.status_code if isinstance(ex, requests.exceptions.HTTPError) else 600,
                        'msg': str(ex)
                    }
                    # BUG FIX: same missing-message problem as above
                    LOGGER.exception("error updating index of %s", repo.key)

    def __head_response(self, futures):
        """Evaluate HEAD probes: returns (changed repos, repos without index)."""
        success = []
        notfound = []
        for repo, response, okheader in self.__as_completed(futures):
            if okheader:
                # compare the response hash with the cached one
                LOGGER.info("HEAD %s (%s) ", response.url, response.elapsed)
                if not os.path.exists(repo.filename) or repo.hash != response.hash:
                    if 'error' in repo:
                        del repo['error']
                    repo['hash'] = response.hash
                    success.append(repo)
                    if not os.path.exists(repo.filename):
                        LOGGER.warning("CACHE - (miss) - %s.cache file not found!", repo.id)
                    else:
                        LOGGER.info("CACHE - (miss) - %s - %s)", repo.key, response.hash)
                else:
                    # skip do nothing for cache hits
                    LOGGER.info("CACHE - (hit) - %s - %s)", repo.key, response.hash)
            else:
                notfound.append(repo)
        return (success, notfound)

    def __download_response(self, futures):
        """Feed downloaded index files into the IndexFileProcessor."""
        with IndexFileProcessor(max_workers=self.max_workers) as ifp:
            for repo, response, success in self.__as_completed(futures):
                if response.ok:
                    LOGGER.info("DOWNLOADED %s [%s] (%s) ", response.url, response.elapsed, response.h_size)
                    ifp.process(response.index, repo, repo.url, response.h_size)
            for future in ifp.completed():
                (index, elapsed, url, h_size) = future.result()
                repo_name = index.get('repo', {}).get('name')
                LOGGER.info("UPDATED %s - %s [%s] (%s) ", repo_name, url, elapsed, h_size)
# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another
# directory, add those directories to sys.path here.
import sys
import os
# put the repository root on sys.path so autodoc and the version import work
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import datetime
now = datetime.datetime.now()  # build date, used for the copyright year
import fdroid_dl


# -- Project information -----------------------------------------------------

project = 'fdroid-dl'
copyright = '{}, t4skforce'.format(now.year)
author = 't4skforce'

# The short X.Y version -- taken from the package so the docs never drift
version = fdroid_dl.__version__
# The full version, including alpha/beta/rc tags
release = version


# -- General configuration ---------------------------------------------------

# Sphinx extension module names (builtin 'sphinx.ext.*' plus sphinxcontrib).
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.todo',
    'sphinx.ext.coverage',
    'sphinx.ext.imgmath',
    'sphinx.ext.viewcode',
    'sphinx.ext.githubpages',
    'sphinxcontrib.napoleon',
    'sphinx.ext.intersphinx',
]

# cross-project reference targets for intersphinx links
intersphinx_mapping = {
    'python': ('https://docs.python.org/3.4', None),
    'requests': ('https://requests.readthedocs.io/en/latest/', None),
    'click': ('https://click.readthedocs.io/en/latest/', None),
}

# Paths that contain templates, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames (list of strings also possible).
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# Language for autogenerated content (None -> English).
language = None

# Patterns, relative to the source directory, to ignore when looking for
# source files.  Also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages.
html_theme = 'sphinx_rtd_theme'

# Custom static files (copied after the builtin static files, so a file named
# "default.css" will overwrite the builtin "default.css").
html_static_path = ['_static']


# -- Options for HTMLHelp output ---------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'fdroid-dldoc'


# -- Options for LaTeX output ------------------------------------------------

# all LaTeX defaults -- kept as an explicit (empty) override point
latex_elements = {
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'fdroid-dl.tex', 'fdroid-dl Documentation',
     't4skforce', 'manual'),
]


# -- Options for manual page output ------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'fdroid-dl', 'fdroid-dl Documentation',
     [author], 1)
]


# -- Options for Texinfo output ----------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
List of tuples 166 | # (source start file, target name, title, author, 167 | # dir menu entry, description, category) 168 | texinfo_documents = [ 169 | (master_doc, 'fdroid-dl', 'fdroid-dl Documentation', 170 | author, 'fdroid-dl', 'One line description of project.', 171 | 'Miscellaneous'), 172 | ] 173 | 174 | 175 | # -- Extension configuration ------------------------------------------------- 176 | 177 | # -- Options for todo extension ---------------------------------------------- 178 | 179 | # If true, `todo` and `todoList` produce output, else they produce nothing. 180 | todo_include_todos = True 181 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # fdroid-dl 2 | 3 | [![Build Status](https://api.travis-ci.org/t4skforce/fdroid-dl.svg)](https://travis-ci.org/t4skforce/fdroid-dl) 4 | [![Documentation Status](https://readthedocs.org/projects/fdroid-dl/badge/?version=latest)](https://fdroid-dl.readthedocs.io/en/latest/?badge=latest) 5 | 6 | 7 | Is a python based f-droid mirror generation and update utility. Point at one or more existing f-droid repositories and the utility will download the metadata (pictures, descriptions,..) for you and place it in your local system. Simply run ```fdroid-dl && fdroid-dl update``` and you are set. 8 | 9 | ## Motivation 10 | The idea is to have an no internet local copy of one or more repositories, without the need to compile the thousands of apps on your own build server but rather download them like the android client does. So this tool came into existence to simply download a whole repository and import the apps into your own locally installed one. At the time of writing a full offline copy including assets is ~7.5GB of the official repository of f-droid.org. 
11 | 12 | # Installation 13 | fdroid-dl is available via pip, simply run ```pip install fdroid-dl``` and you can use ```fdroid-dl``` on your command line. [pypi.org - fdroid-dl](https://pypi.org/project/fdroid-dl/) 14 | 15 | # Documentation 16 | Can be found at [fdroid-dl.readthedocs.io](https://fdroid-dl.readthedocs.io/en/latest/) 17 | 18 | # Command Line Options 19 | ``` 20 | Usage: fdroid-dl [OPTIONS] COMMAND [ARGS]... 21 | 22 | Is a python based f-droid mirror generation and update utility. Point at 23 | one or more existing f-droid repositories and the utility will download 24 | the metadata (pictures, descriptions,..) for you and place it in your 25 | local system. 26 | 27 | Simply run "fdroid-dl update && fdroid update" in your folder with repo 28 | and you are set. 29 | 30 | Options: 31 | -d, --debug enable debug level logging 32 | -c, --config FILE location of your fdroid-dl.json configuration file 33 | [default: fdroid-dl.json] 34 | -r, --repo DIRECTORY location of your fdroid repository to store the 35 | apk files [default: ./repo] 36 | -m, --metadata DIRECTORY location of your fdroid metadata to store the 37 | asset files [default: ./metadata] 38 | --cache DIRECTORY location for fdroid-dl to store cached data 39 | [default: ./.cache] 40 | --help Show this message and exit. 41 | 42 | Commands: 43 | update starts updating process 44 | ``` 45 | ``` 46 | Usage: fdroid-dl update [OPTIONS] COMMAND [ARGS]... 
47 | 48 | Options: 49 | --index / --no-index download repository index files [default: True] 50 | --metadata / --no-metadata download metadata assset files [default: True] 51 | --apk / --no-apk download apk files [default: True] 52 | --apk-versions INTEGER how many versions of apk to download [default: 53 | 1] 54 | --src / --no-src download src files [default: True] 55 | --threads INTEGER configure number of parallel threads used for 56 | download [default: 10] 57 | --head-timeout INTEGER maximum time in seconds a HEAD request is 58 | allowed to take [default: 10] 59 | --index-timeout INTEGER maximum time in seconds index file download is 60 | allowed to take [default: 60] 61 | --download-timeout INTEGER maximum time in seconds file download is allowed 62 | to take [default: 60] 63 | --help Show this message and exit. 64 | ``` 65 | 66 | # Configuration File 67 | 68 | # TODO 69 | - [x] Create backend to crawl existing repos 70 | - [x] Fetch info directly index.jar and index-v1.jar 71 | - [x] Compatibility with old and new repo styles 72 | - [x] Download multi threaded 73 | - [x] Verify apk checksum 74 | - [x] Local cache for index files 75 | - [ ] Source code download not implemented yet 76 | - [ ] Metadata update to do delta not full download all the time 77 | - [ ] Cleanup strategy for old apk files (maybe ```fdroid update``` does this already?) 78 | - [x] Create a CLI [python click](http://click.pocoo.org/5/) 79 | - [x] pip package [packaging.python.org](https://packaging.python.org/tutorials/packaging-projects/) 80 | - [x] CI builds for pip package 81 | - [ ] Documentation ;-) 82 | - [ ] Writing tests [pytest](https://docs.pytest.org/en/latest/) 83 | 84 | # CHANGELOG 85 | - WIP: Documentation added 86 | - WIP: Test added 87 | - requests-mock ? 88 | 89 | ## Ideas 90 | - requests-cache ? 
91 | 92 | # Development 93 | ## Requirements 94 | * python 2.7.* or 3.5.* 95 | * pip 3.x 96 | * virtualenv 3.x 97 | 98 | ## install locally 99 | ``` 100 | # git clone https://github.com/t4skforce/fdroid-dl.git 101 | # cd fdroid-dl 102 | # virtualenv .env 103 | # source .env/bin/activate 104 | # python setup.py install 105 | # fdroid-dl --help 106 | # python3 -m fdroid_dl --help 107 | ``` 108 | 109 | ### References 110 | While this project was developed the following references where used 111 | 112 | #### F-Droid 113 | * Setup an F-Droid App Repo [f-droid.org](https://f-droid.org/en/docs/Setup_an_F-Droid_App_Repo/) 114 | * Build Metadata Reference [f-droid.org](https://f-droid.org/en/docs/Build_Metadata_Reference/) 115 | * All About Descriptions, Graphics, and Screenshots [f-droid.org](https://f-droid.org/en/docs/All_About_Descriptions_Graphics_and_Screenshots/) 116 | * How to Add a Repo to F-Droid [f-droid.org](https://f-droid.org/en/tutorials/add-repo/) 117 | * How to Send and Receive Apps Offline [f-droid.org](https://f-droid.org/en/tutorials/swap/) 118 | 119 | #### Python 120 | * Python Documentation [python.org](https://docs.python.org/3/) 121 | * PyYAML Documentation [pyyaml.org](https://pyyaml.org/wiki/PyYAMLDocumentation) 122 | * Requests: HTTP for Humans [python-requests.org](http://docs.python-requests.org/en/master/) 123 | * Suppress InsecureRequestWarning: Unverified HTTPS request is being made in Python2.6 [stackoverflow.com](https://stackoverflow.com/questions/27981545/suppress-insecurerequestwarning-unverified-https-request-is-being-made-in-pytho) 124 | * How to download large file in python with requests.py? 
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Update step writing f-droid ``metadata/`` YAML files and asset images."""

import logging
import time
from datetime import timedelta
import os.path
try:
    from urllib.parse import urlparse
except ImportError:
    from urlparse import urlparse
import yaml
from .selector import Selector
from ..download import FuturesSessionVerifiedDownload


LOGGER = logging.getLogger('update.MetadataUpdate')


class MetadataUpdate(Selector):
    """Writes per-app ``<appid>.yml`` files and downloads localized assets."""

    def __init__(self, config, download_timeout=600, max_workers=10):
        """
        Parameters
        ----------
        config : fdroid_dl.model.Config
            loaded configuration, provides ``metadata`` and ``metadata_dir``
        download_timeout : int
            maximum time in seconds a single asset download may take
        max_workers : int
            number of parallel download threads
        """
        super(MetadataUpdate, self).__init__(config)
        self.__config = config
        self.__download_timeout = download_timeout
        self.__max_workers = max_workers
        self.__loaded = False
        self.__load_all()

    def __load_all(self):
        # load repository metadata exactly once per instance
        if not self.__is_loaded():
            self.__config.metadata.load_all()
            self.__loaded = True

    def __is_loaded(self):
        return self.__loaded is True

    @staticmethod
    def _setyamlattr(yaml_key, json_key, yaml_data, app_meta):
        """Copy ``app_meta[json_key]`` into ``yaml_data[yaml_key]`` if present."""
        value = app_meta.get(json_key)
        if value is not None:
            yaml_data[yaml_key] = value

    def update_yaml(self):
        """Create or update one ``<appid>.yml`` metadata file per selected app."""
        meta = self.__config.metadata
        LOGGER.info("UPDATING YAML metadata")
        start = time.time()
        cnt = 0
        for repo, appid in self.all_apps():
            try:
                app_meta = meta[appid]
                yaml_file = os.path.join(self.__config.metadata_dir, appid + '.yml')
                yaml_data = {}
                if os.path.exists(yaml_file):
                    # merge into the existing file instead of clobbering it
                    with open(yaml_file, 'r') as yfl:
                        yaml_data = yaml.load(yfl, Loader=yaml.SafeLoader)
                    if yaml_data is None:
                        yaml_data = {}
                MetadataUpdate._setyamlattr('Categories', 'categories', yaml_data, app_meta)
                MetadataUpdate._setyamlattr('AuthorName', 'authorName', yaml_data, app_meta)
                MetadataUpdate._setyamlattr('AuthorEmail', 'authorEmail', yaml_data, app_meta)
                MetadataUpdate._setyamlattr('License', 'license', yaml_data, app_meta)
                # 'Name' is intentionally not synced (was commented out upstream)
                MetadataUpdate._setyamlattr('WebSite', 'webSite', yaml_data, app_meta)
                MetadataUpdate._setyamlattr('SourceCode', 'sourceCode', yaml_data, app_meta)
                MetadataUpdate._setyamlattr('IssueTracker', 'issueTracker', yaml_data, app_meta)
                MetadataUpdate._setyamlattr('Changelog', 'changelog', yaml_data, app_meta)
                MetadataUpdate._setyamlattr('Donate', 'donate', yaml_data, app_meta)
                MetadataUpdate._setyamlattr('FlattrID', 'flattr', yaml_data, app_meta)
                MetadataUpdate._setyamlattr('LiberapayID', 'liberapay', yaml_data, app_meta)
                MetadataUpdate._setyamlattr('Bitcoin', 'bitcoin', yaml_data, app_meta)
                MetadataUpdate._setyamlattr('Litecoin', 'litecoin', yaml_data, app_meta)
                MetadataUpdate._setyamlattr('AntiFeatures', 'antiFeatures', yaml_data, app_meta)
                with open(yaml_file, 'w') as stream:
                    yaml.safe_dump(yaml_data, stream, default_flow_style=False,
                                   encoding='utf-8', allow_unicode=True)
                cnt += 1
            except Exception as ex:
                if logging.getLogger().isEnabledFor(logging.DEBUG):
                    # BUG FIX: Logger.exception() requires a message argument;
                    # the original bare call raised a TypeError of its own.
                    LOGGER.exception("error updating YAML metadata for %s", appid)
                else:
                    LOGGER.warning(str(ex))
        elapsed = time.time() - start
        LOGGER.info("UPDATED YAML metadata, %s files (%s)", cnt, timedelta(seconds=elapsed))

    @staticmethod
    def __write_text(text, filename, foldername):
        """Write ``text`` UTF-8 encoded to ``foldername/filename`` (no-op for None)."""
        if text is not None:
            if not os.path.exists(foldername):
                os.makedirs(foldername)
            # BUG FIX: encoded bytes must be written in binary mode; the
            # original opened the file with "w" and wrote bytes, which is a
            # TypeError on Python 3.
            with open(os.path.join(foldername, filename), "wb") as text_file:
                text_file.write(text.encode('utf-8'))

    def __download_image(self, url, filename, session):
        # schedule a single asynchronous, checksum-verified download
        if url is not None:
            session.download(url, filename, timeout=self.__download_timeout)

    def __download_images(self, urls, foldername, session):
        # keep the remote basename so repeated runs overwrite the same file
        for url in urls:
            filename = os.path.basename(urlparse(url).path)
            filename = os.path.join(foldername, filename)
            self.__download_image(url, filename, session)

    def update_assets(self):
        """Download localized texts, icons and screenshots for all selected apps."""
        meta = self.__config.metadata
        LOGGER.info("UPDATING Assets metadata")
        start = time.time()
        cnt = 0
        ecnt = 0
        with FuturesSessionVerifiedDownload(max_workers=self.__max_workers) as session:
            for repo, appid in self.all_apps(session=session):
                try:
                    loc_appid = os.path.join(self.__config.metadata_dir, appid)
                    app_meta = meta[appid]
                    for locale in app_meta.locales:
                        loc_path = os.path.join(loc_appid, locale)
                        imags_path = os.path.join(loc_path, 'images')
                        # TODO: check if we need to download really all images
                        # & clean folder before/after?
                        MetadataUpdate.__write_text(app_meta.full_description(locale), 'full_description.txt', loc_path)
                        MetadataUpdate.__write_text(app_meta.short_description(locale), 'short_description.txt', loc_path)
                        MetadataUpdate.__write_text(app_meta.title(locale), 'title.txt', loc_path)

                        self.__download_image(app_meta.icon(locale), os.path.join(imags_path, 'icon.png'), session)
                        self.__download_image(app_meta.feature_graphic(locale), os.path.join(imags_path, 'featureGraphic.png'), session)
                        self.__download_image(app_meta.promo_graphic(locale), os.path.join(imags_path, 'promoGraphic.png'), session)
                        self.__download_image(app_meta.tv_banner(locale), os.path.join(imags_path, 'tvBanner.png'), session)

                        self.__download_images(app_meta.phone_screenshots(locale), os.path.join(loc_path, 'phoneScreenshots'), session)
                        self.__download_images(app_meta.seven_inch_screenshots(locale), os.path.join(loc_path, 'sevenInchScreenshots'), session)
                        self.__download_images(app_meta.ten_inch_screenshots(locale), os.path.join(loc_path, 'tenInchScreenshots'), session)
                        self.__download_images(app_meta.tv_screenshots(locale), os.path.join(loc_path, 'tvScreenshots'), session)
                        self.__download_images(app_meta.wear_screenshots(locale), os.path.join(loc_path, 'wearScreenshots'), session)
                except Exception:
                    LOGGER.exception("Error processing Asset download for %s", appid)
            # drain the session once all downloads are scheduled
            for success, filename, bts, hbts, elapsed in session.completed():
                if success:
                    cnt += 1
                else:
                    ecnt += 1
        elapsed = time.time() - start
        LOGGER.info("UPDATED Assets metadata, %s files, %s errors (%s)", cnt, ecnt, timedelta(seconds=elapsed))
"""Package exposing fdroid_dl.Config."""
# -*- coding: utf-8 -*-

import logging
try:
    from collections.abc import MutableMapping
except ImportError:
    from collections import MutableMapping
import copy
import json
import os
import os.path
import shutil
from tempfile import NamedTemporaryFile
from .repoconfig import RepoConfig
from .metadata import Metadata
from .index import Index
from ..json import GenericJSONEncoder


LOGGER = logging.getLogger('model.Config')


class Config(MutableMapping):
    """
    Main Class responsible for handling the f-droid.json config file.

    This Class represents the config file used by fdroid-dl and will default
    to https://f-droid.org/repo/ as base repo and add at least the
    **org.fdroid.fdroid** app for download into the queue.
    """

    DEFAULTS = {
        "f-droid": {
            'https://f-droid.org/repo/': {
                "apps": ["org.fdroid.fdroid"]
            }
        },
        "metadata": {}
    }

    def __init__(self, filename='fdroid-dl.json', repo_dir='./repo',
                 metadata_dir='./metadata', cache_dir='.cache', apk_versions=1):
        """
        Parameters
        ----------
        filename : str
            filename including path where to store the generated config file
        repo_dir : str
            path to f-droid repo, apk files will be downloaded here
        metadata_dir : str
            path to f-droid metadata directory, images and descriptions will
            be stored here
        cache_dir : str
            directory to store extracted and parsed index.json files from
            f-droid repo
        apk_versions : int
            how many versions of apk files should be downloaded
        """
        self.__filename = filename
        self.__repo = repo_dir
        self.__metadata_dir = metadata_dir
        self.__cache_dir = cache_dir
        self.__store = {}
        self.__indices = {}
        self.__metadata = None
        self.__apk_versions = apk_versions
        self.__init_defaults()
        self.__prepare_fs()

    @property
    def filename(self):
        return str(self.__filename)

    @property
    def repo_dir(self):
        return str(self.__repo)

    @property
    def metadata_dir(self):
        return str(self.__metadata_dir)

    @property
    def cache_dir(self):
        return str(self.__cache_dir)

    @property
    def store(self):
        # NOTE(review): returns the *string representation* of the backing
        # store, not the dict itself; kept as-is since callers may rely on
        # it -- confirm intent.
        return str(self.__store)

    @property
    def metadata(self):
        return self.__metadata

    @property
    def size(self):
        # number of top-level sections ('f-droid', 'metadata', ...)
        return len(self.__store.keys())

    @property
    def apk_versions(self):
        return int(self.__apk_versions)

    def __init_defaults(self):
        # deep-copy DEFAULTS so the class-level dict is never mutated
        self.__store = copy.deepcopy(Config.DEFAULTS)
        for key in Config.DEFAULTS['f-droid'].keys():
            cfg = RepoConfig(key, self.__store['f-droid'][key], self)
            self.__store['f-droid'][cfg.url] = cfg
        self.__metadata = Metadata(self, Config.DEFAULTS.get('metadata', {}))

    def __prepare_fs(self):
        # make sure every target directory exists before first use
        for directory in (self.__repo, self.__metadata_dir, self.__cache_dir):
            if not os.path.exists(directory):
                os.makedirs(directory)

    def load(self, file=None):
        """
        Load given file handle or filename into this Config object.

        Creates an empty JSON config file first if the given filename does
        not exist yet. The data is then read from ``self.filename``.
        """
        if file is None:
            file = self.__filename
        if not hasattr(file, 'read'):
            if not os.path.exists(file):
                with open(file, 'w') as fl:
                    fl.write('{}')
        # BUG FIX: the original additionally opened ``file`` for reading here
        # and never used or closed the handle (resource leak); all data is
        # read from ``self.__filename`` below, exactly as before.

        if os.path.isfile(self.__filename):
            with open(self.__filename) as config_file:
                try:
                    file_data = json.load(config_file)
                    if 'f-droid' in file_data:
                        for key in file_data['f-droid'].keys():
                            cfg = RepoConfig(
                                key, file_data['f-droid'][key], self)
                            self.__store['f-droid'][cfg.url] = cfg
                    if 'metadata' in file_data:
                        self.__metadata = Metadata(self, file_data['metadata'])
                        self.__store['metadata'] = self.__metadata
                except Exception:
                    LOGGER.exception("Fatal error reading %s", config_file)

    def save(self):
        """Serialize the configuration to ``self.filename`` via a temp file."""
        with NamedTemporaryFile(mode='w') as tmp:
            json.dump(self.__store, tmp, sort_keys=True,
                      indent=4, cls=GenericJSONEncoder)
            tmp.flush()
            # copy (not move) so NamedTemporaryFile can still clean up
            shutil.copy(tmp.name, self.__filename)
        return self

    @property
    def repos(self):
        """Yield every configured repository as a :class:`RepoConfig`."""
        for key in self.__store['f-droid'].keys():
            cfg = self.__store['f-droid'][key]
            if not isinstance(cfg, RepoConfig):
                # BUG FIX: yield the wrapped RepoConfig -- the original
                # yielded the raw dict it had just replaced in the store.
                cfg = RepoConfig(key, cfg, self)
                self.__store['f-droid'][key] = cfg
            yield cfg

    @property
    def indices(self):
        """Yield every locally cached :class:`Index` of the configured repos."""
        for repo in self.repos:
            if repo.url in self.__indices:
                yield self.__indices[repo.url]
            elif os.path.exists(repo.filename):
                # NOTE(review): the context manager re-saves the index file on
                # exit -- presumably intentional; confirm.
                with Index.from_json(repo.filename, key=repo.url) as idx:
                    self.__indices[repo.url] = idx
                    yield idx

    def repo(self, url):
        """
        Return RepoConfig based on url.

        Searches the config file for the given url and returns the
        corresponding RepoConfig. Repositories marked as having errors are
        treated as not found.

        Parameters
        ----------
        url : str
            url of repo in config file

        Returns
        -------
        fdroid_dl.model.RepoConfig
            repository configuration found for the given url

        Raises
        ------
        KeyError
            raised if given url is not found or is in error state
        """
        if url in self.__store['f-droid']:
            cfg = self.__store['f-droid'][url]
            if not isinstance(cfg, RepoConfig):
                # BUG FIX: return the wrapped RepoConfig -- the original kept
                # checking/returning the raw dict after replacing it.
                cfg = RepoConfig(url, cfg, self)
                self.__store['f-droid'][url] = cfg
            if 'error' not in cfg:
                return cfg
        raise KeyError("repo with url: %s not found" % url)

    def index(self, url):
        """
        Return Index based on url.

        Searches for the downloaded index file based on the repository url as
        configured in the config file.

        Parameters
        ----------
        url : str
            url of repo in config file

        Returns
        -------
        fdroid_dl.model.Index
            index found for given url

        Raises
        ------
        KeyError
            raised if no index file exists for the given url
        """
        repo = self.repo(url)
        if repo.url in self.__indices:
            return self.__indices[repo.url]
        if os.path.exists(repo.filename):
            with Index.from_json(repo.filename, key=repo.url) as idx:
                self.__indices[repo.url] = idx
                return self.__indices[repo.url]
        raise KeyError(
            "index with url: %s not found on filesystem file: %s" %
            (url, repo.filename))

    def __repr__(self):
        # BUG FIX: the original ``return "" % str(...)`` raised TypeError
        # (format text evidently lost); provide a real representation.
        return "<Config(%s)>" % str(self.__store)

    #######################
    # implement "dict"
    #######################

    def __getitem__(self, key):
        """."""
        return self.__store[key]

    def __setitem__(self, key, value):
        """."""
        self.__store[key] = value

    def __delitem__(self, key):
        """."""
        del self.__store[key]

    def __iter__(self):
        """."""
        return iter(self.__store)

    def __len__(self):
        """."""
        return len(self.__store)

    #######################
    # implement "with"
    #######################

    def __enter__(self):
        """Load configuration on context entry."""
        self.load()
        return self

    def __exit__(self, exc_type, value, traceback):
        """Persist configuration on context exit."""
        self.save()
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Model wrapping f-droid's ``index.xml`` / ``index-v1.json`` repository index."""

try:
    from collections.abc import MutableMapping
except ImportError:
    from collections import MutableMapping
import re
import json
import xml.etree.ElementTree as ET
from time import mktime, strptime
import logging
try:
    from urllib.parse import urljoin
except ImportError:
    from urlparse import urljoin
from tempfile import NamedTemporaryFile
import shutil
from ..json import GenericJSONEncoder


LOGGER = logging.getLogger('model.Index')


class Index(MutableMapping):
    """
    Main Class responsible for handling the f-droid's index.xml and
    index-v1.json files.
    """

    def __init__(self, key=None, filename=None, format='json',
                 default_locale='en-US', store=None):
        """
        Parameters
        ----------
        key : str
            repository url this index belongs to
        filename : str
            path the index was loaded from / will be saved to
        format : str
            'json' or 'xml'
        default_locale : str
            locale used for texts converted from the old XML format
        store : dict
            optional initial backing store
        """
        self.__key = key
        self.__filename = filename
        self.__format = format
        self.__default_locale = default_locale
        # BUG FIX: the original used ``store=dict()`` -- a mutable default
        # argument shared by every Index created without an explicit store.
        self.__store = {} if store is None else store

    @classmethod
    def from_json(cls, source, **kwargs):
        """Alternate constructor reading an index-v1.json file or handle."""
        if not hasattr(source, "read"):
            source = open(source, "r")
        kwargs['format'] = 'json'
        kwargs['filename'] = source.name
        return cls(**kwargs).load(source)

    @classmethod
    def from_xml(cls, source, **kwargs):
        """Alternate constructor reading a legacy index.xml file or handle."""
        if not hasattr(source, "read"):
            source = open(source, "r")
        kwargs['format'] = 'xml'
        kwargs['filename'] = source.name
        return cls(**kwargs).load(source)

    @property
    def filename(self):
        return str(self.__filename)

    @property
    def key(self):
        return str(self.__key)

    @property
    def repo_url(self):
        # the key *is* the repository url
        return str(self.__key)

    @property
    def format(self):
        return str(self.__format)

    @property
    def default_locale(self):
        return str(self.__default_locale)

    def find_appids(self, key):
        """
        Return package names matching ``key``.

        ``key`` may be a literal package name, a ``regex:``-prefixed pattern,
        or one of ``*``/``.*``/``all`` for every app.
        """
        if key is None:
            raise KeyError("key must not be empty")
        ret_val = set()
        if key.startswith('regex:'):
            regexc = re.compile(key[6:], re.I | re.S)
            for app in self.__store['apps']:
                match = regexc.match(app['packageName'])
                if match is not None:
                    ret_val.add(app['packageName'])
        elif key in ['*', '.*', 'all']:
            for app in self.__store['apps']:
                ret_val.add(app['packageName'])
        else:
            for app in self.__store['apps']:
                if app['packageName'] == key:
                    ret_val.add(app['packageName'])
        return list(ret_val)

    def defaultnum(self, value):
        """Convert purely numeric strings to int, leave everything else alone."""
        if value.isnumeric():
            return int(value)
        return value

    def custom_func(self, xpath):
        """Split ``'path:expr'`` into ``(path, expr)``; no colon -> ``(xpath, None)``."""
        if xpath is not None:
            regexc = re.compile(r'(.*?):(.*)$', re.I | re.S)
            match = regexc.match(xpath)
            if match is not None:
                return (match.group(1), match.group(2))
            return (xpath, None)
        return (None, None)

    def run_custom_func(self, method, value):
        """Evaluate a conversion expression with ``value`` in scope.

        SECURITY NOTE: uses ``eval``; expressions come from hard-coded xpath
        specs in :meth:`convert`, never from repository data -- keep it so.
        """
        if method is None or value is None:
            return None
        try:
            return eval(method, {'mktime': mktime, 'strptime': strptime}, {'value': value})
        except Exception:
            LOGGER.exception("error in eval(%s)", method)

    def xmlattr(self, node, xml_node, key=None, xpath=None):
        """Store the attribute addressed by ``xpath`` into ``node[key]``."""
        if xpath is None and key is None:
            return None
        if xpath is None and key is not None:
            xpath = key
        value = self.xmlattrval(xml_node, xpath)
        if value is not None:
            if key is not None:
                node[key] = value
            else:
                # NOTE(review): rebinding the local has no effect on the
                # caller; kept for parity with the original.
                node = value

    def xmlattrval(self, xml_node, xpath):
        """Resolve ``path[@attr]`` (with optional ``:expr`` suffix) to a value."""
        if xml_node is not None:
            regexc = re.compile(r'^.*?\[@([^\]]+)\]$')
            match = regexc.match(xpath)
            if match is not None:
                # BUG FIX: the original referenced the undefined name ``m``
                # here (NameError whenever the ``[@attr]`` form was used).
                attr_name = match.group(1)
            else:
                attr_name = xpath
            (xpath, func) = self.custom_func(xpath)
            if xpath is not None:
                xml_node = xml_node.find(xpath)
            if xml_node is not None:
                value = xml_node.get(attr_name, None)
                if value is not None:
                    if func is not None:
                        return self.run_custom_func(func, self.defaultnum(value.strip()))
                    return self.defaultnum(value.strip())
        return None

    def xmltext(self, node, xml_node, key=None, xpath=None):
        """Store the text addressed by ``xpath`` into ``node[key]``."""
        if xpath is None and key is None:
            return None
        if xpath is None and key is not None:
            xpath = key
        value = self.xmltextval(xml_node, xpath)
        if value is not None:
            if key is not None:
                node[key] = value
            else:
                # NOTE(review): rebinding the local has no effect on the
                # caller; kept for parity with the original.
                node = value

    def xmltextval(self, xml_node, xpath=None):
        """Resolve an xpath (with optional ``:expr`` suffix) to its text value."""
        (xpath, func) = self.custom_func(xpath)
        if xpath is not None:
            xml_node = xml_node.find(xpath)
        if xml_node is not None:
            value = xml_node.text
            if value is not None:
                if func is not None:
                    return self.run_custom_func(func, self.defaultnum(value.strip()))
                return self.defaultnum(value.strip())

    def xmllist(self, root, xpath):
        """Collect attribute or text values of every element matching ``xpath``."""
        ret_val = []
        regexc = re.compile(r'^.*?\[@([^\]]+)\]$')
        match = regexc.match(xpath)
        if match is not None:
            attr_name = match.group(1)
            matches = root.findall(xpath)
            if matches is not None:
                for match in matches:
                    value = match.get(attr_name, None)
                    if value is not None:
                        ret_val.append(self.defaultnum(value.strip()))
        else:
            matches = root.findall(xpath)
            if matches is not None:
                for match in matches:
                    value = self.xmltextval(match)
                    if value is not None:
                        ret_val.append(value)
        return ret_val

    def convert(self, root):
        """Transform a legacy ``index.xml`` tree into the index-v1 json layout."""
        self.__store = {}
        store = self.__store
        repo = root.find('repo')
        if repo is not None:
            srepo = store['repo'] = {}
            self.xmlattr(srepo, root, 'timestamp', './repo[@timestamp]')
            self.xmlattr(srepo, root, 'version', './repo[@version]')
            self.xmlattr(srepo, root, 'maxage', './repo[@maxage]')
            self.xmlattr(srepo, root, 'name', './repo[@name]')
            self.xmlattr(srepo, root, 'icon', './repo[@icon]')
            self.xmlattr(srepo, root, 'url', './repo[@address]')
            self.xmltext(srepo, root, 'description', './description')
            srepo['mirrors'] = self.xmllist(repo, './mirror')
        requ = store['requests'] = {}
        requ['install'] = self.xmllist(root, './install[@packageName]')
        requ['uninstall'] = self.xmllist(root, './uninstall[@packageName]')
        apps = store['apps'] = []
        pkgs = store['packages'] = {}
        applications = root.findall('application[@id]')
        for xmlapp in applications:
            app_val = {}
            self.xmltext(app_val, xmlapp, 'authorEmail', './email')
            self.xmltext(app_val, xmlapp, 'authorName', './author')
            self.xmltext(app_val, xmlapp, 'authorWebSite', './web')
            self.xmltext(app_val, xmlapp, 'bitcoin', './bitcoin')
            self.xmltext(app_val, xmlapp, 'donate', './donate')
            self.xmltext(app_val, xmlapp, 'flattr', './flattr')
            self.xmltext(app_val, xmlapp, 'liberapay', './liberapay')
            self.xmltext(app_val, xmlapp, 'litecoin', './litecoin')
            cat = self.xmltextval(xmlapp, 'categories')
            if cat is not None and len(cat) > 0:
                app_val['categories'] = cat.split(',')
            afaet = self.xmltextval(xmlapp, 'antiFeatures')
            if afaet is not None and len(afaet) > 0:
                app_val['antiFeatures'] = afaet.split(',')
            self.xmltext(app_val, xmlapp, 'suggestedVersionName',
                         './marketversion')
            self.xmltext(app_val, xmlapp, 'suggestedVersionCode',
                         './marketvercode')
            self.xmltext(app_val, xmlapp, 'issueTracker', './tracker')
            self.xmltext(app_val, xmlapp, 'changelog', './changelog')
            self.xmltext(app_val, xmlapp, 'license', './license')
            self.xmltext(app_val, xmlapp, 'name', './name')
            self.xmltext(app_val, xmlapp, 'sourceCode', './source')
            self.xmltext(app_val, xmlapp, 'webSite', './web')
            # dates are converted to unix timestamps via the :expr suffix
            self.xmltext(app_val, xmlapp, 'added',
                         './added:int(mktime(strptime(value, "%Y-%m-%d")))')
            self.xmltext(app_val, xmlapp, 'icon', './icon')
            self.xmltext(app_val, xmlapp, 'packageName', './id')
            self.xmltext(app_val, xmlapp, 'lastUpdated',
                         './lastupdated:int(mktime(strptime(value, "%Y-%m-%d")))')
            loc = app_val['localized'] = {}
            dloc = loc[self.default_locale] = {}
            self.xmltext(dloc, xmlapp, 'description', './desc')
            self.xmltext(dloc, xmlapp, 'summary', './summary')

            app_id = self.xmltextval(xmlapp, './id')
            if app_id is not None:
                pkg = pkgs[app_id] = []
                packages = xmlapp.findall('./package')
                for xmlpkg in packages:
                    pkg_val = {}
                    self.xmltext(
                        pkg_val, xmlpkg, 'added',
                        './added:int(mktime(strptime(value, "%Y-%m-%d")))')
                    self.xmltext(pkg_val, xmlpkg, 'apkName', './apkname')
                    self.xmltext(pkg_val, xmlpkg, 'hash', './hash')
                    self.xmlattr(pkg_val, xmlpkg, 'hashType', './hash[@type]')
                    self.xmltext(pkg_val, xmlpkg, 'minSdkVersion', './sdkver')
                    self.xmltext(pkg_val, xmlpkg, 'targetSdkVersion',
                                 './targetSdkVersion')
                    pkg_val['packageName'] = app_id
                    self.xmltext(pkg_val, xmlpkg, 'sig', './sig')
                    self.xmltext(pkg_val, xmlpkg, 'versionName', './version')
                    self.xmltext(pkg_val, xmlpkg, 'versionCode',
                                 './versioncode')
                    self.xmltext(pkg_val, xmlpkg, 'size', './size')
                    perm = xmlpkg.find('permissions')
                    if perm is not None:
                        user_permission = pkg_val['uses-permission'] = []
                        user_permission_split = perm.text.split(",")
                        for user_permission_str in user_permission_split:
                            user_permission.append(['android.permission.' + user_permission_str.strip(), None])
                    if len(pkg_val) > 0:
                        pkg.append(pkg_val)
            # > 1 because 'localized' is always present
            if len(app_val) > 1:
                apps.append(app_val)

    def monkeypatch(self):
        """Fix up metadata paths -> absolute urls (idempotent)."""
        if '_monkeypatched' not in self.__store:
            if self.__key is None:
                raise AttributeError('key not defined')
            self.__store['_monkeypatched'] = True
            if 'packages' in self.__store:
                for key in self.__store['packages']:
                    for pkg in self.__store['packages'][key]:
                        pkg['apkName'] = urljoin(self.__key, pkg['apkName'])
                        if pkg.get('srcname') is not None:
                            pkg['srcname'] = urljoin(
                                self.__key, pkg['srcname'])

            if 'apps' in self.__store:
                for app in self.__store['apps']:
                    if 'icon' in app:
                        app['icon'] = urljoin(self.__key, "icons/" + app['icon'])
                    locs = app.get('localized', {})
                    for k in locs.keys():
                        loc = locs[k]
                        base = app['packageName'] + '/' + k + '/'
                        # single-image attributes: <base>/<name>
                        for attr in ('icon', 'featureGraphic',
                                     'promoGraphic', 'tvBanner'):
                            if attr in loc:
                                loc[attr] = urljoin(self.__key, base + loc[attr])
                        # screenshot lists: <base>/<kind>/<name>
                        for attr in ('phoneScreenshots', 'sevenInchScreenshots',
                                     'tenInchScreenshots', 'tvScreenshots',
                                     'wearScreenshots'):
                            if attr in loc:
                                loc[attr] = [urljoin(self.__key, base + attr + '/' + value)
                                             for value in loc[attr]]
        return self

    def load(self, file, format=None):
        """Load index data from ``file`` (handle or path), auto-detecting format."""
        if not hasattr(file, 'read'):
            file = open(file, 'r')
        if format is not None:
            self.__format = format

        if self.__format == 'json':
            with file as idxfl:
                self.__store = json.load(idxfl)
        elif self.__format == 'xml':
            # we have no json version; transform the legacy XML format
            tree = ET.parse(file)
            file.close()
            root = tree.getroot()
            self.convert(root)
            self.__format = 'json'
        else:
            # format unknown: try json first, fall back to xml
            try:
                self.load(file, format='json')
            except Exception:
                # BUG FIX: the failed json attempt closed the handle
                # (``with file``), so reopen by name for the xml attempt.
                self.load(file.name, format='xml')
        return self

    def save(self, filename=None):
        """Serialize the index to ``filename`` (or the stored one) via a temp file."""
        if filename is not None:
            self.__filename = filename
        with NamedTemporaryFile(mode='w') as tmp:
            json.dump(self.__store, tmp, sort_keys=True,
                      indent=4, cls=GenericJSONEncoder)
            tmp.flush()
            shutil.copy(tmp.name, self.__filename)
        return self

    def __repr__(self):
        # BUG FIX: the original ``return "" % ...`` raised TypeError
        # (format text evidently lost); provide a real representation.
        return "<Index(%s)>" % str(json.dumps(self.__store, indent=4))

    @property
    def __json__(self):
        """Make Index json serializable."""
        return self.__store

    #######################
    # implement "dict"
    #######################

    def __getitem__(self, key):
        return self.__store[key]

    def __setitem__(self, key, value):
        self.__store[key] = value

    def __delitem__(self, key):
        del self.__store[key]

    def __iter__(self):
        return iter(self.__store)

    def __len__(self):
        return len(self.__store)

    #######################
    # implement "with"
    #######################

    def __enter__(self):
        self.load(self.__filename)
        return self

    def __exit__(self, exc_type, value, traceback):
        self.save()
18 | #init-hook= 19 | 20 | # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the 21 | # number of processors available to use. 22 | jobs=1 23 | 24 | # Control the amount of potential inferred values when inferring a single 25 | # object. This can help the performance when dealing with large functions or 26 | # complex, nested conditions. 27 | limit-inference-results=100 28 | 29 | # List of plugins (as comma separated values of python modules names) to load, 30 | # usually to register additional checkers. 31 | load-plugins= 32 | 33 | # Pickle collected data for later comparisons. 34 | persistent=yes 35 | 36 | # Specify a configuration file. 37 | #rcfile= 38 | 39 | # When enabled, pylint would attempt to guess common misconfiguration and emit 40 | # user-friendly hints instead of false-positive error messages. 41 | suggestion-mode=yes 42 | 43 | # Allow loading of arbitrary C extensions. Extensions are imported into the 44 | # active Python interpreter and may run arbitrary code. 45 | unsafe-load-any-extension=no 46 | 47 | 48 | [MESSAGES CONTROL] 49 | 50 | # Only show warnings with the listed confidence levels. Leave empty to show 51 | # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. 52 | confidence=INFERENCE 53 | 54 | # Disable the message, report, category or checker with the given id(s). You 55 | # can either give multiple identifiers separated by comma (,) or put this 56 | # option multiple times (only on the command line, not in the configuration 57 | # file where it should appear only once). You can also use "--disable=all" to 58 | # disable everything first and then reenable specific checks. For example, if 59 | # you want to run only the similarities checker, you can use "--disable=all 60 | # --enable=similarities". If you want to run only the classes checker, but have 61 | # no Warning level messages displayed, use "--disable=all --enable=classes 62 | # --disable=W". 
63 | disable=print-statement, 64 | parameter-unpacking, 65 | unpacking-in-except, 66 | old-raise-syntax, 67 | backtick, 68 | long-suffix, 69 | old-ne-operator, 70 | old-octal-literal, 71 | import-star-module-level, 72 | non-ascii-bytes-literal, 73 | raw-checker-failed, 74 | bad-inline-option, 75 | locally-disabled, 76 | file-ignored, 77 | suppressed-message, 78 | useless-suppression, 79 | deprecated-pragma, 80 | use-symbolic-message-instead, 81 | apply-builtin, 82 | basestring-builtin, 83 | buffer-builtin, 84 | cmp-builtin, 85 | coerce-builtin, 86 | execfile-builtin, 87 | file-builtin, 88 | long-builtin, 89 | raw_input-builtin, 90 | reduce-builtin, 91 | standarderror-builtin, 92 | unicode-builtin, 93 | xrange-builtin, 94 | coerce-method, 95 | delslice-method, 96 | getslice-method, 97 | setslice-method, 98 | no-absolute-import, 99 | old-division, 100 | dict-iter-method, 101 | dict-view-method, 102 | next-method-called, 103 | metaclass-assignment, 104 | indexing-exception, 105 | raising-string, 106 | reload-builtin, 107 | oct-method, 108 | hex-method, 109 | nonzero-method, 110 | cmp-method, 111 | input-builtin, 112 | round-builtin, 113 | intern-builtin, 114 | unichr-builtin, 115 | map-builtin-not-iterating, 116 | zip-builtin-not-iterating, 117 | range-builtin-not-iterating, 118 | filter-builtin-not-iterating, 119 | using-cmp-argument, 120 | eq-without-hash, 121 | div-method, 122 | idiv-method, 123 | rdiv-method, 124 | exception-message-attribute, 125 | invalid-str-codec, 126 | sys-max-int, 127 | bad-python3-import, 128 | deprecated-string-function, 129 | deprecated-str-translate-call, 130 | deprecated-itertools-function, 131 | deprecated-types-field, 132 | next-method-defined, 133 | dict-items-not-iterating, 134 | dict-keys-not-iterating, 135 | dict-values-not-iterating, 136 | deprecated-operator-function, 137 | deprecated-urllib-function, 138 | xreadlines-attribute, 139 | deprecated-sys-function, 140 | exception-escape, 141 | comprehension-escape, 142 | 
line-too-long, 143 | C0111 144 | 145 | # Enable the message, report, category or checker with the given id(s). You can 146 | # either give multiple identifier separated by comma (,) or put this option 147 | # multiple time (only on the command line, not in the configuration file where 148 | # it should appear only once). See also the "--disable" option for examples. 149 | enable=c-extension-no-member 150 | 151 | 152 | [REPORTS] 153 | 154 | # Python expression which should return a note less than 10 (10 is the highest 155 | # note). You have access to the variables errors warning, statement which 156 | # respectively contain the number of errors / warnings messages and the total 157 | # number of statements analyzed. This is used by the global evaluation report 158 | # (RP0004). 159 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) 160 | 161 | # Template used to display messages. This is a python new-style format string 162 | # used to format the message information. See doc for all details. 163 | #msg-template= 164 | 165 | # Set the output format. Available formats are text, parseable, colorized, json 166 | # and msvs (visual studio). You can also give a reporter class, e.g. 167 | # mypackage.mymodule.MyReporterClass. 168 | output-format=text 169 | 170 | # Tells whether to display a full report or only the messages. 171 | reports=no 172 | 173 | # Activate the evaluation score. 174 | score=yes 175 | 176 | 177 | [REFACTORING] 178 | 179 | # Maximum number of nested blocks for function / method body 180 | max-nested-blocks=5 181 | 182 | # Complete name of functions that never returns. When checking for 183 | # inconsistent-return-statements if a never returning function is called then 184 | # it will be considered as an explicit return statement and no message will be 185 | # printed. 186 | never-returning-functions=sys.exit 187 | 188 | 189 | [FORMAT] 190 | 191 | # Expected format of line ending, e.g. 
empty (any line ending), LF or CRLF. 192 | expected-line-ending-format= 193 | 194 | # Regexp for a line that is allowed to be longer than the limit. 195 | ignore-long-lines=^\s*(# )?<?https?://\S+>?$ 196 | 197 | # Number of spaces of indent required inside a hanging or continued line. 198 | indent-after-paren=4 199 | 200 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 201 | # tab). 202 | indent-string=' ' 203 | 204 | # Maximum number of characters on a single line. 205 | max-line-length=100 206 | 207 | # Maximum number of lines in a module. 208 | max-module-lines=1000 209 | 210 | # List of optional constructs for which whitespace checking is disabled. `dict- 211 | # separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. 212 | # `trailing-comma` allows a space between comma and closing bracket: (a, ). 213 | # `empty-line` allows space-only lines. 214 | no-space-check=trailing-comma, 215 | dict-separator 216 | 217 | # Allow the body of a class to be on the same line as the declaration if body 218 | # contains single statement. 219 | single-line-class-stmt=no 220 | 221 | # Allow the body of an if to be on the same line as the test if there is no 222 | # else. 223 | single-line-if-stmt=no 224 | 225 | 226 | [TYPECHECK] 227 | 228 | # List of decorators that produce context managers, such as 229 | # contextlib.contextmanager. Add to this list to register other decorators that 230 | # produce valid context managers. 231 | contextmanager-decorators=contextlib.contextmanager 232 | 233 | # List of members which are set dynamically and missed by pylint inference 234 | # system, and so shouldn't trigger E1101 when accessed. Python regular 235 | # expressions are accepted. 236 | generated-members= 237 | 238 | # Tells whether missing members accessed in mixin class should be ignored. A 239 | # mixin class is detected if its name ends with "mixin" (case insensitive).
240 | ignore-mixin-members=yes 241 | 242 | # Tells whether to warn about missing members when the owner of the attribute 243 | # is inferred to be None. 244 | ignore-none=yes 245 | 246 | # This flag controls whether pylint should warn about no-member and similar 247 | # checks whenever an opaque object is returned when inferring. The inference 248 | # can return multiple potential results while evaluating a Python object, but 249 | # some branches might not be evaluated, which results in partial inference. In 250 | # that case, it might be useful to still emit no-member and other checks for 251 | # the rest of the inferred objects. 252 | ignore-on-opaque-inference=yes 253 | 254 | # List of class names for which member attributes should not be checked (useful 255 | # for classes with dynamically set attributes). This supports the use of 256 | # qualified names. 257 | ignored-classes=optparse.Values,thread._local,_thread._local 258 | 259 | # List of module names for which member attributes should not be checked 260 | # (useful for modules/projects where namespaces are manipulated during runtime 261 | # and thus existing member attributes cannot be deduced by static analysis. It 262 | # supports qualified module names, as well as Unix pattern matching. 263 | ignored-modules= 264 | 265 | # Show a hint with possible names when a member name was not found. The aspect 266 | # of finding the hint is based on edit distance. 267 | missing-member-hint=yes 268 | 269 | # The minimum edit distance a name should have in order to be considered a 270 | # similar match for a missing member name. 271 | missing-member-hint-distance=1 272 | 273 | # The total number of similar names that should be taken in consideration when 274 | # showing a hint for a missing member. 275 | missing-member-max-choices=1 276 | 277 | 278 | [SIMILARITIES] 279 | 280 | # Ignore comments when computing similarities. 281 | ignore-comments=yes 282 | 283 | # Ignore docstrings when computing similarities. 
284 | ignore-docstrings=yes 285 | 286 | # Ignore imports when computing similarities. 287 | ignore-imports=yes 288 | 289 | # Minimum lines number of a similarity. 290 | min-similarity-lines=10 291 | 292 | 293 | [VARIABLES] 294 | 295 | # List of additional names supposed to be defined in builtins. Remember that 296 | # you should avoid defining new builtins when possible. 297 | additional-builtins= 298 | 299 | # Tells whether unused global variables should be treated as a violation. 300 | allow-global-unused-variables=yes 301 | 302 | # List of strings which can identify a callback function by name. A callback 303 | # name must start or end with one of those strings. 304 | callbacks=cb_, 305 | _cb 306 | 307 | # A regular expression matching the name of dummy variables (i.e. expected to 308 | # not be used). 309 | dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ 310 | 311 | # Argument names that match this expression will be ignored. Default to name 312 | # with leading underscore. 313 | ignored-argument-names=_.*|^ignored_|^unused_ 314 | 315 | # Tells whether we should check for unused import in __init__ files. 316 | init-import=no 317 | 318 | # List of qualified module names which can have objects that can redefine 319 | # builtins. 320 | redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io 321 | 322 | 323 | [STRING] 324 | 325 | # This flag controls whether the implicit-str-concat-in-sequence should 326 | # generate a warning on implicit string concatenation in sequences defined over 327 | # several lines. 328 | check-str-concat-over-line-jumps=no 329 | 330 | 331 | [LOGGING] 332 | 333 | # Format style used to check logging format string. `old` means using % 334 | # formatting, while `new` is for `{}` formatting. 335 | logging-format-style=old 336 | 337 | # Logging modules to check that the string format arguments are in logging 338 | # function parameter format. 
339 | logging-modules=logging 340 | 341 | 342 | [BASIC] 343 | 344 | # Naming style matching correct argument names. 345 | argument-naming-style=snake_case 346 | 347 | # Regular expression matching correct argument names. Overrides argument- 348 | # naming-style. 349 | #argument-rgx= 350 | 351 | # Naming style matching correct attribute names. 352 | attr-naming-style=snake_case 353 | 354 | # Regular expression matching correct attribute names. Overrides attr-naming- 355 | # style. 356 | #attr-rgx= 357 | 358 | # Bad variable names which should always be refused, separated by a comma. 359 | bad-names=foo, 360 | bar, 361 | baz, 362 | toto, 363 | tutu, 364 | tata 365 | 366 | # Naming style matching correct class attribute names. 367 | class-attribute-naming-style=any 368 | 369 | # Regular expression matching correct class attribute names. Overrides class- 370 | # attribute-naming-style. 371 | #class-attribute-rgx= 372 | 373 | # Naming style matching correct class names. 374 | class-naming-style=PascalCase 375 | 376 | # Regular expression matching correct class names. Overrides class-naming- 377 | # style. 378 | #class-rgx= 379 | 380 | # Naming style matching correct constant names. 381 | const-naming-style=UPPER_CASE 382 | 383 | # Regular expression matching correct constant names. Overrides const-naming- 384 | # style. 385 | #const-rgx= 386 | 387 | # Minimum line length for functions/classes that require docstrings, shorter 388 | # ones are exempt. 389 | docstring-min-length=-1 390 | 391 | # Naming style matching correct function names. 392 | function-naming-style=snake_case 393 | 394 | # Regular expression matching correct function names. Overrides function- 395 | # naming-style. 396 | #function-rgx= 397 | 398 | # Good variable names which should always be accepted, separated by a comma. 399 | good-names=i, 400 | j, 401 | k, 402 | ex, 403 | Run, 404 | _ 405 | 406 | # Include a hint for the correct naming format with invalid-name. 
407 | include-naming-hint=no 408 | 409 | # Naming style matching correct inline iteration names. 410 | inlinevar-naming-style=any 411 | 412 | # Regular expression matching correct inline iteration names. Overrides 413 | # inlinevar-naming-style. 414 | #inlinevar-rgx= 415 | 416 | # Naming style matching correct method names. 417 | method-naming-style=snake_case 418 | 419 | # Regular expression matching correct method names. Overrides method-naming- 420 | # style. 421 | #method-rgx= 422 | 423 | # Naming style matching correct module names. 424 | module-naming-style=snake_case 425 | 426 | # Regular expression matching correct module names. Overrides module-naming- 427 | # style. 428 | #module-rgx= 429 | 430 | # Colon-delimited sets of names that determine each other's naming style when 431 | # the name regexes allow several styles. 432 | name-group= 433 | 434 | # Regular expression which should only match function or class names that do 435 | # not require a docstring. 436 | no-docstring-rgx=^_ 437 | 438 | # List of decorators that produce properties, such as abc.abstractproperty. Add 439 | # to this list to register other decorators that produce valid properties. 440 | # These decorators are taken in consideration only for invalid-name. 441 | property-classes=abc.abstractproperty 442 | 443 | # Naming style matching correct variable names. 444 | variable-naming-style=snake_case 445 | 446 | # Regular expression matching correct variable names. Overrides variable- 447 | # naming-style. 448 | #variable-rgx= 449 | 450 | 451 | [MISCELLANEOUS] 452 | 453 | # List of note tags to take in consideration, separated by a comma. 454 | notes=FIXME, 455 | XXX, 456 | TODO 457 | 458 | 459 | [SPELLING] 460 | 461 | # Limits count of emitted suggestions for spelling mistakes. 462 | max-spelling-suggestions=4 463 | 464 | # Spelling dictionary name. Available dictionaries: none. To make it working 465 | # install python-enchant package.. 
466 | spelling-dict= 467 | 468 | # List of comma separated words that should not be checked. 469 | spelling-ignore-words= 470 | 471 | # A path to a file that contains private dictionary; one word per line. 472 | spelling-private-dict-file= 473 | 474 | # Tells whether to store unknown words to indicated private dictionary in 475 | # --spelling-private-dict-file option instead of raising a message. 476 | spelling-store-unknown-words=no 477 | 478 | 479 | [IMPORTS] 480 | 481 | # Allow wildcard imports from modules that define __all__. 482 | allow-wildcard-with-all=no 483 | 484 | # Analyse import fallback blocks. This can be used to support both Python 2 and 485 | # 3 compatible code, which means that the block might have code that exists 486 | # only in one or another interpreter, leading to false positives when analysed. 487 | analyse-fallback-blocks=no 488 | 489 | # Deprecated modules which should not be used, separated by a comma. 490 | deprecated-modules=optparse,tkinter.tix 491 | 492 | # Create a graph of external dependencies in the given file (report RP0402 must 493 | # not be disabled). 494 | ext-import-graph= 495 | 496 | # Create a graph of every (i.e. internal and external) dependencies in the 497 | # given file (report RP0402 must not be disabled). 498 | import-graph= 499 | 500 | # Create a graph of internal dependencies in the given file (report RP0402 must 501 | # not be disabled). 502 | int-import-graph= 503 | 504 | # Force import order to recognize a module as part of the standard 505 | # compatibility libraries. 506 | known-standard-library= 507 | 508 | # Force import order to recognize a module as part of a third party library. 509 | known-third-party=enchant 510 | 511 | 512 | [DESIGN] 513 | 514 | # Maximum number of arguments for function / method. 515 | max-args=5 516 | 517 | # Maximum number of attributes for a class (see R0902). 518 | max-attributes=7 519 | 520 | # Maximum number of boolean expressions in an if statement. 
521 | max-bool-expr=5 522 | 523 | # Maximum number of branch for function / method body. 524 | max-branches=12 525 | 526 | # Maximum number of locals for function / method body. 527 | max-locals=15 528 | 529 | # Maximum number of parents for a class (see R0901). 530 | max-parents=7 531 | 532 | # Maximum number of public methods for a class (see R0904). 533 | max-public-methods=20 534 | 535 | # Maximum number of return / yield for function / method body. 536 | max-returns=6 537 | 538 | # Maximum number of statements in function / method body. 539 | max-statements=50 540 | 541 | # Minimum number of public methods for a class (see R0903). 542 | min-public-methods=2 543 | 544 | 545 | [CLASSES] 546 | 547 | # List of method names used to declare (i.e. assign) instance attributes. 548 | defining-attr-methods=__init__, 549 | __new__, 550 | setUp 551 | 552 | # List of member names, which should be excluded from the protected access 553 | # warning. 554 | exclude-protected=_asdict, 555 | _fields, 556 | _replace, 557 | _source, 558 | _make 559 | 560 | # List of valid names for the first argument in a class method. 561 | valid-classmethod-first-arg=cls 562 | 563 | # List of valid names for the first argument in a metaclass class method. 564 | valid-metaclass-classmethod-first-arg=cls 565 | 566 | 567 | [EXCEPTIONS] 568 | 569 | # Exceptions that will emit a warning when being caught. Defaults to 570 | # "BaseException, Exception". 571 | overgeneral-exceptions=BaseException, 572 | Exception 573 | --------------------------------------------------------------------------------