├── .dockerignore
├── .github
├── dependabot.yml
└── workflows
│ └── python-publish.yml
├── .gitignore
├── .readthedocs.yml
├── .vscode
└── settings.json
├── Dockerfile-docs
├── LICENSE
├── MANIFEST.in
├── Makefile
├── README.md
├── docs-requirements.txt
├── docs
├── _static
│ └── custom.css
├── _templates
│ └── page.html
├── conf.py
└── index.rst
├── ipsw
├── __init__.py
├── api
│ ├── __init__.py
│ ├── client.py
│ ├── daemon.py
│ ├── dsc.py
│ ├── info.py
│ └── macho.py
├── client.py
├── constants.py
├── errors.py
├── models
│ ├── __init__.py
│ ├── configs.py
│ ├── dsc.py
│ ├── info.py
│ ├── macho.py
│ └── resource.py
├── transport
│ ├── __init__.py
│ ├── basehttpadapter.py
│ ├── npipeconn.py
│ ├── npipesocket.py
│ ├── sshconn.py
│ └── unixconn.py
├── types
│ ├── __init__.py
│ └── daemon.py
├── utils
│ ├── __init__.py
│ ├── config.py
│ ├── decorators.py
│ ├── json_stream.py
│ ├── proxy.py
│ ├── socket.py
│ └── utils.py
└── version.py
├── pyproject.toml
├── requirements.txt
├── scripts
└── release.sh
├── setup.cfg
├── setup.py
├── test-requirements.txt
└── tox.ini
/.dockerignore:
--------------------------------------------------------------------------------
1 | .git/
2 |
3 | build
4 | dist
5 | *.egg-info
6 | *.egg/
7 | *.pyc
8 | *.swp
9 |
10 | .tox
11 | .coverage
12 | html/*
13 | __pycache__
14 |
15 | # Compiled Documentation
16 | docs/_build
17 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: "pip"
4 | directory: "/"
5 | schedule:
6 | interval: "weekly"
7 | labels:
8 | - "dependencies"
9 | commit-message:
10 | prefix: "chore"
11 | include: "scope"
12 | - package-ecosystem: "github-actions"
13 | directory: "/"
14 | schedule:
15 | interval: "weekly"
16 | labels:
17 | - "dependencies"
18 | commit-message:
19 | prefix: "chore"
20 | include: "scope"
21 | - package-ecosystem: "docker"
22 | directory: "/"
23 | schedule:
24 | interval: "weekly"
25 | labels:
26 | - "dependencies"
27 | commit-message:
28 | prefix: "chore"
29 | include: "scope"
30 |
--------------------------------------------------------------------------------
/.github/workflows/python-publish.yml:
--------------------------------------------------------------------------------
1 | # This workflow will upload a Python Package using Twine when a release is created
2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries
3 |
4 | # This workflow uses actions that are not certified by GitHub.
5 | # They are provided by a third-party and are governed by
6 | # separate terms of service, privacy policy, and support
7 | # documentation.
8 |
9 | name: Upload PyPi
10 |
11 | on:
12 | release:
13 | types: [published]
14 |
15 | permissions:
16 | contents: read
17 |
18 | jobs:
19 | deploy:
20 |
21 | runs-on: ubuntu-latest
22 |
23 | steps:
24 | - uses: actions/checkout@v3
25 | - name: Set up Python
26 | uses: actions/setup-python@v4
27 | with:
28 | python-version: '3.x'
29 | - name: Install dependencies
30 | run: |
31 | python -m pip install --upgrade pip
32 | pip install build
33 | - name: Build package
34 | run: python -m build
35 | env:
36 |           SETUPTOOLS_SCM_PRETEND_VERSION_FOR_IPSW: ${{ github.event.release.tag_name }}
37 | - name: Publish package
38 | uses: pypa/gh-action-pypi-publish@a56da0b891b3dc519c7ee3284aff1fad93cc8598
39 | with:
40 | user: __token__
41 | password: ${{ secrets.PYPI_API_TOKEN }}
42 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # Rope project settings
118 | .ropeproject
119 |
120 | # mkdocs documentation
121 | /site
122 |
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 |
128 | # Pyre type checker
129 | .pyre/
130 |
131 | # setuptools_scm
132 | _version.py
133 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | sphinx:
4 | configuration: docs/conf.py
5 |
6 | build:
7 | os: ubuntu-20.04
8 | tools:
9 | python: '3.10'
10 |
11 | python:
12 | install:
13 | - requirements: docs-requirements.txt
14 | - method: pip
15 | path: .
16 | extra_requirements:
17 | - ssh
18 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "python.formatting.provider": "black"
3 | }
--------------------------------------------------------------------------------
/Dockerfile-docs:
--------------------------------------------------------------------------------
1 | # syntax=docker/dockerfile:1
2 |
3 | ARG PYTHON_VERSION=3.10
4 |
5 | FROM python:${PYTHON_VERSION}
6 |
7 | ARG uid=1000
8 | ARG gid=1000
9 |
10 | RUN addgroup --gid $gid sphinx \
11 | && useradd --uid $uid --gid $gid -M sphinx
12 |
13 | WORKDIR /src
14 | COPY requirements.txt docs-requirements.txt ./
15 | RUN pip install --no-cache-dir -r requirements.txt -r docs-requirements.txt
16 |
17 | USER sphinx
18 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 blacktop
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include test-requirements.txt
2 | include requirements.txt
3 | include README.md
4 | include README.rst
5 | include LICENSE
6 | recursive-include tests *.py
7 | recursive-include tests/unit/testdata *
8 | recursive-include tests/integration/testdata *
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | .PHONY: all
2 | all: docs
3 |
4 | .PHONY: clean
5 | clean:
6 | find -name "__pycache__" | xargs rm -rf
7 |
8 | .PHONY: build-docs
9 | build-docs:
10 | docker build -t docker-sdk-python-docs -f Dockerfile-docs $(uid_args) .
11 |
12 | .PHONY: docs
13 | docs: build-docs
14 | docker run --rm -t -v `pwd`:/src docker-sdk-python-docs sphinx-build docs docs/_build
15 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
ipsw-py
3 | ipsw
SDK for Python 🚧
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 | ## `NOTE:` This is a work in progress ⚠️
17 |
18 | ## Getting Started
19 |
20 | Start the `ipsw` daemon:
21 |
22 | ### macOS
23 |
24 | ```bash
25 | brew install blacktop/tap/ipswd
26 | brew services start blacktop/tap/ipswd
27 | ```
28 |
29 | ### Linux
30 |
31 | > ⚠️ UNTESTED ⚠️
32 |
33 | ```bash
34 | sudo snap install ipswd
35 | ```
36 |
37 | ### Docker
38 |
39 | ```bash
40 | docker run -d -p 3993:3993 -v `pwd`:/data blacktop/ipswd start
41 | ```
42 |
43 | ## Installing
44 |
45 | The latest stable version is [available on PyPI](https://pypi.org/project/ipsw/). Either add `ipsw` to your `requirements.txt` file or install with **pip**:
46 |
47 | ```bash
48 | pip install ipsw
49 | ```
50 |
51 | ## Getting Started
52 |
53 | Get IPSW info
54 |
55 | ```python
56 | import ipsw
57 |
58 | client = ipsw.IpswClient(base_url='tcp://127.0.0.1:3993')
59 |
60 | info = client.info.get("iPhone15,2_16.5_20F5028e_Restore.ipsw")
61 | print(f'{info.version} ({info.build})')
62 | for device in info.devices:
63 | print(f'- {device}')
64 | ```
65 | ```bash
66 | 16.5 (20F5028e)
67 | - iPhone 14 Pro
68 | ```
69 |
70 | Get DSC info
71 |
72 | ```python
73 | import ipsw
74 |
75 | client = ipsw.IpswClient(base_url='tcp://127.0.0.1:3993')
76 |
77 | dsc = client.dsc.open("20F5028e__iPhone15,2/dyld_shared_cache_arm64e")
78 | print(dsc)
79 | print(dsc.dylibs[0])
80 | ```
81 | ```python
82 |
83 | ```
84 | ```json
85 | {
86 | "index": 1,
87 | "name": "/usr/lib/libobjc.A.dylib",
88 | "version": "876.0.0.0.0",
89 | "uuid": "085A190C-6214-38EA-ACCB-428C3E8AFA65",
90 | "load_address": 6443204608
91 | }
92 | ```
93 |
94 | Get dylib inside DSC info
95 |
96 | ```py
97 | libswiftCore = dsc.dylib("libswiftCore.dylib")
98 | print(libswiftCore)
99 | ```
100 | ```python
101 |
102 | ```
103 |
104 | Get DSC symbol addresses
105 |
106 | ```python
107 | syms = dsc.sym_addrs([{'pattern': '.*zero.*', 'image': 'libsystem_c.dylib'}])
108 | print(syms)
109 | ```
110 |
111 | Convert between DSC offsets and addresses
112 |
113 | ```python
114 | off = dsc.a2o(7624591060)
115 | adr = dsc.o2a(61146836)
116 | ```
117 |
118 | Lookup DSC symbol by address
119 |
120 | ```python
121 | print(next(dsc.a2s([7624591060])))
122 | ```
123 | ```json
124 | {
125 | "address": 7624591060,
126 | "symbol": "__exit",
127 | "demanged": "__exit",
128 | "mapping": "__TEXT",
129 | "uuid": "3AB55994-1201-3908-BE27-52BB7EFA7573",
130 | "ext": ".21",
131 | "image": "/usr/lib/system/libsystem_kernel.dylib",
132 | "section": "__text",
133 | "segment": "__TEXT"
134 | }
135 | ```
136 |
137 |
138 | Get MachO info
139 |
140 | ```python
141 | import ipsw
142 |
143 | client = ipsw.IpswClient(base_url='tcp://127.0.0.1:3993')
144 |
145 | macho = client.macho.open("/bin/ls", arch="arm64e")
146 | print(macho)
147 | ```
148 | ```bash
149 |
150 | ```
151 |
152 | ## Community
153 |
154 | You have questions, need support, and/or just want to talk about `ipsw-py`?
155 |
156 | Here are ways to get in touch with the `ipsw-py` community:
157 |
158 | [](https://discord.gg/xx2y9yrcgs)
159 | [](https://twitter.com/blacktop__)
160 | [](https://mastodon.social/@blacktop)
161 | [](https://github.com/blacktop/ipsw/discussions)
162 |
163 | ## License
164 |
165 | MIT Copyright (c) 2023 **blacktop**
166 |
--------------------------------------------------------------------------------
/docs-requirements.txt:
--------------------------------------------------------------------------------
1 | myst-parser==1.0.0
2 | Sphinx==7.0.1
3 |
--------------------------------------------------------------------------------
/docs/_static/custom.css:
--------------------------------------------------------------------------------
1 | dl.hide-signature > dt {
2 | display: none;
3 | }
4 |
5 | dl.field-list > dt {
6 | /* prevent code blocks from forcing wrapping on the "Parameters" header */
7 | word-break: initial;
8 | }
9 |
10 | code.literal{
11 | hyphens: none;
12 | }
13 |
--------------------------------------------------------------------------------
/docs/_templates/page.html:
--------------------------------------------------------------------------------
1 | {% extends "!page.html" %}
2 | {% set css_files = css_files + ["_static/custom.css"] %}
3 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | #
2 | # ipsw-sdk-python documentation build configuration file, created by
3 | # sphinx-quickstart on Wed Sep 14 15:48:58 2016.
4 | #
5 | # This file is execfile()d with the current directory set to its
6 | # containing dir.
7 | #
8 | # Note that not all possible configuration values are present in this
9 | # autogenerated file.
10 | #
11 | # All configuration values have a default; values that are commented out
12 | # serve to show the default.
13 |
14 | # If extensions (or modules to document with autodoc) are in another directory,
15 | # add these directories to sys.path here. If the directory is relative to the
16 | # documentation root, use os.path.abspath to make it absolute, like shown here.
17 | #
18 | import datetime
19 | import os
20 | import sys
21 |
22 | sys.path.insert(0, os.path.abspath(".."))
23 |
24 |
25 | # -- General configuration ------------------------------------------------
26 |
27 | # If your documentation needs a minimal Sphinx version, state it here.
28 | #
29 | # needs_sphinx = '1.0'
30 |
31 | # Add any Sphinx extension module names here, as strings. They can be
32 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
33 | # ones.
34 | extensions = ["sphinx.ext.autodoc", "sphinx.ext.napoleon", "myst_parser"]
35 |
36 | # Add any paths that contain templates here, relative to this directory.
37 | templates_path = ["_templates"]
38 |
39 |
40 | source_suffix = {
41 | ".rst": "restructuredtext",
42 | ".txt": "markdown",
43 | ".md": "markdown",
44 | }
45 |
46 | # The encoding of source files.
47 | #
48 | # source_encoding = 'utf-8-sig'
49 |
50 | # The master toctree document.
51 | master_doc = "index"
52 |
53 | # General information about the project.
54 | project = "ipsw SDK for Python"
55 | year = datetime.datetime.now().year
56 | copyright = "%d Blacktop" % year
57 | author = "Blacktop"
58 |
59 | # The version info for the project you're documenting, acts as replacement for
60 | # |version| and |release|, also used in various other places throughout the
61 | # built documents.
62 | #
63 | # see https://github.com/pypa/setuptools_scm#usage-from-sphinx
64 | from importlib.metadata import version
65 |
66 | release = version("ipsw")
67 | # for example take major/minor
68 | version = ".".join(release.split(".")[:2])
69 |
70 | # The language for content autogenerated by Sphinx. Refer to documentation
71 | # for a list of supported languages.
72 | #
73 | # This is also used if you do content translation via gettext catalogs.
74 | # Usually you set "language" from the command line for these cases.
75 | language = "en"
76 |
77 | # There are two options for replacing |today|: either, you set today to some
78 | # non-false value, then it is used:
79 | #
80 | # today = ''
81 | #
82 | # Else, today_fmt is used as the format for a strftime call.
83 | #
84 | # today_fmt = '%B %d, %Y'
85 |
86 | # List of patterns, relative to source directory, that match files and
87 | # directories to ignore when looking for source files.
88 | # This patterns also effect to html_static_path and html_extra_path
89 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
90 |
91 | # The reST default role (used for this markup: `text`) to use for all
92 | # documents.
93 | #
94 | # default_role = None
95 |
96 | # If true, '()' will be appended to :func: etc. cross-reference text.
97 | #
98 | # add_function_parentheses = True
99 |
100 | # If true, the current module name will be prepended to all description
101 | # unit titles (such as .. function::).
102 | #
103 | add_module_names = False
104 |
105 | # If true, sectionauthor and moduleauthor directives will be shown in the
106 | # output. They are ignored by default.
107 | #
108 | # show_authors = False
109 |
110 | # The name of the Pygments (syntax highlighting) style to use.
111 | pygments_style = "sphinx"
112 |
113 | # A list of ignored prefixes for module index sorting.
114 | # modindex_common_prefix = []
115 |
116 | # If true, keep warnings as "system message" paragraphs in the built documents.
117 | # keep_warnings = False
118 |
119 | # If true, `todo` and `todoList` produce output, else they produce nothing.
120 | todo_include_todos = False
121 |
122 |
123 | # -- Options for HTML output ----------------------------------------------
124 |
125 | # The theme to use for HTML and HTML Help pages. See the documentation for
126 | # a list of builtin themes.
127 | #
128 | html_theme = "alabaster"
129 |
130 | # Theme options are theme-specific and customize the look and feel of a theme
131 | # further. For a list of options available for each theme, see the
132 | # documentation.
133 | #
134 | html_theme_options = {
135 | "description": "A Python library for the ipsw API",
136 | "fixed_sidebar": True,
137 | }
138 |
139 | # Add any paths that contain custom themes here, relative to this directory.
140 | # html_theme_path = []
141 |
142 | # The name for this set of Sphinx documents.
143 | # " v documentation" by default.
144 | #
145 | # html_title = u'ipsw-sdk-python v2.0'
146 |
147 | # A shorter title for the navigation bar. Default is the same as html_title.
148 | #
149 | # html_short_title = None
150 |
151 | # The name of an image file (relative to this directory) to place at the top
152 | # of the sidebar.
153 | #
154 | # html_logo = None
155 |
156 | # The name of an image file (relative to this directory) to use as a favicon of
157 | # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
158 | # pixels large.
159 | #
160 | # html_favicon = None
161 |
162 | # Add any paths that contain custom static files (such as style sheets) here,
163 | # relative to this directory. They are copied after the builtin static files,
164 | # so a file named "default.css" will overwrite the builtin "default.css".
165 | html_static_path = ["_static"]
166 |
167 | # Add any extra paths that contain custom files (such as robots.txt or
168 | # .htaccess) here, relative to this directory. These files are copied
169 | # directly to the root of the documentation.
170 | #
171 | # html_extra_path = []
172 |
173 | # If not None, a 'Last updated on:' timestamp is inserted at every page
174 | # bottom, using the given strftime format.
175 | # The empty string is equivalent to '%b %d, %Y'.
176 | #
177 | # html_last_updated_fmt = None
178 |
179 | # If true, SmartyPants will be used to convert quotes and dashes to
180 | # typographically correct entities.
181 | #
182 | # html_use_smartypants = True
183 |
184 | # Custom sidebar templates, maps document names to template names.
185 | #
186 | html_sidebars = {
187 | "**": [
188 | "about.html",
189 | "navigation.html",
190 | "searchbox.html",
191 | ]
192 | }
193 |
194 | # Additional templates that should be rendered to pages, maps page names to
195 | # template names.
196 | #
197 | # html_additional_pages = {}
198 |
199 | # If false, no module index is generated.
200 | #
201 | # html_domain_indices = True
202 |
203 | # If false, no index is generated.
204 | #
205 | # html_use_index = True
206 |
207 | # If true, the index is split into individual pages for each letter.
208 | #
209 | # html_split_index = False
210 |
211 | # If true, links to the reST sources are added to the pages.
212 | #
213 | # html_show_sourcelink = True
214 |
215 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
216 | #
217 | # html_show_sphinx = True
218 |
219 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
220 | #
221 | # html_show_copyright = True
222 |
223 | # If true, an OpenSearch description file will be output, and all pages will
224 | # contain a tag referring to it. The value of this option must be the
225 | # base URL from which the finished HTML is served.
226 | #
227 | # html_use_opensearch = ''
228 |
229 | # This is the file name suffix for HTML files (e.g. ".xhtml").
230 | # html_file_suffix = None
231 |
232 | # Language to be used for generating the HTML full-text search index.
233 | # Sphinx supports the following languages:
234 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
235 | # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
236 | #
237 | # html_search_language = 'en'
238 |
239 | # A dictionary with options for the search language support, empty by default.
240 | # 'ja' uses this config value.
241 | # 'zh' user can custom change `jieba` dictionary path.
242 | #
243 | # html_search_options = {'type': 'default'}
244 |
245 | # The name of a javascript file (relative to the configuration directory) that
246 | # implements a search results scorer. If empty, the default will be used.
247 | #
248 | # html_search_scorer = 'scorer.js'
249 |
250 | # Output file base name for HTML help builder.
251 | htmlhelp_basename = "ipsw-sdk-pythondoc"
252 |
253 | # -- Options for LaTeX output ---------------------------------------------
254 |
255 | latex_elements = {
256 | # The paper size ('letterpaper' or 'a4paper').
257 | #
258 | # 'papersize': 'letterpaper',
259 | # The font size ('10pt', '11pt' or '12pt').
260 | #
261 | # 'pointsize': '10pt',
262 | # Additional stuff for the LaTeX preamble.
263 | #
264 | # 'preamble': '',
265 | # Latex figure (float) alignment
266 | #
267 | # 'figure_align': 'htbp',
268 | }
269 |
270 | # Grouping the document tree into LaTeX files. List of tuples
271 | # (source start file, target name, title,
272 | # author, documentclass [howto, manual, or own class]).
273 | latex_documents = [
274 | (master_doc, "ipsw-sdk-python.tex", "ipsw-sdk-python Documentation", "Blacktop.", "manual"),
275 | ]
276 |
277 | # The name of an image file (relative to this directory) to place at the top of
278 | # the title page.
279 | #
280 | # latex_logo = None
281 |
282 | # For "manual" documents, if this is true, then toplevel headings are parts,
283 | # not chapters.
284 | #
285 | # latex_use_parts = False
286 |
287 | # If true, show page references after internal links.
288 | #
289 | # latex_show_pagerefs = False
290 |
291 | # If true, show URL addresses after external links.
292 | #
293 | # latex_show_urls = False
294 |
295 | # Documents to append as an appendix to all manuals.
296 | #
297 | # latex_appendices = []
298 |
299 | # If false, will not define \strong, \code, \titleref, \crossref ... but only
300 | # \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
301 | # packages.
302 | #
303 | # latex_keep_old_macro_names = True
304 |
305 | # If false, no module index is generated.
306 | #
307 | # latex_domain_indices = True
308 |
309 |
310 | # -- Options for manual page output ---------------------------------------
311 |
312 | # One entry per manual page. List of tuples
313 | # (source start file, name, description, authors, manual section).
314 | man_pages = [(master_doc, "ipsw-sdk-python", "ipsw-sdk-python Documentation", [author], 1)]
315 |
316 | # If true, show URL addresses after external links.
317 | #
318 | # man_show_urls = False
319 |
320 |
321 | # -- Options for Texinfo output -------------------------------------------
322 |
323 | # Grouping the document tree into Texinfo files. List of tuples
324 | # (source start file, target name, title, author,
325 | # dir menu entry, description, category)
326 | texinfo_documents = [
327 | (
328 | master_doc,
329 | "ipsw-sdk-python",
330 | "ipsw-sdk-python Documentation",
331 | author,
332 | "ipsw-sdk-python",
333 | "One line description of project.",
334 | "Miscellaneous",
335 | ),
336 | ]
337 |
338 | # Documents to append as an appendix to all manuals.
339 | #
340 | # texinfo_appendices = []
341 |
342 | # If false, no module index is generated.
343 | #
344 | # texinfo_domain_indices = True
345 |
346 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
347 | #
348 | # texinfo_show_urls = 'footnote'
349 |
350 | # If true, do not generate a @detailmenu in the "Top" node's menu.
351 | #
352 | # texinfo_no_detailmenu = False
353 |
354 |
355 | # Napoleon settings
356 | napoleon_google_docstring = True
357 | napoleon_numpy_docstring = False
358 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | ipsw SDK for Python
2 | ===================
3 |
4 | A Python library for the ipsw API. It lets you do anything the ``ipsw`` command does, but from within Python apps – download, parse, explore IPSWs, etc.
5 |
6 | For more information about the API, see its documentation. Either add ``ipsw`` to your ``requirements.txt`` file or install with pip::
12 |
13 | pip install ipsw
14 |
15 | Getting started
16 | ---------------
17 |
18 | To talk to a ipsw daemon, you first need to instantiate a client. You can use :py:func:`~ipsw.client.from_env` to connect using the default socket or the configuration in your environment:
19 |
20 | .. code-block:: python
21 |
22 | import ipsw
23 | client = ipsw.from_env()
24 |
25 | You can now run containers:
26 |
27 | .. code-block:: python
28 |
29 | >>> client.ipsw_info("iPhone15,2_16.4_20E246_Restore.ipsw")
30 | 'INFO\n'
31 |
32 |
33 | That's just a taste of what you can do with the ipsw SDK for Python. For more, :doc:`take a look at the reference <api>`.
34 |
35 | .. toctree::
36 | :hidden:
37 | :maxdepth: 2
38 |
39 | client
40 | api
41 |
--------------------------------------------------------------------------------
/ipsw/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 | from .api import APIClient
3 | from .client import IpswClient
4 | from .version import __version__
5 |
6 | __title__ = "ipsw"
7 |
--------------------------------------------------------------------------------
/ipsw/api/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 | from .client import APIClient
3 |
--------------------------------------------------------------------------------
/ipsw/api/client.py:
--------------------------------------------------------------------------------
1 | import json
2 | import struct
3 | import urllib
4 | from functools import partial
5 |
6 | import requests
7 | import requests.exceptions
8 | import websocket
9 |
10 | from ..constants import (
11 | DEFAULT_MAX_POOL_SIZE,
12 | DEFAULT_NUM_POOLS,
13 | DEFAULT_NUM_POOLS_SSH,
14 | DEFAULT_TIMEOUT_SECONDS,
15 | DEFAULT_USER_AGENT,
16 | IS_WINDOWS_PLATFORM,
17 | MINIMUM_IPSW_API_VERSION,
18 | STREAM_HEADER_SIZE_BYTES,
19 | )
20 | from ..errors import InvalidVersion, IpswException, create_api_error_from_http_exception
21 | from ..transport import UnixHTTPAdapter
22 | from ..utils import config, update_headers, utils
23 | from ..utils.json_stream import json_stream
24 | from ..utils.proxy import ProxyConfig
25 | from ..utils.socket import consume_socket_output, demux_adaptor, frames_iter
26 | from .daemon import DaemonApiMixin
27 | from .dsc import DscApiMixin
28 | from .info import InfoApiMixin
29 | from .macho import MachoApiMixin
30 |
31 | try:
32 | from ..transport import NpipeHTTPAdapter
33 | except ImportError:
34 | pass
35 |
36 | try:
37 | from ..transport import SSHHTTPAdapter
38 | except ImportError:
39 | pass
40 |
41 |
42 | class APIClient(requests.Session, DaemonApiMixin, DscApiMixin, InfoApiMixin, MachoApiMixin):
43 | """
44 | A low-level client for the ipsw API.
45 |
46 | Example:
47 |
48 | >>> import ipsw
49 | >>> client = ipsw.APIClient(base_url='unix://var/run/ipsw.sock')
50 | >>> client.version()
51 | {u'ApiVersion': u'1.0',
52 | u'Arch': u'amd64',
53 | u'BuildTime': u'2017-11-19T18:46:37.000000000+00:00',
54 | u'GitCommit': u'f4ffd2511c',
55 | u'GoVersion': u'go1.9.2',
56 | u'MinAPIVersion': u'1.0',
57 | u'Os': u'linux'}
58 |
59 | Args:
60 | base_url (str): URL to the ipsw server. For example,
61 | ``unix:///var/run/ipsw.sock`` or ``tcp://127.0.0.1:1234``.
62 | version (str): The version of the API to use. Set to ``auto`` to
63 | automatically detect the server's version. Default: ``1.35``
64 | timeout (int): Default timeout for API calls, in seconds.
65 | user_agent (str): Set a custom user agent for requests to the server.
66 | use_ssh_client (bool): If set to `True`, an ssh connection is made
67 | via shelling out to the ssh client. Ensure the ssh client is
68 | installed and configured on the host.
69 | max_pool_size (int): The maximum number of connections
70 | to save in the pool.
71 | """
72 |
73 | __attrs__ = requests.Session.__attrs__ + ["_general_configs", "_version", "base_url", "timeout"]
74 |
75 | def __init__(
76 | self,
77 | base_url=None,
78 | version=None,
79 | timeout=DEFAULT_TIMEOUT_SECONDS,
80 | user_agent=DEFAULT_USER_AGENT,
81 | num_pools=None,
82 | use_ssh_client=False,
83 | max_pool_size=DEFAULT_MAX_POOL_SIZE,
84 | ):
85 | super().__init__()
86 |
87 | self.base_url = base_url
88 | self.timeout = timeout
89 | self.headers["User-Agent"] = user_agent
90 |
91 | self._general_configs = config.load_general_config()
92 |
93 | proxy_config = self._general_configs.get("proxies", {})
94 | try:
95 | proxies = proxy_config[base_url]
96 | except KeyError:
97 | proxies = proxy_config.get("default", {})
98 |
99 | self._proxy_configs = ProxyConfig.from_dict(proxies)
100 |
101 | base_url = utils.parse_host(
102 | base_url,
103 | IS_WINDOWS_PLATFORM,
104 | )
105 | # SSH has a different default for num_pools to all other adapters
106 | num_pools = num_pools or DEFAULT_NUM_POOLS_SSH if base_url.startswith("ssh://") else DEFAULT_NUM_POOLS
107 |
108 | if base_url.startswith("http+unix://"):
109 | self._custom_adapter = UnixHTTPAdapter(
110 | base_url, timeout, pool_connections=num_pools, max_pool_size=max_pool_size
111 | )
112 | self.mount("http+ipsw://", self._custom_adapter)
113 | self._unmount("http://", "https://")
114 | # host part of URL should be unused, but is resolved by requests
115 | # module in proxy_bypass_macosx_sysconf()
116 | self.base_url = "http+ipsw://localhost"
117 | elif base_url.startswith("npipe://"):
118 | if not IS_WINDOWS_PLATFORM:
119 | raise IpswException("The npipe:// protocol is only supported on Windows")
120 | try:
121 | self._custom_adapter = NpipeHTTPAdapter(
122 | base_url, timeout, pool_connections=num_pools, max_pool_size=max_pool_size
123 | )
124 | except NameError:
125 | raise IpswException("Install pypiwin32 package to enable npipe:// support")
126 | self.mount("http+ipsw://", self._custom_adapter)
127 | self.base_url = "http+ipsw://localnpipe"
128 | elif base_url.startswith("ssh://"):
129 | try:
130 | self._custom_adapter = SSHHTTPAdapter(
131 | base_url, timeout, pool_connections=num_pools, max_pool_size=max_pool_size, shell_out=use_ssh_client
132 | )
133 | except NameError:
134 | raise IpswException("Install paramiko package to enable ssh:// support")
135 | self.mount("http+ipsw://ssh", self._custom_adapter)
136 | self._unmount("http://", "https://")
137 | self.base_url = "http+ipsw://ssh"
138 | else:
139 | self.base_url = base_url
140 |
141 | # version detection needs to be after unix adapter mounting
142 | if version is None or (isinstance(version, str) and version.lower() == "auto"):
143 | self._version = self._retrieve_server_version()
144 | else:
145 | self._version = version
146 | if not isinstance(self._version, str):
147 | raise IpswException("Version parameter must be a string or None. Found {}".format(type(version).__name__))
148 | if utils.version_lt(self._version, MINIMUM_IPSW_API_VERSION):
149 | raise InvalidVersion(
150 | "API versions below {} are no longer supported by this " "library.".format(MINIMUM_IPSW_API_VERSION)
151 | )
152 |
153 | def _retrieve_server_version(self):
154 | try:
155 | return self.version(api_version=False)["api_version"]
156 | except KeyError:
157 | raise IpswException('Invalid response from ipsw daemon: key "api_version"' " is missing.")
158 | except Exception as e:
159 | raise IpswException(f"Error while fetching server API version: {e}")
160 |
161 | def _set_request_timeout(self, kwargs):
162 | """Prepare the kwargs for an HTTP request by inserting the timeout
163 | parameter, if not already present."""
164 | kwargs.setdefault("timeout", self.timeout)
165 | return kwargs
166 |
    @update_headers
    def _post(self, url, **kwargs):
        # POST helper: applies the client's default timeout unless the
        # caller overrides it; update_headers merges session-level headers.
        return self.post(url, **self._set_request_timeout(kwargs))
170 |
    @update_headers
    def _get(self, url, **kwargs):
        # GET helper: applies the client's default timeout unless the
        # caller overrides it; update_headers merges session-level headers.
        return self.get(url, **self._set_request_timeout(kwargs))
174 |
    @update_headers
    def _put(self, url, **kwargs):
        # PUT helper: applies the client's default timeout unless the
        # caller overrides it; update_headers merges session-level headers.
        return self.put(url, **self._set_request_timeout(kwargs))
178 |
    @update_headers
    def _delete(self, url, **kwargs):
        # DELETE helper: applies the client's default timeout unless the
        # caller overrides it; update_headers merges session-level headers.
        return self.delete(url, **self._set_request_timeout(kwargs))
182 |
183 | def _url(self, pathfmt, *args, **kwargs):
184 | for arg in args:
185 | if not isinstance(arg, str):
186 | raise ValueError("Expected a string but found {} ({}) " "instead".format(arg, type(arg)))
187 |
188 | quote_f = partial(urllib.parse.quote, safe="/:")
189 | args = map(quote_f, args)
190 |
191 | if kwargs.get("versioned_api", True):
192 | return "{}/v{}{}".format(self.base_url, self._version, pathfmt.format(*args))
193 | else:
194 | return f"{self.base_url}{pathfmt.format(*args)}"
195 |
    def _raise_for_status(self, response):
        """Raises stored :class:`APIError`, if one occurred."""
        try:
            response.raise_for_status()
        except requests.exceptions.HTTPError as e:
            # Translate requests' HTTPError into this SDK's APIError/NotFound
            # hierarchy (see errors.create_api_error_from_http_exception).
            raise create_api_error_from_http_exception(e) from e
202 |
203 | def _result(self, response, json=False, binary=False):
204 | assert not (json and binary)
205 | self._raise_for_status(response)
206 |
207 | if json:
208 | return response.json()
209 | if binary:
210 | return response.content
211 | return response.text
212 |
213 | def _post_json(self, url, data, **kwargs):
214 | # Go <1.1 can't unserialize null to a string
215 | # so we do this disgusting thing here.
216 | data2 = {}
217 | if data is not None and isinstance(data, dict):
218 | for k, v in iter(data.items()):
219 | if v is not None:
220 | data2[k] = v
221 | elif data is not None:
222 | data2 = data
223 |
224 | if "headers" not in kwargs:
225 | kwargs["headers"] = {}
226 | kwargs["headers"]["Content-Type"] = "application/json"
227 | return self._post(url, data=json.dumps(data2), **kwargs)
228 |
229 | def _attach_params(self, override=None):
230 | return override or {"stdout": 1, "stderr": 1, "stream": 1}
231 |
    def _create_websocket_connection(self, url):
        # Thin seam around the websocket-client package's create_connection.
        return websocket.create_connection(url)
234 |
    def _get_raw_response_socket(self, response):
        """Extract the underlying socket object from a requests response.

        The attribute path into urllib3's internals differs per transport
        (npipe socket, paramiko SSH channel, plain/TLS socket), hence the
        dispatch on base_url below.
        """
        self._raise_for_status(response)
        if self.base_url == "http+ipsw://localnpipe":
            sock = response.raw._fp.fp.raw.sock
        elif self.base_url.startswith("http+ipsw://ssh"):
            # The SSH transport exposes a paramiko channel, not a raw socket.
            sock = response.raw._fp.fp.channel
        else:
            sock = response.raw._fp.fp.raw
            if self.base_url.startswith("https://"):
                # TLS wraps the real socket one level deeper.
                sock = sock._sock
        try:
            # Keep a reference to the response to stop it being garbage
            # collected. If the response is garbage collected, it will
            # close TLS sockets.
            sock._response = response
        except AttributeError:
            # UNIX sockets can't have attributes set on them, but that's
            # fine because we won't be doing TLS over them
            pass

        return sock
256 |
    def _stream_helper(self, response, decode=False):
        """Generator for data coming from a chunked-encoded HTTP response.

        With ``decode=True`` the chunk stream is parsed into JSON objects
        (dicts) on the fly; otherwise raw byte chunks are yielded. A
        non-chunked response (usually an immediate error) is yielded whole.
        """

        if response.raw._fp.chunked:
            if decode:
                # json_stream re-assembles JSON documents that may span
                # multiple transport chunks.
                yield from json_stream(self._stream_helper(response, False))
            else:
                reader = response.raw
                while not reader.closed:
                    # this read call will block until we get a chunk
                    data = reader.read(1)
                    if not data:
                        break
                    # NOTE(review): relies on urllib3's private chunk
                    # bookkeeping (_fp.chunk_left) to drain the remainder
                    # of the current chunk in one read.
                    if reader._fp.chunk_left:
                        data += reader.read(reader._fp.chunk_left)
                    yield data
        else:
            # Response isn't chunked, meaning we probably
            # encountered an error immediately
            yield self._result(response, json=decode)
277 |
278 | def _multiplexed_buffer_helper(self, response):
279 | """A generator of multiplexed data blocks read from a buffered
280 | response."""
281 | buf = self._result(response, binary=True)
282 | buf_length = len(buf)
283 | walker = 0
284 | while True:
285 | if buf_length - walker < STREAM_HEADER_SIZE_BYTES:
286 | break
287 | header = buf[walker : walker + STREAM_HEADER_SIZE_BYTES]
288 | _, length = struct.unpack_from(">BxxxL", header)
289 | start = walker + STREAM_HEADER_SIZE_BYTES
290 | end = start + length
291 | walker = end
292 | yield buf[start:end]
293 |
    def _multiplexed_response_stream_helper(self, response):
        """A generator of multiplexed data blocks coming from a response
        stream: reads 8-byte frame headers and yields each frame's payload.
        """

        # Disable timeout on the underlying socket to prevent
        # Read timed out(s) for long running processes
        socket = self._get_raw_response_socket(response)
        self._disable_socket_timeout(socket)

        while True:
            header = response.raw.read(STREAM_HEADER_SIZE_BYTES)
            if not header:
                break
            # ">BxxxL": stream-id byte, 3 pad bytes, big-endian length.
            _, length = struct.unpack(">BxxxL", header)
            if not length:
                # Zero-length frame: nothing to yield for this header.
                continue
            data = response.raw.read(length)
            if not data:
                break
            yield data
314 |
    def _stream_raw_result(self, response, chunk_size=1, decode=True):
        """Stream result for TTY-enabled container and raw binary data.

        Yields chunks of at most ``chunk_size``; ``decode`` is forwarded to
        requests' iter_content as its decode_unicode argument.
        """
        self._raise_for_status(response)

        # Disable timeout on the underlying socket to prevent
        # Read timed out(s) for long running processes
        socket = self._get_raw_response_socket(response)
        self._disable_socket_timeout(socket)

        yield from response.iter_content(chunk_size, decode)
325 |
    def _read_from_socket(self, response, stream, tty=True, demux=False):
        """Consume all data from the socket, close the response and return the
        data. If stream=True, then a generator is returned instead and the
        caller is responsible for closing the response.
        """
        socket = self._get_raw_response_socket(response)

        # frames_iter splits the wire format into (stream_id, data) frames.
        gen = frames_iter(socket, tty)

        if demux:
            # The generator will output tuples (stdout, stderr)
            gen = (demux_adaptor(*frame) for frame in gen)
        else:
            # The generator will output strings
            gen = (data for (_, data) in gen)

        if stream:
            return gen
        else:
            try:
                # Wait for all frames, concatenate them, and return the result
                return consume_socket_output(gen, demux=demux)
            finally:
                response.close()
350 |
351 | def _disable_socket_timeout(self, socket):
352 | """Depending on the combination of python version and whether we're
353 | connecting over http or https, we might need to access _sock, which
354 | may or may not exist; or we may need to just settimeout on socket
355 | itself, which also may or may not have settimeout on it. To avoid
356 | missing the correct one, we try both.
357 |
358 | We also do not want to set the timeout if it is already disabled, as
359 | you run the risk of changing a socket that was non-blocking to
360 | blocking, for example when using gevent.
361 | """
362 | sockets = [socket, getattr(socket, "_sock", None)]
363 |
364 | for s in sockets:
365 | if not hasattr(s, "settimeout"):
366 | continue
367 |
368 | timeout = -1
369 |
370 | if hasattr(s, "gettimeout"):
371 | timeout = s.gettimeout()
372 |
373 | # Don't change the timeout if it is already disabled.
374 | if timeout is None or timeout == 0.0:
375 | continue
376 |
377 | s.settimeout(None)
378 |
379 | def _unmount(self, *args):
380 | for proto in args:
381 | self.adapters.pop(proto)
382 |
383 | def get_adapter(self, url):
384 | try:
385 | return super().get_adapter(url)
386 | except requests.exceptions.InvalidSchema as e:
387 | if self._custom_adapter:
388 | return self._custom_adapter
389 | else:
390 | raise e
391 |
    @property
    def api_version(self):
        # The negotiated (or caller-pinned) API version string, e.g. "1.0".
        return self._version
395 |
--------------------------------------------------------------------------------
/ipsw/api/daemon.py:
--------------------------------------------------------------------------------
class DaemonApiMixin:
    # def events(self, since=None, until=None, filters=None, decode=None):
    #     """
    #     Get real-time events from the server. Similar to the ``ipsw events``
    #     command.

    #     Args:
    #         since (UTC datetime or int): Get events from this point
    #         until (UTC datetime or int): Get events until this point
    #         filters (dict): Filter the events by event time, container or image
    #         decode (bool): If set to true, stream will be decoded into dicts on
    #             the fly. False by default.

    #     Returns:
    #         A :py:class:`ipsw.types.daemon.CancellableStream` generator

    #     Raises:
    #         :py:class:`ipsw.errors.APIError`
    #             If the server returns an error.
    #     """

    #     if isinstance(since, datetime):
    #         since = utils.datetime_to_timestamp(since)

    #     if isinstance(until, datetime):
    #         until = utils.datetime_to_timestamp(until)

    #     if filters:
    #         filters = utils.convert_filters(filters)

    #     params = {
    #         'since': since,
    #         'until': until,
    #         'filters': filters
    #     }
    #     url = self._url('/events')

    #     response = self._get(url, params=params, stream=True, timeout=None)
    #     stream = self._stream_helper(response, decode=decode)

    #     return types.CancellableStream(stream, response)

    def ping(self):
        """
        Checks the server is responsive. An exception will be raised if it
        isn't responding.

        Returns:
            (bool) The response from the server.

        Raises:
            :py:class:`ipsw.errors.APIError`
                If the server returns an error.
        """
        url = self._url("/_ping")
        return self._result(self._get(url)) == "OK"

    def version(self, api_version=True):
        """
        Returns version information from the server. Similar to the ``ipsw
        version`` command.

        Returns:
            (dict): The server version information

        Raises:
            :py:class:`ipsw.errors.APIError`
                If the server returns an error.
        """
        endpoint = self._url("/version", versioned_api=api_version)
        response = self._get(endpoint)
        return self._result(response, json=True)
89 |
--------------------------------------------------------------------------------
/ipsw/api/dsc.py:
--------------------------------------------------------------------------------
class DscApiMixin:
    """API endpoints for inspecting dyld_shared_cache (DSC) files."""

    def dsc_a2o(self, path=None, addr=0):
        """
        Convert virtual address to offset. Identical to the ``ipsw dyld a2o``
        command.

        Args:
            path (str): The path to the dyld_shared_cache file.
            addr (int): The address to convert to an offset.

        Raises:
            :py:class:`ipsw.errors.APIError`
                If the server returns an error.
        """
        payload = {"path": path, "addr": addr}
        response = self._post_json(self._url("/dsc/a2o"), data=payload)
        return self._result(response, True)

    def dsc_a2s(self, path=None, addrs=None, decode=False):
        """
        Lookup symbol for address. Identical to the ``ipsw dyld a2s``
        command.

        Args:
            path (str): The path to the dyld_shared_cache file.
            addrs ([int]): List of addresses of the symbols to lookup.
            decode (bool): If set to true, stream will be decoded into dicts
                on the fly. False by default.

        Raises:
            :py:class:`ipsw.errors.APIError`
                If the server returns an error.
        """
        payload = {"path": path, "addrs": addrs}
        response = self._post_json(self._url("/dsc/a2s"), data=payload, stream=True, timeout=None)
        return self._stream_helper(response, decode=decode)

    def dsc_o2a(self, path=None, off=0):
        """
        Convert offset to virtual address. Identical to the ``ipsw dyld o2a``
        command.

        Args:
            path (str): The path to the dyld_shared_cache file.
            off (int): The offset to convert to an address.

        Raises:
            :py:class:`ipsw.errors.APIError`
                If the server returns an error.
        """
        payload = {"path": path, "off": off}
        response = self._post_json(self._url("/dsc/o2a"), data=payload)
        return self._result(response, True)

    def dsc_info(self, path=None):
        """
        Display DSC header information. Identical to the
        ``ipsw dyld info --dylibs --json`` command.

        Args:
            path (str): The path to the dyld_shared_cache file.

        Returns:
            (dict): The info as a dict

        Raises:
            :py:class:`ipsw.errors.APIError`
                If the server returns an error.
        """
        response = self._get(self._url("/dsc/info"), params={"path": path})
        return self._result(response, True)

    def dsc_macho(self, path=None, dylib=None):
        """
        Display DSC dylib information. Identical to the
        ``ipsw dyld macho DSC DYLIB --json`` command.

        Args:
            path (str): The path to the dyld_shared_cache file.
            dylib (str): The dylib image to inspect.

        Returns:
            (dict): The info as a dict

        Raises:
            :py:class:`ipsw.errors.APIError`
                If the server returns an error.
        """
        query = {"path": path, "dylib": dylib}
        response = self._get(self._url("/dsc/macho"), params=query)
        return self._result(response, True)

    def dsc_sym_addrs(self, path=None, lookups=None):
        """
        Lookup addresses for symbol queries. Identical to the
        ``ipsw dyld symaddr`` flow.

        Args:
            path (str): The path to the dyld_shared_cache file.
            lookups (dict): Symbol lookups.

        Raises:
            :py:class:`ipsw.errors.APIError`
                If the server returns an error.
        """
        payload = {"path": path, "lookups": lookups}
        response = self._post_json(self._url("/dsc/symaddr"), data=payload)
        return self._result(response, True)

    def dsc_slide_info(self, path=None, auth=False, decode=False):
        """
        Display DSC slide info. Identical to the ``ipsw dyld slide``
        command.

        Args:
            path (str): The path to the dyld_shared_cache file.
            auth (bool): Filter to only ``auth`` slide-info. False by default.
            decode (bool): If set to true, stream will be decoded into dicts
                on the fly. False by default.

        Raises:
            :py:class:`ipsw.errors.APIError`
                If the server returns an error.
        """
        payload = {"path": path, "type": "auth" if auth else ""}
        response = self._post_json(self._url("/dsc/slide"), data=payload, stream=True, timeout=None)
        return self._stream_helper(response, decode=decode)
120 |
--------------------------------------------------------------------------------
/ipsw/api/info.py:
--------------------------------------------------------------------------------
class InfoApiMixin:
    """API endpoints for local and remote IPSW/OTA metadata."""

    def ipsw_info(self, path=None):
        """
        Display IPSW information. Identical to the ``ipsw info``
        command.

        Args:
            path (str): Path to the IPSW file.

        Returns:
            (dict): The info as a dict

        Raises:
            :py:class:`ipsw.errors.APIError`
                If the server returns an error.
        """
        return self._result(self._get(self._url("/info/ipsw"), params={"path": path}), True)

    def ota_info(self, path=None):
        """
        Display OTA information. Identical to the ``ipsw info``
        command.

        Args:
            path (str): Path to the OTA file.

        Returns:
            (dict): The info as a dict

        Raises:
            :py:class:`ipsw.errors.APIError`
                If the server returns an error.
        """
        return self._result(self._get(self._url("/info/ota"), params={"path": path}), True)

    def remote_ipsw_info(self, url=None, proxy=None, insecure=False):
        """
        Display remote IPSW information. Identical to the ``ipsw info --remote``
        command.

        Args:
            url (str): URL of the remote IPSW.
            proxy (str): HTTP proxy to use for the download.
            insecure (bool): Skip TLS verification. False by default.

        Returns:
            (dict): The info as a dict

        Raises:
            :py:class:`ipsw.errors.APIError`
                If the server returns an error.
        """
        # BUG FIX: `params={url, proxy, insecure}` built a *set literal*
        # (unnamed values, arbitrary order), not a query-parameter dict.
        params = {"url": url, "proxy": proxy, "insecure": insecure}
        return self._result(self._get(self._url("/info/ipsw/remote"), params=params), True)

    def remote_ota_info(self, url=None, proxy=None, insecure=False):
        """
        Display remote OTA information. Identical to the ``ipsw info --remote``
        command.

        Args:
            url (str): URL of the remote OTA.
            proxy (str): HTTP proxy to use for the download.
            insecure (bool): Skip TLS verification. False by default.

        Returns:
            (dict): The info as a dict

        Raises:
            :py:class:`ipsw.errors.APIError`
                If the server returns an error.
        """
        # BUG FIX: same set-literal-instead-of-dict defect as above.
        params = {"url": url, "proxy": proxy, "insecure": insecure}
        return self._result(self._get(self._url("/info/ota/remote"), params=params), True)
57 |
--------------------------------------------------------------------------------
/ipsw/api/macho.py:
--------------------------------------------------------------------------------
class MachoApiMixin:
    """API endpoint for MachO binary metadata."""

    def macho_info(self, path=None, arch=None):
        """
        Display MachO header information. Identical to the
        ``ipsw macho info --json`` command.

        Args:
            path (str): Path to the MachO binary.
            arch (str): Architecture to select.

        Returns:
            (dict): The info as a dict

        Raises:
            :py:class:`ipsw.errors.APIError`
                If the server returns an error.
        """
        query = {"path": path, "arch": arch}
        response = self._get(self._url("/macho/info"), params=query)
        return self._result(response, True)
15 |
--------------------------------------------------------------------------------
/ipsw/client.py:
--------------------------------------------------------------------------------
1 | from .api.client import APIClient
2 | from .constants import DEFAULT_TIMEOUT_SECONDS, DEFAULT_MAX_POOL_SIZE
3 | from .models.dsc import DscCollection
4 | from .models.info import InfoCollection
5 | from .models.macho import MachoCollection
6 | from .utils import kwargs_from_env
7 |
8 |
class IpswClient:
    """
    A client for communicating with a ipsw server.

    Example:

        >>> import ipsw
        >>> client = ipsw.IpswClient(base_url='unix://var/run/ipsw.sock')

    Args:
        base_url (str): URL to the ipsw server. For example,
            ``unix:///var/run/ipsw.sock`` or ``tcp://127.0.0.1:8080``.
        version (str): The version of the API to use. Set to ``auto`` to
            automatically detect the server's version. Default: ``1.0``
        timeout (int): Default timeout for API calls, in seconds.
        user_agent (str): Set a custom user agent for requests to the server.
        use_ssh_client (bool): If set to `True`, an ssh connection is made
            via shelling out to the ssh client. Ensure the ssh client is
            installed and configured on the host.
        max_pool_size (int): The maximum number of connections
            to save in the pool.
    """

    def __init__(self, *args, **kwargs):
        # All work is delegated to the low-level APIClient; this class only
        # wraps it with model collections and convenience methods.
        self.api = APIClient(*args, **kwargs)

    @classmethod
    def from_env(cls, **kwargs):
        """
        Return a client configured from environment variables.

        The environment variables used are the same as those used by the
        ipsw command-line client. They are:

        .. envvar:: IPSW_HOST

            The URL to the ipsw host.

        Args:
            version (str): The version of the API to use. Set to ``auto`` to
                automatically detect the server's version. Default: ``auto``
            timeout (int): Default timeout for API calls, in seconds.
            max_pool_size (int): The maximum number of connections
                to save in the pool.
            assert_hostname (bool): Verify the hostname of the server.
            environment (dict): The environment to read environment variables
                from. Default: the value of ``os.environ``
            use_ssh_client (bool): If set to `True`, an ssh connection is
                made via shelling out to the ssh client. Ensure the ssh
                client is installed and configured on the host.

        Example:

            >>> import ipsw
            >>> client = ipsw.from_env()

        """
        # Pop the kwargs this constructor handles itself; everything else is
        # resolved from the environment by kwargs_from_env().
        timeout = kwargs.pop("timeout", DEFAULT_TIMEOUT_SECONDS)
        max_pool_size = kwargs.pop("max_pool_size", DEFAULT_MAX_POOL_SIZE)
        version = kwargs.pop("version", None)
        use_ssh_client = kwargs.pop("use_ssh_client", False)
        return cls(
            timeout=timeout,
            max_pool_size=max_pool_size,
            version=version,
            use_ssh_client=use_ssh_client,
            **kwargs_from_env(**kwargs),
        )

    @property
    def dsc(self):
        """
        An object for getting DSC info.
        """
        return DscCollection(client=self)

    @property
    def info(self):
        """
        An object for getting local/remote IPSW/OTA info.
        """
        return InfoCollection(client=self)

    @property
    def macho(self):
        """
        An object for getting MachO info.
        """
        return MachoCollection(client=self)

    # Top-level methods
    def ping(self, *args, **kwargs):
        return self.api.ping(*args, **kwargs)

    ping.__doc__ = APIClient.ping.__doc__

    def version(self, *args, **kwargs):
        return self.api.version(*args, **kwargs)

    version.__doc__ = APIClient.version.__doc__

    def close(self):
        return self.api.close()

    close.__doc__ = APIClient.close.__doc__

    def __getattr__(self, name):
        s = [f"'IpswClient' object has no attribute '{name}'"]
        # If a user calls a low-level method on this high-level client,
        # point them at the APIClient instance (self.api) instead of just
        # failing with a bare AttributeError.
        if hasattr(APIClient, name):
            s.append(
                "In ipsw SDK for Python 2.0, this method is now on the "
                "object APIClient. See the low-level API section of the "
                "documentation for more details."
            )
        raise AttributeError(" ".join(s))


# Module-level convenience alias: ipsw.from_env() == IpswClient.from_env().
from_env = IpswClient.from_env
128 |
--------------------------------------------------------------------------------
/ipsw/constants.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from .version import __version__
3 |
# Default/minimum server API versions this client speaks.
DEFAULT_IPSW_API_VERSION = "1.0"
MINIMUM_IPSW_API_VERSION = "1.0"
# Default per-request timeout, in seconds.
DEFAULT_TIMEOUT_SECONDS = 60
# Size of the frame header used by multiplexed streams:
# 1 stream-id byte, 3 padding bytes, 4-byte big-endian payload length.
STREAM_HEADER_SIZE_BYTES = 8

DEFAULT_HTTP_HOST = "127.0.0.1"
DEFAULT_UNIX_SOCKET = "http+unix:///var/run/ipsw.sock"
DEFAULT_NPIPE = "npipe:////./pipe/ipsw"

# Multipliers for parsing human-readable byte sizes (e.g. "10m").
BYTE_UNITS = {"b": 1, "k": 1024, "m": 1024 * 1024, "g": 1024 * 1024 * 1024}

IS_WINDOWS_PLATFORM = sys.platform == "win32"
# NT path prefix that lifts Windows' MAX_PATH limit.
WINDOWS_LONGPATH_PREFIX = "\\\\?\\"

DEFAULT_USER_AGENT = f"ipsw-sdk-python/{__version__}"
DEFAULT_NUM_POOLS = 25

# The OpenSSH server default value for MaxSessions is 10 which means we can
# use up to 9, leaving the final session for the underlying SSH connection.
DEFAULT_NUM_POOLS_SSH = 9

DEFAULT_MAX_POOL_SIZE = 10

DEFAULT_DATA_CHUNK_SIZE = 1024 * 2048
28 |
--------------------------------------------------------------------------------
/ipsw/errors.py:
--------------------------------------------------------------------------------
1 | import requests
2 |
3 |
class IpswException(Exception):
    """
    A base class from which all other exceptions inherit.
    If you want to catch all errors that the ipsw SDK might raise,
    catch this base exception.
    """
10 |
11 |
def create_api_error_from_http_exception(e):
    """
    Create a suitable APIError from requests.exceptions.HTTPError.

    404 responses become NotFound; everything else becomes APIError. The
    server's "error" JSON field (or, failing that, the response text) is
    attached as the explanation.
    """
    response = e.response
    try:
        explanation = response.json()["error"]
    except (ValueError, KeyError):
        # BUG FIX: use response.text (str), not response.content (bytes) —
        # a bytes explanation rendered as b'...' inside APIError.__str__.
        # Also catch KeyError: valid JSON without an "error" key used to
        # escape this handler entirely.
        explanation = (response.text or "").strip()
    cls = APIError
    if response.status_code == 404:
        cls = NotFound
    raise cls(e, response=response, explanation=explanation) from e
25 |
26 |
class APIError(requests.exceptions.HTTPError, IpswException):
    """
    An HTTP error returned by the ipsw API.
    """

    def __init__(self, message, response=None, explanation=None):
        # requests 1.2 supports response as a keyword argument, but
        # requests 1.1 doesn't
        super().__init__(message)
        self.response = response
        self.explanation = explanation

    def __str__(self):
        message = super().__str__()

        if self.is_client_error():
            message = (
                f"{self.response.status_code} Client Error for "
                f"{self.response.url}: {self.response.reason}"
            )
        elif self.is_server_error():
            message = (
                f"{self.response.status_code} Server Error for "
                f"{self.response.url}: {self.response.reason}"
            )

        if self.explanation:
            message = f'{message} ("{self.explanation}")'

        return message

    @property
    def status_code(self):
        # None when the error was constructed without a response.
        if self.response is not None:
            return self.response.status_code

    def is_error(self):
        return self.is_client_error() or self.is_server_error()

    def is_client_error(self):
        code = self.status_code
        return code is not None and 400 <= code < 500

    def is_server_error(self):
        code = self.status_code
        return code is not None and 500 <= code < 600
74 |
75 |
class NotFound(APIError):
    # Raised for HTTP 404 responses; see create_api_error_from_http_exception.
    pass
78 |
79 |
class InvalidVersion(IpswException):
    # Raised when the requested API version is older than the minimum the
    # client supports.
    pass
82 |
83 |
class StreamParseError(RuntimeError):
    """Raised when a stream from the daemon cannot be parsed.

    The reason is kept on ``msg`` for backward compatibility and also
    forwarded to RuntimeError so ``str(exc)`` is meaningful.
    """

    def __init__(self, reason):
        # BUG FIX: forward reason to RuntimeError — previously args stayed
        # empty, so str(exc) rendered as "".
        super().__init__(reason)
        self.msg = reason
87 |
--------------------------------------------------------------------------------
/ipsw/models/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blacktop/ipsw-py/8ef01abf7249fdb8f3d2722d934f356f0da01819/ipsw/models/__init__.py
--------------------------------------------------------------------------------
/ipsw/models/configs.py:
--------------------------------------------------------------------------------
1 | from ..api import APIClient
2 | from .resource import Model, Collection
3 |
4 |
class Config(Model):
    """A config."""

    id_attribute = "ID"

    def __repr__(self):
        return f"<{self.__class__.__name__}: '{self.name}'>"

    @property
    def name(self):
        # The config name lives under Spec.Name in the raw attrs.
        return self.attrs["Spec"]["Name"]

    def remove(self):
        """
        Remove this config.

        Raises:
            :py:class:`ipsw.errors.APIError`
                If config failed to remove.
        """
        return self.client.api.remove_config(self.id)


# BUG FIX: the `class ConfigCollection(Collection):` header was missing,
# which left `model = Config` (and the collection methods) stranded inside
# Config's own body — `model = Config` there raises NameError at import
# time because Config is not yet bound while its class body executes.
class ConfigCollection(Collection):
    """Configs on the ipsw server."""

    model = Config

    def create(self, **kwargs):
        obj = self.client.api.create_config(**kwargs)
        return self.prepare_model(obj)

    create.__doc__ = APIClient.create_config.__doc__

    def get(self, config_id):
        """
        Get a config.

        Args:
            config_id (str): Config ID.

        Returns:
            (:py:class:`Config`): The config.

        Raises:
            :py:class:`ipsw.errors.NotFound`
                If the config does not exist.
            :py:class:`ipsw.errors.APIError`
                If the server returns an error.
        """
        return self.prepare_model(self.client.api.inspect_config(config_id))

    def list(self, **kwargs):
        """
        List configs. Similar to the ``ipsw config ls`` command.

        Args:
            filters (dict): Server-side list filtering options.

        Returns:
            (list of :py:class:`Config`): The configs.

        Raises:
            :py:class:`ipsw.errors.APIError`
                If the server returns an error.
        """
        resp = self.client.api.configs(**kwargs)
        return [self.prepare_model(obj) for obj in resp]
70 |
--------------------------------------------------------------------------------
/ipsw/models/dsc.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from ..api import APIClient
4 | from .resource import Collection, Model
5 |
6 |
class DSC(Model):
    """
    DSC (dyld_shared_cache) info, wrapping the dict returned by the
    server's dsc_info endpoint plus the cache path it was opened from.
    """

    def __repr__(self):
        return f"<{self.__class__.__name__}: '({self.magic}) - {self.platform} - {self.uuid}'>"

    @property
    def magic(self):
        """The header magic."""
        return self.attrs["info"].get("magic")

    @property
    def uuid(self):
        """The header UUID."""
        return self.attrs["info"].get("uuid")

    @property
    def platform(self):
        """The header platform."""
        return self.attrs["info"].get("platform")

    @property
    def dylibs(self):
        """The dylibs listed in the cache info."""
        return self.attrs["info"].get("dylibs")

    @property
    def info(self):
        """The raw DSC info dict."""
        return self.attrs.get("info")

    @property
    def path(self):
        """The DSC file path."""
        return self.attrs.get("path")

    def a2o(self, addr=0):
        """
        Convert a virtual address to a file offset.
        """
        return self.client.api.dsc_a2o(self.path, addr)

    def a2s(self, addrs=None, decode=False):
        """
        Look up symbols for the given addresses.
        """
        return self.client.api.dsc_a2s(self.path, addrs, decode)

    def o2a(self, off=0):
        """
        Convert a file offset to a virtual address.
        """
        return self.client.api.dsc_o2a(self.path, off)

    def dylib(self, dylib=None):
        """
        Get MachO info for a single dylib in the cache.
        """
        return Dylib(
            image_name=dylib,
            attrs=self.client.api.dsc_macho(self.path, dylib),
            client=self.client,
            collection=self,
        )

    def sym_addrs(self, lookups=None):
        """
        Look up addresses for symbol queries.
        """
        return self.client.api.dsc_sym_addrs(self.path, lookups)

    def slide_infos(self, auth=False, decode=False):
        """
        Get the cache's slide info.
        """
        return self.client.api.dsc_slide_info(self.path, auth, decode)
102 |
103 |
class Dylib(Model):
    """
    MachO info for a single dylib image inside a shared cache.
    """

    def __init__(self, image_name, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Name of the image inside the cache this info belongs to.
        self.image_name = image_name

    def __repr__(self):
        return f"<{self.__class__.__name__}: '{self.magic} {self.cpu} ({self.sub_cpu})'>"

    @property
    def magic(self):
        """The MachO header magic."""
        return self.attrs["macho"]["header"].get("magic")

    @property
    def cpu(self):
        """The MachO header CPU."""
        return self.attrs["macho"]["header"].get("cpu")

    @property
    def sub_cpu(self):
        """The MachO header sub-CPU."""
        return self.attrs["macho"]["header"].get("subcpu")

    @property
    def header(self):
        """The full MachO header dict."""
        return self.attrs["macho"].get("header")

    @property
    def load_commands(self):
        """The MachO load commands."""
        return self.attrs["macho"].get("loads")
155 |
156 |
class Rebase(Model):
    """A single pointer rebase entry from the cache's slide info."""

    def __repr__(self):
        return f"<{self.__class__.__name__}: '{self.pointer}'>"

    @property
    def pointer(self):
        """The rebase pointer value (``None`` if absent)."""
        return self.attrs.get("pointer")
174 |
175 |
class DscCollection(Collection):
    """Collection of dyld_shared_cache objects exposed by the daemon."""

    model = DSC

    def open(self, path=None):
        """
        Open a shared cache on the daemon and wrap the returned info.

        :param path: path of the cache file to inspect
        :returns: a :class:`DSC` model
        """
        info = self.client.api.dsc_info(path)
        return self.prepare_model(info)
184 |
--------------------------------------------------------------------------------
/ipsw/models/info.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from ..api import APIClient
4 | from .resource import Collection, Model
5 |
6 |
class Info(Model):
    """
    IPSW/OTA info.
    """

    def __repr__(self):
        return "<{}: '{} ({})'>".format(
            self.__class__.__name__,
            self.version,
            self.build,
        )

    @property
    def version(self):
        """
        The iOS version (``ProductVersion`` from the restore plist).
        """
        return self.attrs["info"]["Plists"]["restore"].get("ProductVersion", None)

    @property
    def build(self):
        """
        The iOS build number (``ProductBuildVersion`` from the restore
        plist).
        """
        return self.attrs["info"]["Plists"]["restore"].get("ProductBuildVersion", None)

    @property
    def devices(self):
        """
        Sorted list of device product names found in the DeviceTrees.
        """
        devices = set()
        for dt in self.attrs["info"]["DeviceTrees"].values():
            for child in dt["device-tree"]["children"]:
                if "product" in child:
                    devices.add(child["product"]["product-name"])
        # sorted() already returns a new list; no mutate-then-return needed.
        return sorted(devices)
46 |
47 |
class InfoCollection(Collection):
    model = Info

    def get(self, ipsw=None, ota=None, url=None):
        """
        Fetch IPSW/OTA info.

        Exactly one of ``url``, ``ipsw`` or ``ota`` should be given; they
        are consulted in that order of precedence.

        :param ipsw: path to a local IPSW file
        :param ota: path to a local OTA file
        :param url: remote IPSW URL
        :returns: an :class:`Info` model, or ``None`` if no source was given
        """
        if url:
            return self.prepare_model(self.client.api.remote_ipsw_info(url))
        if ipsw:
            return self.prepare_model(self.client.api.ipsw_info(os.path.abspath(ipsw)))
        if ota:
            # BUG FIX: this branch previously passed ``ipsw`` (None here),
            # so os.path.abspath() raised TypeError. Use ``ota`` instead.
            # NOTE(review): reuses the ipsw_info endpoint for OTA files —
            # confirm there is no dedicated OTA endpoint in APIClient.
            return self.prepare_model(self.client.api.ipsw_info(os.path.abspath(ota)))
        return None
62 |
--------------------------------------------------------------------------------
/ipsw/models/macho.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from ..api import APIClient
4 | from .resource import Collection, Model
5 |
6 |
class Macho(Model):
    """Mach-O file info, as returned by the daemon."""

    def __repr__(self):
        cls_name = self.__class__.__name__
        return f"<{cls_name}: '{self.magic} {self.cpu} ({self.sub_cpu})'>"

    @property
    def magic(self):
        """Magic value from the Mach-O header (``None`` if absent)."""
        return self.attrs["info"]["header"].get("magic")

    @property
    def cpu(self):
        """CPU type from the Mach-O header (``None`` if absent)."""
        return self.attrs["info"]["header"].get("cpu")

    @property
    def sub_cpu(self):
        """CPU subtype from the Mach-O header (``None`` if absent)."""
        return self.attrs["info"]["header"].get("subcpu")

    @property
    def header(self):
        """The full Mach-O header dict (``None`` if absent)."""
        return self.attrs["info"].get("header")

    @property
    def path(self):
        """Filesystem path of the Mach-O file (``None`` if absent)."""
        return self.attrs.get("path")
54 |
55 |
class MachoCollection(Collection):
    """Collection of Mach-O files exposed by the daemon."""

    model = Macho

    def open(self, path=None, arch=None):
        """
        Inspect a Mach-O file and wrap the returned info.

        :param path: path of the Mach-O file
        :param arch: architecture slice to select (for fat binaries)
        :returns: a :class:`Macho` model
        """
        info = self.client.api.macho_info(path, arch)
        return self.prepare_model(info)
64 |
--------------------------------------------------------------------------------
/ipsw/models/resource.py:
--------------------------------------------------------------------------------
class Model:
    """
    A base class for representing a single object on the server.
    """

    # Attribute key that holds the object's unique identifier.
    id_attribute = "Id"

    def __init__(self, attrs=None, client=None, collection=None):
        #: A client pointing at the server that this object is on.
        self.client = client

        #: The collection that this model is part of.
        self.collection = collection

        #: The raw representation of this object from the API
        self.attrs = {} if attrs is None else attrs

    def __repr__(self):
        return f"<{self.__class__.__name__}: {self.short_id}>"

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.id == other.id

    def __hash__(self):
        return hash(f"{self.__class__.__name__}:{self.id}")

    @property
    def id(self):
        """
        The ID of the object.
        """
        return self.attrs.get(self.id_attribute)

    @property
    def short_id(self):
        """
        The ID of the object, truncated to 12 characters.
        """
        return self.id[:12]

    def reload(self):
        """
        Load this object from the server again and update ``attrs`` with the
        new data.
        """
        self.attrs = self.collection.get(self.id).attrs
50 |
51 |
class Collection:
    """
    A base class for representing all objects of a particular type on the
    server.
    """

    #: The type of object this collection represents, set by subclasses
    model = None

    def __init__(self, client=None):
        #: The client pointing at the server that this collection of objects
        #: is on.
        self.client = client

    def __call__(self, *args, **kwargs):
        message = (
            "'{}' object is not callable. You might be trying to use the old "
            "(pre-2.0) API - use ipsw.APIClient if so.".format(self.__class__.__name__)
        )
        raise TypeError(message)

    def list(self):
        raise NotImplementedError

    def get(self, key):
        raise NotImplementedError

    def create(self, attrs=None):
        raise NotImplementedError

    def prepare_model(self, attrs):
        """
        Create a model from a set of attributes.
        """
        if isinstance(attrs, Model):
            # Re-home an existing model onto this client/collection.
            attrs.client = self.client
            attrs.collection = self
            return attrs
        if isinstance(attrs, dict):
            return self.model(attrs=attrs, client=self.client, collection=self)
        raise Exception("Can't create %s from %s" % (self.model.__name__, attrs))
93 |
--------------------------------------------------------------------------------
/ipsw/transport/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 | from .unixconn import UnixHTTPAdapter
3 |
4 | try:
5 | from .npipeconn import NpipeHTTPAdapter
6 | from .npipesocket import NpipeSocket
7 | except ImportError:
8 | pass
9 |
10 | try:
11 | from .sshconn import SSHHTTPAdapter
12 | except ImportError:
13 | pass
14 |
--------------------------------------------------------------------------------
/ipsw/transport/basehttpadapter.py:
--------------------------------------------------------------------------------
1 | import requests.adapters
2 |
3 |
class BaseHTTPAdapter(requests.adapters.HTTPAdapter):
    """Shared base for the custom transport adapters.

    Extends requests' HTTPAdapter so that closing the adapter also clears
    any connection-pool container a subclass may keep in ``self.pools``.
    """

    def close(self):
        # Let requests tear down its own pool manager first.
        super().close()
        # Subclasses store per-URL pools in ``self.pools`` (created with a
        # dispose_func that closes each pool); guarded because not every
        # subclass defines the attribute.
        if hasattr(self, "pools"):
            self.pools.clear()
9 |
--------------------------------------------------------------------------------
/ipsw/transport/npipeconn.py:
--------------------------------------------------------------------------------
1 | import queue
2 | import requests.adapters
3 |
4 | from .basehttpadapter import BaseHTTPAdapter
5 | from .. import constants
6 | from .npipesocket import NpipeSocket
7 |
8 | import http.client as httplib
9 |
10 | try:
11 | import requests.packages.urllib3 as urllib3
12 | except ImportError:
13 | import urllib3
14 |
15 | RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
16 |
17 |
class NpipeHTTPConnection(httplib.HTTPConnection):
    """HTTP connection that tunnels over a Windows named pipe."""

    def __init__(self, npipe_path, timeout=60):
        super().__init__("localhost", timeout=timeout)
        # Path of the named pipe to dial.
        self.npipe_path = npipe_path
        self.timeout = timeout

    def connect(self):
        """Open the named pipe and install it as this connection's socket."""
        pipe_sock = NpipeSocket()
        pipe_sock.settimeout(self.timeout)
        pipe_sock.connect(self.npipe_path)
        self.sock = pipe_sock
29 |
30 |
class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
    """urllib3 connection pool that hands out named-pipe connections."""

    def __init__(self, npipe_path, timeout=60, maxsize=10):
        super().__init__("localhost", timeout=timeout, maxsize=maxsize)
        self.npipe_path = npipe_path
        self.timeout = timeout

    def _new_conn(self):
        # Called by urllib3 whenever the pool needs a fresh connection.
        return NpipeHTTPConnection(self.npipe_path, self.timeout)

    # When re-using connections, urllib3 tries to call select() on our
    # NpipeSocket instance, causing a crash. To circumvent this, we override
    # _get_conn, where that check happens.
    def _get_conn(self, timeout):
        conn = None
        try:
            conn = self.pool.get(block=self.block, timeout=timeout)

        except AttributeError: # self.pool is None
            raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")

        except queue.Empty:
            if self.block:
                raise urllib3.exceptions.EmptyPoolError(
                    self, "Pool reached maximum size and no more " "connections are allowed."
                )
            # Oh well, we'll create a new connection then

        return conn or self._new_conn()
59 |
60 |
class NpipeHTTPAdapter(BaseHTTPAdapter):
    """requests transport adapter for npipe:// URLs (Windows named pipes)."""

    __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + ["npipe_path", "pools", "timeout", "max_pool_size"]

    def __init__(
        self,
        base_url,
        timeout=60,
        pool_connections=constants.DEFAULT_NUM_POOLS,
        max_pool_size=constants.DEFAULT_MAX_POOL_SIZE,
    ):
        # Strip the scheme; what remains is the named-pipe path.
        self.npipe_path = base_url.replace("npipe://", "")
        self.timeout = timeout
        self.max_pool_size = max_pool_size
        # LRU container of per-URL pools; evicted pools are closed by the
        # dispose callback.
        self.pools = RecentlyUsedContainer(pool_connections, dispose_func=lambda p: p.close())
        super().__init__()

    def get_connection(self, url, proxies=None):
        """Return (or lazily create and cache) the pool for ``url``."""
        with self.pools.lock:
            pool = self.pools.get(url)
            if pool:
                return pool

            pool = NpipeHTTPConnectionPool(self.npipe_path, self.timeout, maxsize=self.max_pool_size)
            self.pools[url] = pool

        return pool

    def request_url(self, request, proxies):
        # The select_proxy utility in requests errors out when the provided URL
        # doesn't have a hostname, like is the case when using a UNIX socket.
        # Since proxies are an irrelevant notion in the case of UNIX sockets
        # anyway, we simply return the path URL directly.
        return request.path_url
94 |
--------------------------------------------------------------------------------
/ipsw/transport/npipesocket.py:
--------------------------------------------------------------------------------
1 | import functools
2 | import time
3 | import io
4 |
5 | import win32file
6 | import win32pipe
7 |
8 | cERROR_PIPE_BUSY = 0xE7
9 | cSECURITY_SQOS_PRESENT = 0x100000
10 | cSECURITY_ANONYMOUS = 0
11 |
12 | MAXIMUM_RETRY_COUNT = 10
13 |
14 |
def check_closed(f):
    """Decorator: reject calls on a socket whose connection was closed.

    Wraps method ``f`` so that invoking it after ``self._closed`` is set
    raises RuntimeError instead of operating on a dead handle.
    """

    @functools.wraps(f)
    def wrapped(self, *args, **kwargs):
        if not self._closed:
            return f(self, *args, **kwargs)
        raise RuntimeError("Can not reuse socket after connection was closed.")

    return wrapped
23 |
24 |
class NpipeSocket:
    """Partial implementation of the socket API over windows named pipes.
    This implementation is only designed to be used as a client socket,
    and server-specific methods (bind, listen, accept...) are not
    implemented.
    """

    def __init__(self, handle=None):
        # Pipe wait timeout: either a win32pipe sentinel or milliseconds
        # (see settimeout()).
        self._timeout = win32pipe.NMPWAIT_USE_DEFAULT_WAIT
        # Win32 pipe handle; None until connect() is called (or a handle is
        # passed in, as dup() does).
        self._handle = handle
        self._closed = False

    def accept(self):
        raise NotImplementedError()

    def bind(self, address):
        raise NotImplementedError()

    def close(self):
        self._handle.Close()
        self._closed = True

    @check_closed
    def connect(self, address, retry_count=0):
        # Open the named pipe at ``address``, retrying up to
        # MAXIMUM_RETRY_COUNT times while the pipe is busy.
        try:
            handle = win32file.CreateFile(
                address,
                win32file.GENERIC_READ | win32file.GENERIC_WRITE,
                0,
                None,
                win32file.OPEN_EXISTING,
                cSECURITY_ANONYMOUS | cSECURITY_SQOS_PRESENT,
                0,
            )
        except win32pipe.error as e:
            # See Remarks:
            # https://msdn.microsoft.com/en-us/library/aa365800.aspx
            if e.winerror == cERROR_PIPE_BUSY:
                # Another program or thread has grabbed our pipe instance
                # before we got to it. Wait for availability and attempt to
                # connect again.
                retry_count = retry_count + 1
                if retry_count < MAXIMUM_RETRY_COUNT:
                    time.sleep(1)
                    return self.connect(address, retry_count)
            raise e

        self.flags = win32pipe.GetNamedPipeInfo(handle)[0]

        self._handle = handle
        self._address = address

    @check_closed
    def connect_ex(self, address):
        return self.connect(address)

    @check_closed
    def detach(self):
        # Hand ownership of the handle to the caller; this socket becomes
        # unusable (all @check_closed methods will raise).
        self._closed = True
        return self._handle

    @check_closed
    def dup(self):
        # NOTE(review): the duplicate shares the same underlying handle, so
        # closing one affects the other — confirm callers expect that.
        return NpipeSocket(self._handle)

    def getpeername(self):
        return self._address

    def getsockname(self):
        return self._address

    def getsockopt(self, level, optname, buflen=None):
        raise NotImplementedError()

    def ioctl(self, control, option):
        raise NotImplementedError()

    def listen(self, backlog):
        raise NotImplementedError()

    def makefile(self, mode=None, bufsize=None):
        # Only buffered reading ("r"/"rb") is supported.
        if mode.strip("b") != "r":
            raise NotImplementedError()
        rawio = NpipeFileIOBase(self)
        if bufsize is None or bufsize <= 0:
            bufsize = io.DEFAULT_BUFFER_SIZE
        return io.BufferedReader(rawio, buffer_size=bufsize)

    @check_closed
    def recv(self, bufsize, flags=0):
        err, data = win32file.ReadFile(self._handle, bufsize)
        return data

    @check_closed
    def recvfrom(self, bufsize, flags=0):
        data = self.recv(bufsize, flags)
        return (data, self._address)

    @check_closed
    def recvfrom_into(self, buf, nbytes=0, flags=0):
        # NOTE(review): recv_into() only accepts (buf, nbytes); passing
        # ``flags`` as a third argument here would raise TypeError when
        # called — looks like a latent bug.
        return self.recv_into(buf, nbytes, flags), self._address

    @check_closed
    def recv_into(self, buf, nbytes=0):
        readbuf = buf
        if not isinstance(buf, memoryview):
            readbuf = memoryview(buf)

        err, data = win32file.ReadFile(self._handle, readbuf[:nbytes] if nbytes else readbuf)
        return len(data)

    def _recv_into_py2(self, buf, nbytes):
        # Legacy Python 2 variant; appears unused now that recv_into()
        # handles memoryviews directly.
        err, data = win32file.ReadFile(self._handle, nbytes or len(buf))
        n = len(data)
        buf[:n] = data
        return n

    @check_closed
    def send(self, string, flags=0):
        err, nbytes = win32file.WriteFile(self._handle, string)
        return nbytes

    @check_closed
    def sendall(self, string, flags=0):
        # NOTE(review): delegates to a single send()/WriteFile call with no
        # loop — confirm partial writes cannot occur on named pipes.
        return self.send(string, flags)

    @check_closed
    def sendto(self, string, address):
        self.connect(address)
        return self.send(string)

    def setblocking(self, flag):
        if flag:
            return self.settimeout(None)
        return self.settimeout(0)

    def settimeout(self, value):
        if value is None:
            # Blocking mode
            self._timeout = win32pipe.NMPWAIT_WAIT_FOREVER
        elif not isinstance(value, (float, int)) or value < 0:
            raise ValueError("Timeout value out of range")
        elif value == 0:
            # Non-blocking mode
            self._timeout = win32pipe.NMPWAIT_NO_WAIT
        else:
            # Timeout mode - Value converted to milliseconds
            self._timeout = value * 1000

    def gettimeout(self):
        return self._timeout

    def setsockopt(self, level, optname, value):
        raise NotImplementedError()

    @check_closed
    def shutdown(self, how):
        return self.close()
183 |
184 |
class NpipeFileIOBase(io.RawIOBase):
    """Raw I/O shim exposing an NpipeSocket as a readable file object."""

    def __init__(self, npipe_socket):
        # Underlying NpipeSocket; set to None once closed.
        self.sock = npipe_socket

    def close(self):
        super().close()
        self.sock = None

    def fileno(self):
        return self.sock.fileno()

    def readinto(self, buf):
        # Delegate directly to the socket's recv_into.
        return self.sock.recv_into(buf)

    def readable(self):
        return True

    def writable(self):
        return False

    def seekable(self):
        return False

    def isatty(self):
        return False
210 |
--------------------------------------------------------------------------------
/ipsw/transport/sshconn.py:
--------------------------------------------------------------------------------
1 | import paramiko
2 | import queue
3 | import urllib.parse
4 | import requests.adapters
5 | import logging
6 | import os
7 | import signal
8 | import socket
9 | import subprocess
10 |
11 | from .basehttpadapter import BaseHTTPAdapter
12 | from .. import constants
13 |
14 | import http.client as httplib
15 |
16 | try:
17 | import requests.packages.urllib3 as urllib3
18 | except ImportError:
19 | import urllib3
20 |
21 | RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
22 |
23 |
class SSHSocket(socket.socket):
    """Socket-like object that shells out to the local ``ssh`` binary and
    speaks over the subprocess's stdin/stdout (used when paramiko is
    bypassed).
    """

    def __init__(self, host):
        super().__init__(socket.AF_INET, socket.SOCK_STREAM)
        self.host = host
        self.port = None
        self.user = None
        # Accept "user@host:port" style specs.
        if ":" in self.host:
            self.host, self.port = self.host.split(":")
        if "@" in self.host:
            self.user, self.host = self.host.split("@")

        self.proc = None

    def connect(self, **kwargs):
        """Spawn ``ssh`` running the ipsw dial-stdio command."""
        args = ["ssh"]
        if self.user:
            args = args + ["-l", self.user]

        if self.port:
            args = args + ["-p", self.port]

        args = args + ["--", self.host, "ipsw system dial-stdio"]

        preexec_func = None
        if not constants.IS_WINDOWS_PLATFORM:
            # Keep Ctrl-C in the parent process from killing the ssh child.
            def f():
                signal.signal(signal.SIGINT, signal.SIG_IGN)

            preexec_func = f

        env = dict(os.environ)

        # drop LD_LIBRARY_PATH and SSL_CERT_FILE
        env.pop("LD_LIBRARY_PATH", None)
        env.pop("SSL_CERT_FILE", None)

        self.proc = subprocess.Popen(
            args, env=env, stdout=subprocess.PIPE, stdin=subprocess.PIPE, preexec_fn=preexec_func
        )

    def _write(self, data):
        """Write ``data`` to the ssh subprocess, flushing immediately."""
        if not self.proc or self.proc.stdin.closed:
            # BUG FIX: the two adjacent literals previously concatenated
            # without a space ("initiated.connect()").
            raise Exception("SSH subprocess not initiated. " "connect() must be called first.")
        written = self.proc.stdin.write(data)
        self.proc.stdin.flush()
        return written

    def sendall(self, data):
        self._write(data)

    def send(self, data):
        return self._write(data)

    def recv(self, n):
        if not self.proc:
            # BUG FIX: same missing-space message as in _write().
            raise Exception("SSH subprocess not initiated. " "connect() must be called first.")
        return self.proc.stdout.read(n)

    def makefile(self, mode):
        if not self.proc:
            self.connect()
        self.proc.stdout.channel = self

        return self.proc.stdout

    def close(self):
        if not self.proc or self.proc.stdin.closed:
            return
        self.proc.stdin.write(b"\n\n")
        self.proc.stdin.flush()
        self.proc.terminate()
96 |
97 |
class SSHConnection(httplib.HTTPConnection):
    """HTTP connection over SSH: a paramiko channel, or an ssh subprocess."""

    def __init__(self, ssh_transport=None, timeout=60, host=None):
        super().__init__("localhost", timeout=timeout)
        self.ssh_transport = ssh_transport
        self.timeout = timeout
        self.ssh_host = host

    def connect(self):
        """Open a channel (paramiko) or spawn ssh (shell-out) as the socket."""
        if self.ssh_transport:
            # Paramiko path: run the dial-stdio command over a new channel.
            channel = self.ssh_transport.open_session()
            channel.settimeout(self.timeout)
            channel.exec_command("ipsw system dial-stdio")
            self.sock = channel
        else:
            # Shell-out path: SSHSocket spawns the local ssh client.
            ssh_sock = SSHSocket(self.ssh_host)
            ssh_sock.settimeout(self.timeout)
            ssh_sock.connect()
            self.sock = ssh_sock
116 |
117 |
class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
    """urllib3 pool whose connections tunnel HTTP over SSH."""

    scheme = "ssh"

    def __init__(self, ssh_client=None, timeout=60, maxsize=10, host=None):
        super().__init__("localhost", timeout=timeout, maxsize=maxsize)
        # Paramiko transport when an ssh_client is supplied; None means
        # connections will shell out via SSHSocket instead.
        self.ssh_transport = None
        self.timeout = timeout
        if ssh_client:
            self.ssh_transport = ssh_client.get_transport()
        self.ssh_host = host

    def _new_conn(self):
        # Called by urllib3 whenever the pool needs a fresh connection.
        return SSHConnection(self.ssh_transport, self.timeout, self.ssh_host)

    # When re-using connections, urllib3 calls fileno() on our
    # SSH channel instance, quickly overloading our fd limit. To avoid this,
    # we override _get_conn
    def _get_conn(self, timeout):
        conn = None
        try:
            conn = self.pool.get(block=self.block, timeout=timeout)

        except AttributeError: # self.pool is None
            raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")

        except queue.Empty:
            if self.block:
                raise urllib3.exceptions.EmptyPoolError(
                    self, "Pool reached maximum size and no more " "connections are allowed."
                )
            # Oh well, we'll create a new connection then

        return conn or self._new_conn()
151 |
152 |
class SSHHTTPAdapter(BaseHTTPAdapter):
    """requests transport adapter for ssh:// URLs.

    Uses paramiko by default; with ``shell_out=True`` it instead spawns the
    local ``ssh`` binary (see SSHSocket).
    """

    __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + [
        "pools",
        "timeout",
        "ssh_client",
        "ssh_params",
        "max_pool_size",
    ]

    def __init__(
        self,
        base_url,
        timeout=60,
        pool_connections=constants.DEFAULT_NUM_POOLS,
        max_pool_size=constants.DEFAULT_MAX_POOL_SIZE,
        shell_out=False,
    ):
        self.ssh_client = None
        if not shell_out:
            self._create_paramiko_client(base_url)
            self._connect()

        # Keep the host spec (minus the scheme) for shell-out connections.
        self.ssh_host = base_url
        if base_url.startswith("ssh://"):
            self.ssh_host = base_url[len("ssh://") :]

        self.timeout = timeout
        self.max_pool_size = max_pool_size
        # LRU container of per-URL pools; evicted pools are closed by the
        # dispose callback.
        self.pools = RecentlyUsedContainer(pool_connections, dispose_func=lambda p: p.close())
        super().__init__()

    def _create_paramiko_client(self, base_url):
        # Build the paramiko client and derive connect parameters from the
        # URL, overlaying values from the user's ~/.ssh/config.
        logging.getLogger("paramiko").setLevel(logging.WARNING)
        self.ssh_client = paramiko.SSHClient()
        base_url = urllib.parse.urlparse(base_url)
        self.ssh_params = {"hostname": base_url.hostname, "port": base_url.port, "username": base_url.username}
        ssh_config_file = os.path.expanduser("~/.ssh/config")
        if os.path.exists(ssh_config_file):
            conf = paramiko.SSHConfig()
            with open(ssh_config_file) as f:
                conf.parse(f)
            host_config = conf.lookup(base_url.hostname)
            if "proxycommand" in host_config:
                self.ssh_params["sock"] = paramiko.ProxyCommand(host_config["proxycommand"])
            if "hostname" in host_config:
                self.ssh_params["hostname"] = host_config["hostname"]
            # Values given explicitly in the URL win over ssh_config.
            if base_url.port is None and "port" in host_config:
                self.ssh_params["port"] = host_config["port"]
            if base_url.username is None and "user" in host_config:
                self.ssh_params["username"] = host_config["user"]
            if "identityfile" in host_config:
                # NOTE(review): ssh_config "identityfile" is typically a
                # list — confirm paramiko accepts a list for key_filename.
                self.ssh_params["key_filename"] = host_config["identityfile"]

        self.ssh_client.load_system_host_keys()
        self.ssh_client.set_missing_host_key_policy(paramiko.RejectPolicy())

    def _connect(self):
        # Establish (or re-establish) the paramiko session.
        if self.ssh_client:
            self.ssh_client.connect(**self.ssh_params)

    def get_connection(self, url, proxies=None):
        # Shell-out mode has no paramiko client: build a fresh, uncached
        # pool per call.
        if not self.ssh_client:
            return SSHConnectionPool(
                ssh_client=self.ssh_client, timeout=self.timeout, maxsize=self.max_pool_size, host=self.ssh_host
            )
        with self.pools.lock:
            pool = self.pools.get(url)
            if pool:
                return pool

            # Connection is closed try a reconnect
            if self.ssh_client and not self.ssh_client.get_transport():
                self._connect()

            pool = SSHConnectionPool(
                ssh_client=self.ssh_client, timeout=self.timeout, maxsize=self.max_pool_size, host=self.ssh_host
            )
            self.pools[url] = pool

        return pool

    def close(self):
        # Close pooled connections (via BaseHTTPAdapter) and the SSH client.
        super().close()
        if self.ssh_client:
            self.ssh_client.close()
238 |
--------------------------------------------------------------------------------
/ipsw/transport/unixconn.py:
--------------------------------------------------------------------------------
1 | import requests.adapters
2 | import socket
3 | import http.client as httplib
4 |
5 | from .basehttpadapter import BaseHTTPAdapter
6 | from .. import constants
7 |
8 | try:
9 | import requests.packages.urllib3 as urllib3
10 | except ImportError:
11 | import urllib3
12 |
13 |
14 | RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
15 |
16 |
class UnixHTTPConnection(httplib.HTTPConnection):
    """HTTP connection over a UNIX domain socket.

    Speaks plain HTTP addressed to ``localhost`` but dials ``unix_socket``
    instead of a TCP endpoint.
    """

    def __init__(self, base_url, unix_socket, timeout=60):
        super().__init__("localhost", timeout=timeout)
        self.base_url = base_url
        self.unix_socket = unix_socket
        self.timeout = timeout

    def connect(self):
        """Dial the UNIX socket and install it as this connection's socket."""
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sock.settimeout(self.timeout)
        sock.connect(self.unix_socket)
        self.sock = sock

    # The previous ``putheader`` and ``response_class`` overrides only
    # replicated the base-class behavior (putheader called super();
    # response_class constructed a plain httplib.HTTPResponse, which is
    # already the inherited default), so they have been removed.
35 |
36 |
class UnixHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
    """urllib3 pool that hands out UNIX-socket-backed HTTP connections."""

    def __init__(self, base_url, socket_path, timeout=60, maxsize=10):
        super().__init__("localhost", timeout=timeout, maxsize=maxsize)
        self.base_url = base_url
        self.socket_path = socket_path
        self.timeout = timeout

    def _new_conn(self):
        # Called by urllib3 whenever the pool needs a fresh connection.
        return UnixHTTPConnection(self.base_url, self.socket_path, self.timeout)
46 |
47 |
class UnixHTTPAdapter(BaseHTTPAdapter):
    """requests transport adapter for http+unix:// URLs."""

    __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + ["pools", "socket_path", "timeout", "max_pool_size"]

    def __init__(
        self,
        socket_url,
        timeout=60,
        pool_connections=constants.DEFAULT_NUM_POOLS,
        max_pool_size=constants.DEFAULT_MAX_POOL_SIZE,
    ):
        # Strip the scheme and normalize to an absolute socket path.
        socket_path = socket_url.replace("http+unix://", "")
        if not socket_path.startswith("/"):
            socket_path = "/" + socket_path
        self.socket_path = socket_path
        self.timeout = timeout
        self.max_pool_size = max_pool_size
        # LRU container of per-URL pools; evicted pools are closed by the
        # dispose callback.
        self.pools = RecentlyUsedContainer(pool_connections, dispose_func=lambda p: p.close())
        super().__init__()

    def get_connection(self, url, proxies=None):
        """Return (or lazily create and cache) the pool for ``url``."""
        with self.pools.lock:
            pool = self.pools.get(url)
            if pool:
                return pool

            pool = UnixHTTPConnectionPool(url, self.socket_path, self.timeout, maxsize=self.max_pool_size)
            self.pools[url] = pool

        return pool

    def request_url(self, request, proxies):
        # The select_proxy utility in requests errors out when the provided URL
        # doesn't have a hostname, like is the case when using a UNIX socket.
        # Since proxies are an irrelevant notion in the case of UNIX sockets
        # anyway, we simply return the path URL directly.
        return request.path_url
84 |
--------------------------------------------------------------------------------
/ipsw/types/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 | from .daemon import CancellableStream
3 |
--------------------------------------------------------------------------------
/ipsw/types/daemon.py:
--------------------------------------------------------------------------------
1 | import socket
2 |
3 | try:
4 | import requests.packages.urllib3 as urllib3
5 | except ImportError:
6 | import urllib3
7 |
8 | from ..errors import IpswException
9 |
10 |
class CancellableStream:
    """
    Stream wrapper for real-time events, logs, etc. from the server.

    Example:
        >>> events = client.events()
        >>> for event in events:
        ...     print(event)
        >>> # and cancel from another thread
        >>> events.close()
    """

    def __init__(self, stream, response):
        # Iterator yielding items from the streaming HTTP response body.
        self._stream = stream
        # The underlying response object, kept so close() can reach the
        # raw socket.
        self._response = response

    def __iter__(self):
        return self

    def __next__(self):
        try:
            return next(self._stream)
        except urllib3.exceptions.ProtocolError:
            # Connection torn down (e.g. by close()) — end iteration.
            raise StopIteration
        except OSError:
            raise StopIteration

    # Python 2-style alias, kept for backward compatibility.
    next = __next__

    def close(self):
        """
        Closes the event streaming.
        """

        if not self._response.raw.closed:
            # find the underlying socket object
            # based on api.client._get_raw_response_socket

            # NOTE(review): this digs through urllib3/http.client private
            # attributes (``_fp.fp``) and is fragile across urllib3 versions.
            sock_fp = self._response.raw._fp.fp

            if hasattr(sock_fp, "raw"):
                sock_raw = sock_fp.raw

                if hasattr(sock_raw, "sock"):
                    sock = sock_raw.sock

                elif hasattr(sock_raw, "_sock"):
                    sock = sock_raw._sock

            elif hasattr(sock_fp, "channel"):
                # We're working with a paramiko (SSH) channel, which doesn't
                # support cancelable streams with the current implementation
                raise IpswException("Cancellable streams not supported for the SSH protocol")
            else:
                sock = sock_fp._sock

            # Unwrap pyOpenSSL's socket wrapper when it is in play.
            if hasattr(urllib3.contrib, "pyopenssl") and isinstance(sock, urllib3.contrib.pyopenssl.WrappedSocket):
                sock = sock.socket

            sock.shutdown(socket.SHUT_RDWR)
            sock.close()
72 |
--------------------------------------------------------------------------------
/ipsw/utils/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 | from .decorators import check_resource, minimum_version, update_headers
3 | from .utils import parse_host, kwargs_from_env, version_lt, version_gte, format_environment
4 |
--------------------------------------------------------------------------------
/ipsw/utils/config.py:
--------------------------------------------------------------------------------
1 | import yaml
2 | import logging
3 | import os
4 |
5 | from ..constants import IS_WINDOWS_PLATFORM
6 |
7 | IPSW_CONFIG_FILENAME = os.path.join(".config", "ipsw", "config.yml")
8 |
9 | log = logging.getLogger(__name__)
10 |
11 |
def find_config_file(config_path=None):
    """Return the first existing config file path, or ``None``.

    Search order: the explicit argument, then $IPSW_CONFIG, then the
    default location under the user's home directory.
    """
    candidates = [
        config_path,  # 1
        config_path_from_environment(),  # 2
        os.path.join(home_dir(), IPSW_CONFIG_FILENAME),  # 3
    ]
    paths = [p for p in candidates if p]

    log.debug(f"Trying paths: {repr(paths)}")

    for path in paths:
        if os.path.exists(path):
            log.debug(f"Found file at path: {path}")
            return path

    log.debug("No config file found")

    return None
34 |
35 |
def config_path_from_environment():
    """Config file path under $IPSW_CONFIG, or ``None`` if unset/empty."""
    config_dir = os.environ.get("IPSW_CONFIG")
    if config_dir:
        return os.path.join(config_dir, os.path.basename(IPSW_CONFIG_FILENAME))
    return None
41 |
42 |
def home_dir():
    """
    Get the user's home directory, using the same logic as the ipsw
    client - use %USERPROFILE% on Windows, $HOME/getuid on POSIX.
    """
    return os.environ.get("USERPROFILE", "") if IS_WINDOWS_PLATFORM else os.path.expanduser("~")
52 |
53 |
def load_general_config(config_path=None):
    """Load the ipsw YAML config, returning ``{}`` when it is missing or
    unreadable.

    :param config_path: explicit path to try first (see find_config_file)
    """
    config_file = find_config_file(config_path)

    if not config_file:
        return {}

    try:
        with open(config_file) as f:
            return yaml.safe_load(f)
    except (OSError, yaml.YAMLError, ValueError) as e:
        # BUG FIX: malformed YAML raises yaml.YAMLError, which the previous
        # (OSError, ValueError) clause did not catch — so a bad config file
        # crashed instead of falling through to the empty default, despite
        # the log message below.
        log.debug(e)

    log.debug("All parsing attempts failed - returning empty config")
    return {}
68 |
--------------------------------------------------------------------------------
/ipsw/utils/decorators.py:
--------------------------------------------------------------------------------
1 | import functools
2 |
3 | from .. import errors
4 | from . import utils
5 |
6 |
def check_resource(resource_name):
    """Decorator factory ensuring an API method receives a resource ID.

    The ID may be given positionally, via the ``resource_name`` keyword,
    or as a dict carrying an 'Id'/'ID' key. Raises errors.NullResource
    when no ID can be determined.
    """

    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(self, resource_id=None, *args, **kwargs):
            # Fall back to the keyword form when no positional ID was given.
            if resource_id is None and kwargs.get(resource_name):
                resource_id = kwargs.pop(resource_name)
            # A full resource dict may be passed instead of a bare ID.
            if isinstance(resource_id, dict):
                resource_id = resource_id.get("Id", resource_id.get("ID"))
            if not resource_id:
                raise errors.NullResource("Resource ID was not provided")
            return fn(self, resource_id, *args, **kwargs)

        return wrapper

    return decorator
22 |
23 |
def minimum_version(version):
    """Decorator factory guarding an API method behind a minimum version.

    Raises errors.InvalidVersion when the client's negotiated version
    (``self._version``) is older than ``version``.
    """

    def decorator(fn):
        @functools.wraps(fn)
        def guarded(self, *args, **kwargs):
            if utils.version_lt(self._version, version):
                raise errors.InvalidVersion("{} is not available for version < {}".format(fn.__name__, version))
            return fn(self, *args, **kwargs)

        return guarded

    return decorator
35 |
36 |
def update_headers(f):
    """Decorator: fold the client's configured HttpHeaders into each call.

    If the caller supplied no ``headers`` kwarg, the configured headers
    are used as-is; otherwise the configured values are merged into the
    caller's dict, overwriting duplicate keys.
    """

    def inner(self, *args, **kwargs):
        if "HttpHeaders" in self._general_configs:
            configured = self._general_configs["HttpHeaders"]
            caller_headers = kwargs.get("headers")
            if caller_headers:
                caller_headers.update(configured)
            else:
                kwargs["headers"] = configured
        return f(self, *args, **kwargs)

    return inner
47 |
--------------------------------------------------------------------------------
/ipsw/utils/json_stream.py:
--------------------------------------------------------------------------------
1 | import json
2 | import json.decoder
3 |
4 | from ..errors import StreamParseError
5 |
6 |
7 | json_decoder = json.JSONDecoder()
8 |
9 |
def stream_as_text(stream):
    """
    Yield each item of ``stream`` as text, decoding bytes items as UTF-8
    with undecodable sequences replaced. Str items pass through unchanged.
    This function can be removed once we return text streams instead of
    byte streams.
    """
    for chunk in stream:
        yield chunk if isinstance(chunk, str) else chunk.decode("utf-8", "replace")
21 |
22 |
def json_splitter(buffer):
    """Try to parse one JSON object off the front of ``buffer``.

    Returns a (object, remaining_text) tuple when a complete object is
    present, otherwise None.
    """
    buffer = buffer.strip()
    try:
        obj, end = json_decoder.raw_decode(buffer)
    except ValueError:
        return None
    # Skip whitespace between the decoded object and the rest of the input.
    rest = buffer[json.decoder.WHITESPACE.match(buffer, end).end() :]
    return obj, rest
34 |
35 |
def json_stream(stream):
    """Given a stream of text, return a stream of json objects.
    This handles streams which are inconsistently buffered (some entries may
    be newline delimited, and others are not).
    """
    # Delegates to split_buffer: json_splitter peels complete objects off
    # the accumulating buffer; json_decoder.decode parses any final chunk.
    return split_buffer(stream, json_splitter, json_decoder.decode)
42 |
43 |
def line_splitter(buffer, separator="\n"):
    """Split one line (including its separator) off the front of ``buffer``.

    Returns (line, rest), or None when the separator is not present.
    """
    pos = buffer.find(str(separator))
    if pos < 0:
        return None
    return buffer[: pos + 1], buffer[pos + 1 :]
49 |
50 |
def split_buffer(stream, splitter=None, decoder=lambda a: a):
    """Given a generator which yields strings and a splitter function,
    joins all input, splits on the separator and yields each chunk.
    Unlike string.split(), each chunk includes the trailing
    separator, except for the last one if none was found on the end
    of the input.
    """
    split = splitter if splitter is not None else line_splitter
    pending = ""

    for text in stream_as_text(stream):
        pending += text
        # Emit every complete chunk currently sitting in the buffer.
        while True:
            parts = split(pending)
            if parts is None:
                break
            chunk, pending = parts
            yield chunk

    # Anything left over lacked a trailing separator; run it through the
    # decoder as the final item.
    if pending:
        try:
            yield decoder(pending)
        except Exception as e:
            raise StreamParseError(e)
76 |
--------------------------------------------------------------------------------
/ipsw/utils/proxy.py:
--------------------------------------------------------------------------------
1 | from .utils import format_environment
2 |
3 |
class ProxyConfig(dict):
    """
    Hold the client's proxy configuration
    """

    @property
    def http(self):
        # HTTP proxy URL, if configured.
        return self.get("http")

    @property
    def https(self):
        # HTTPS proxy URL, if configured.
        return self.get("https")

    @property
    def ftp(self):
        # FTP proxy URL, if configured.
        return self.get("ftp")

    @property
    def no_proxy(self):
        # Comma-separated hosts to exclude from proxying, if configured.
        return self.get("no_proxy")

    @staticmethod
    def from_dict(config):
        """
        Instantiate a new ProxyConfig from a dictionary that represents a
        client configuration, as described in `the documentation`_.
        """
        return ProxyConfig(
            http=config.get("httpProxy"),
            https=config.get("httpsProxy"),
            ftp=config.get("ftpProxy"),
            no_proxy=config.get("noProxy"),
        )

    def get_environment(self):
        """
        Return a dictionary representing the environment variables used to
        set the proxy settings.
        """
        env = {}
        # Each setting is exported under both its lower- and upper-case name.
        pairs = (
            ("http_proxy", "HTTP_PROXY", self.http),
            ("https_proxy", "HTTPS_PROXY", self.https),
            ("ftp_proxy", "FTP_PROXY", self.ftp),
            ("no_proxy", "NO_PROXY", self.no_proxy),
        )
        for lower, upper, value in pairs:
            if value:
                env[lower] = env[upper] = value
        return env

    def inject_proxy_environment(self, environment):
        """
        Given a list of strings representing environment variables, prepend the
        environment variables corresponding to the proxy settings.
        """
        if not self:
            return environment

        proxy_env = format_environment(self.get_environment())
        if not environment:
            return proxy_env
        # Prepending keeps the caller's variables authoritative: entries
        # later in the list take precedence.
        return proxy_env + environment

    def __str__(self):
        return (
            f"ProxyConfig(http={self.http}, https={self.https}, "
            f"ftp={self.ftp}, no_proxy={self.no_proxy})"
        )
73 |
--------------------------------------------------------------------------------
/ipsw/utils/socket.py:
--------------------------------------------------------------------------------
1 | import errno
2 | import os
3 | import select
4 | import socket as pysocket
5 | import struct
6 |
7 | try:
8 | from ..transport import NpipeSocket
9 | except ImportError:
10 | NpipeSocket = type(None)
11 |
12 |
13 | STDOUT = 1
14 | STDERR = 2
15 |
16 |
17 | class SocketError(Exception):
18 | pass
19 |
20 |
21 | # NpipeSockets have their own error types
22 | # pywintypes.error: (109, 'ReadFile', 'The pipe has been ended.')
23 | NPIPE_ENDED = 109
24 |
25 |
def read(socket, n=4096):
    """
    Reads at most n bytes from socket.

    Blocks in select() until the socket is readable, except for npipe
    sockets, which don't support select. Returns None after a recoverable
    OS-level read error (callers retry), and an empty byte string when an
    npipe reports PIPE_ENDED (treated as a 0-length read / close).
    """

    recoverable_errors = (errno.EINTR, errno.EDEADLK, errno.EWOULDBLOCK)

    if not isinstance(socket, NpipeSocket):
        select.select([socket], [], [])

    try:
        if hasattr(socket, "recv"):
            return socket.recv(n)
        if isinstance(socket, getattr(pysocket, "SocketIO")):
            return socket.read(n)
        return os.read(socket.fileno(), n)
    except OSError as e:
        # Transient errors: fall through (returning None) so callers retry.
        if e.errno not in recoverable_errors:
            raise
    except Exception as e:
        is_pipe_ended = isinstance(socket, NpipeSocket) and len(e.args) > 0 and e.args[0] == NPIPE_ENDED
        if is_pipe_ended:
            # npipes don't support duplex sockets, so we interpret
            # a PIPE_ENDED error as a close operation (0-length read).
            # Return bytes (was the str "") to stay consistent with the
            # other read paths, which all yield bytes; consumers such as
            # consume_socket_output join frames with bytes().
            return b""
        raise
52 |
53 |
def read_exactly(socket, n):
    """
    Reads exactly n bytes from socket
    Raises SocketError if there isn't enough data
    """
    buf = b""
    while len(buf) < n:
        chunk = read(socket, n - len(buf))
        if not chunk:
            # A falsy read means the peer closed before n bytes arrived.
            raise SocketError("Unexpected EOF")
        buf += chunk
    return buf
66 |
67 |
def next_frame_header(socket):
    """
    Returns the stream and size of the next frame of data waiting to be read
    from socket, according to the protocol defined here:
    """
    try:
        header = read_exactly(socket, 8)
    except SocketError:
        # Not enough data for a full header: signal end-of-stream.
        return (-1, -1)

    # Header layout: 1 byte stream id, 3 padding bytes, big-endian u32 size.
    stream_id, frame_size = struct.unpack(">BxxxL", header)
    return (stream_id, frame_size)
80 |
81 |
def frames_iter(socket, tty):
    """
    Return a generator of frames read from socket. A frame is a tuple where
    the first item is the stream number and the second item is a chunk of data.

    If the tty setting is enabled, the streams are multiplexed into the stdout
    stream.
    """
    if not tty:
        return frames_iter_no_tty(socket)
    # With a tty there is no multiplexing header; label everything stdout.
    return ((STDOUT, chunk) for chunk in frames_iter_tty(socket))
94 |
95 |
def frames_iter_no_tty(socket):
    """
    Returns a generator of data read from the socket when the tty setting is
    not enabled.
    """
    while True:
        stream_id, remaining = next_frame_header(socket)
        if remaining < 0:
            # next_frame_header signals end-of-stream with (-1, -1).
            break
        # A frame's payload may arrive in several reads; keep reading
        # until the advertised size is consumed.
        while remaining > 0:
            chunk = read(socket, remaining)
            if chunk is None:
                # Recoverable read error; try again.
                continue
            if len(chunk) == 0:
                # We have reached EOF
                return
            remaining -= len(chunk)
            yield (stream_id, chunk)
115 |
116 |
def frames_iter_tty(socket):
    """
    Return a generator of data read from the socket when the tty setting is
    enabled.
    """
    while True:
        chunk = read(socket)
        if not len(chunk):
            # We have reached EOF
            return
        yield chunk
128 |
129 |
def consume_socket_output(frames, demux=False):
    """
    Iterate through frames read from the socket and return the result.

    Args:

        demux (bool):
            If False, stdout and stderr are multiplexed, and the result is the
            concatenation of all the frames. If True, the streams are
            demultiplexed, and the result is a 2-tuple where each item is the
            concatenation of frames belonging to the same stream.
    """
    if demux is False:
        # Multiplexed: frames are plain byte strings; just concatenate.
        return b"".join(frames)

    # Demultiplexed: each frame is a (stdout, stderr) tuple with exactly
    # one non-None member.
    stdout, stderr = None, None
    for frame in frames:
        assert frame != (None, None)
        out_chunk, err_chunk = frame
        if out_chunk is not None:
            stdout = out_chunk if stdout is None else stdout + out_chunk
        else:
            stderr = err_chunk if stderr is None else stderr + err_chunk
    return (stdout, stderr)
165 |
166 |
def demux_adaptor(stream_id, data):
    """
    Utility to demultiplex stdout and stderr when reading frames from the
    socket.
    """
    if stream_id == STDOUT:
        return (data, None)
    if stream_id == STDERR:
        return (None, data)
    raise ValueError(f"{stream_id} is not a valid stream")
178 |
--------------------------------------------------------------------------------
/ipsw/utils/utils.py:
--------------------------------------------------------------------------------
1 | import collections
2 | import os
3 | import string
4 |
5 | from .. import errors
6 | from ..constants import DEFAULT_HTTP_HOST
7 | from ..constants import DEFAULT_UNIX_SOCKET
8 | from ..constants import DEFAULT_NPIPE
9 | from ..constants import BYTE_UNITS
10 |
11 | from urllib.parse import urlparse, urlunparse
12 | from packaging.version import Version
13 |
14 | URLComponents = collections.namedtuple(
15 | "URLComponents",
16 | "scheme netloc url params query fragment",
17 | )
18 |
19 |
def parse_host(addr, is_win32=False, tls=False):
    """Normalize a daemon address into a URL the HTTP adapters understand.

    :param addr: Address string, e.g. ``tcp://host:port``, ``unix:///sock``,
        ``ssh://user@host`` or ``npipe:////./pipe/name``; empty/None selects
        a platform default.
    :param is_win32: When True and ``addr`` is empty, default to the npipe
        address instead of the unix socket.
    :param tls: Use https for tcp addresses (also implied by an ``https://``
        scheme in ``addr``).
    :returns: The normalized address string.
    :raises errors.IpswException: For unsupported protocols or malformed
        addresses.
    """
    # Sensible defaults
    if not addr and is_win32:
        return DEFAULT_NPIPE
    if not addr or addr.strip() == "unix://":
        return DEFAULT_UNIX_SOCKET

    addr = addr.strip()

    parsed_url = urlparse(addr)
    proto = parsed_url.scheme
    if not proto or any([x not in string.ascii_letters + "+" for x in proto]):
        # No scheme (or a bogus one, e.g. "host:port" parsing the host as
        # the scheme): re-parse as a network location with tcp assumed.
        # https://bugs.python.org/issue754016
        parsed_url = urlparse("//" + addr, "tcp")
        proto = "tcp"

    if proto == "fd":
        raise errors.IpswException("fd protocol is not implemented")

    # These protos are valid aliases for our library but not for the
    # official spec
    if proto == "http" or proto == "https":
        tls = proto == "https"
        proto = "tcp"
    elif proto == "http+unix":
        proto = "unix"

    if proto not in ("tcp", "unix", "npipe", "ssh"):
        raise errors.IpswException(f"Invalid bind address protocol: {addr}")

    if proto == "tcp" and not parsed_url.netloc:
        # "tcp://" is exceptionally disallowed by convention;
        # omitting a hostname for other protocols is fine
        raise errors.IpswException(f"Invalid bind address format: {addr}")

    if any([parsed_url.params, parsed_url.query, parsed_url.fragment, parsed_url.password]):
        raise errors.IpswException(f"Invalid bind address format: {addr}")

    if parsed_url.path and proto == "ssh":
        raise errors.IpswException("Invalid bind address format: no path allowed for this protocol:" " {}".format(addr))
    else:
        path = parsed_url.path
        if proto == "unix" and parsed_url.hostname is not None:
            # For legacy reasons, we consider unix://path
            # to be valid and equivalent to unix:///path
            path = "/".join((parsed_url.hostname, path))

    netloc = parsed_url.netloc
    if proto in ("tcp", "ssh"):
        # parsed_url.port raises ValueError for non-numeric ports; a
        # missing port is 0 here, which is rejected below (ssh gets 22).
        port = parsed_url.port or 0
        if port <= 0:
            if proto != "ssh":
                raise errors.IpswException("Invalid bind address format: port is required:" " {}".format(addr))
            port = 22
        netloc = f"{parsed_url.netloc}:{port}"

        if not parsed_url.hostname:
            netloc = f"{DEFAULT_HTTP_HOST}:{port}"

    # Rewrite schemes to fit library internals (requests adapters)
    if proto == "tcp":
        proto = "http{}".format("s" if tls else "")
    elif proto == "unix":
        proto = "http+unix"

    if proto in ("http+unix", "npipe"):
        return f"{proto}://{path}".rstrip("/")

    return urlunparse(
        URLComponents(
            scheme=proto,
            netloc=netloc,
            url=path,
            params="",
            query="",
            fragment="",
        )
    ).rstrip("/")
98 |
99 |
def kwargs_from_env(ssl_version=None, assert_hostname=None, environment=None):
    """Build client keyword arguments from environment variables.

    Reads IPSW_HOST, IPSW_CERT_PATH and IPSW_TLS_VERIFY from ``environment``
    (falling back to os.environ). The TLS configuration itself is currently
    disabled (commented out below), so only ``base_url`` is ever returned.
    """
    environment = environment or os.environ
    host = environment.get("IPSW_HOST")

    # empty string for cert path is the same as unset.
    cert_path = environment.get("IPSW_CERT_PATH") or None

    # empty string for tls verify counts as "false";
    # any other value, or unset, counts as true.
    tls_verify = environment.get("IPSW_TLS_VERIFY")
    tls_verify = (tls_verify is not None) if tls_verify != "" else False

    enable_tls = bool(cert_path or tls_verify)

    params = {}
    if host:
        params["base_url"] = host

    if not enable_tls:
        return params

    if cert_path is None:
        cert_path = os.path.join(os.path.expanduser("~"), ".config", "ipsw")

    if not tls_verify and assert_hostname is None:
        # assert_hostname is a subset of TLS verification,
        # so if it's not set already then set it to false.
        assert_hostname = False

    # params['tls'] = TLSConfig(
    #     client_cert=(os.path.join(cert_path, 'cert.pem'),
    #                  os.path.join(cert_path, 'key.pem')),
    #     ca_cert=os.path.join(cert_path, 'ca.pem'),
    #     verify=tls_verify,
    #     ssl_version=ssl_version,
    #     assert_hostname=assert_hostname,
    # )

    return params
143 |
144 |
def format_environment(environment):
    """Render a {key: value} mapping as a list of "KEY=value" strings.

    A None value yields just the bare key; bytes values are decoded as
    UTF-8 before formatting.
    """

    def render(key, value):
        if value is None:
            return key
        if isinstance(value, bytes):
            value = value.decode("utf-8")
        return f"{key}={value}"

    return [render(k, v) for k, v in environment.items()]
155 |
156 |
def compare_version(v1, v2):
    """Compare two version strings.

    Note the inverted ordering convention: returns 1 when v1 is older
    than v2, -1 when v1 is newer, and 0 when they are equal.

    >>> v1 = '1.9'
    >>> v2 = '1.10'
    >>> compare_version(v1, v2)
    1
    >>> compare_version(v2, v1)
    -1
    >>> compare_version(v2, v2)
    0
    """
    left, right = Version(v1), Version(v2)
    if left == right:
        return 0
    return -1 if left > right else 1
177 |
178 |
def version_lt(v1, v2):
    # True when v1 is older than v2 (compare_version returns 1 in that case).
    return compare_version(v1, v2) > 0
181 |
182 |
def version_gte(v1, v2):
    # True when v1 is the same version as v2 or newer.
    return not version_lt(v1, v2)
185 |
--------------------------------------------------------------------------------
/ipsw/version.py:
--------------------------------------------------------------------------------
# Resolve the package version: prefer the setuptools_scm-generated _version
# module (written at build time), then installed package metadata, then a
# development fallback of "0.0.0".
try:
    from ._version import __version__
except ImportError:
    try:
        # importlib.metadata available in Python 3.8+, the fallback (0.0.0)
        # is fine because release builds use _version (above) rather than
        # this code path, so it only impacts developing w/ 3.7
        from importlib.metadata import version, PackageNotFoundError

        try:
            __version__ = version("ipsw")
        except PackageNotFoundError:
            # Package metadata not found (e.g. running from a source checkout).
            __version__ = "0.0.0"
    except ImportError:
        # Python < 3.8 without importlib.metadata.
        __version__ = "0.0.0"
16 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools>=45", "setuptools_scm[toml]>=6.2"]
3 |
4 | [tool.setuptools_scm]
5 | write_to = 'ipsw/_version.py'
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | packaging==23.1
2 | paramiko==3.1.0
3 | pywin32==306; sys_platform == 'win32'
4 | requests==2.30.0
5 | urllib3==2.0.2
6 | websocket-client==1.5.1
7 | pyyaml==6.0
--------------------------------------------------------------------------------
/scripts/release.sh:
--------------------------------------------------------------------------------
#!/bin/bash
#
# Create the official release
#
# Usage: release.sh VERSION [upload]
#

VERSION=$1
REPO=blacktop/ipsw-py
GITHUB_REPO=git@github.com:$REPO

# Quote "$VERSION": unquoted, an empty or space-containing value breaks
# the test and the git invocations below.
if [ -z "$VERSION" ]; then
    echo "Usage: $0 VERSION [upload]"
    exit 1
fi

echo "##> Removing stale build files and other untracked files"
git clean -x -d -i
test -z "$(git clean -x -d -n)" || exit 1

echo "##> Tagging the release as $VERSION"
if ! git tag "$VERSION"; then
    # Tagging failed: accept a pre-existing tag only if it points at HEAD.
    head_commit=$(git show --pretty=format:%H HEAD)
    tag_commit=$(git show --pretty=format:%H "$VERSION")
    if [[ $head_commit != "$tag_commit" ]]; then
        echo "ERROR: tag already exists, but isn't the current HEAD"
        exit 1
    fi
fi
if [[ $2 == 'upload' ]]; then
    echo "##> Pushing tag to github"
    git push "$GITHUB_REPO" "$VERSION" || exit 1
fi


echo "##> sdist & wheel"
python setup.py sdist bdist_wheel

if [[ $2 == 'upload' ]]; then
    echo '##> Uploading sdist to pypi'
    # Keep the * outside the quotes so the glob still expands.
    twine upload "dist/ipsw-$VERSION"*
fi
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | description_file = README.md
3 | license = MIT
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
"""Packaging script for the ipsw Python client library."""

import codecs
import os

from setuptools import find_packages
from setuptools import setup

ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)

# Runtime dependencies (requirements.txt carries the pinned versions).
requirements = [
    "packaging >= 23.0",
    "requests >= 2.28.2",
    "urllib3 >= 1.26.15",
    "websocket-client >= 1.5.1",
    "pyyaml >= 6.0",
]

extras_require = {
    # win32 APIs if on Windows (required for npipe support)
    ':sys_platform == "win32"': "pywin32>=306",
    # This is now a no-op, as similarly the requests[security] extra is
    # a no-op as of requests 2.26.0, this is always available/by default now
    # see https://github.com/psf/requests/pull/5867
    "tls": [],
    # Only required when connecting using the ssh:// protocol
    "ssh": ["paramiko>=3.1.0"],
}

# Strip whitespace and skip blank lines so each entry is a clean requirement
# specifier (the raw lines previously kept their trailing newlines).
with open("./test-requirements.txt") as test_reqs_txt:
    test_requirements = [line.strip() for line in test_reqs_txt if line.strip()]


long_description = ""
with codecs.open("./README.md", encoding="utf-8") as readme_md:
    long_description = readme_md.read()

setup(
    name="ipsw",
    use_scm_version={"write_to": "ipsw/_version.py"},
    description="A Python library for the ipsw Engine API.",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/blacktop/ipsw-py",
    project_urls={
        "Documentation": "https://ipsw-py.readthedocs.io",
        "Changelog": "https://ipsw-py.readthedocs.io/en/stable/change-log.html",  # noqa: E501
        "Source": "https://github.com/blacktop/ipsw-py",
        "Tracker": "https://github.com/blacktop/ipsw-py/issues",
    },
    packages=find_packages(exclude=["tests.*", "tests"]),
    setup_requires=["setuptools_scm"],
    install_requires=requirements,
    tests_require=test_requirements,
    extras_require=extras_require,
    python_requires=">=3.7",
    zip_safe=False,
    test_suite="tests",
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Environment :: Other Environment",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Topic :: Software Development",
        "Topic :: Utilities",
        "License :: OSI Approved :: Apache Software License",
    ],
    maintainer="Blacktop",
)
78 |
--------------------------------------------------------------------------------
/test-requirements.txt:
--------------------------------------------------------------------------------
1 | setuptools==67.7.2
2 | coverage==7.2.5
3 | flake8==6.0.0
4 | pytest==7.3.1
5 | pytest-cov==4.0.0
6 | pytest-timeout==2.1.0
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist = py{37,38,39,310,311}, flake8
3 | skipsdist=True
4 |
5 | [testenv]
6 | usedevelop=True
7 | commands =
8 | py.test -v --cov=ipsw {posargs:tests/unit}
9 | deps =
10 | -r{toxinidir}/test-requirements.txt
11 | -r{toxinidir}/requirements.txt
12 |
13 | [testenv:flake8]
14 | commands = flake8 ipsw tests setup.py
15 | deps =
16 | -r{toxinidir}/test-requirements.txt
--------------------------------------------------------------------------------