├── .flake8 ├── .gitignore ├── LICENSE ├── README.md ├── docs ├── .gitignore ├── Makefile ├── make.bat └── source │ ├── PyQt5 │ ├── QtCore.py │ ├── QtWidgets.py │ └── __init__.py │ ├── conf.py │ ├── getting-started.rst │ ├── ida_diskio.py │ ├── ida_kernwin.py │ ├── ida_loader.py │ ├── idaapi.py │ ├── index.rst │ └── pkg.rst ├── info.json ├── installer.py ├── pkg ├── __init__.py ├── actions │ ├── __init__.py │ └── packagemanager.py ├── commands.py ├── compat.py ├── config.py ├── downloader.py ├── env.py ├── internal_api │ ├── __init__.py │ ├── decoder.py │ ├── kaitai │ │ ├── __init__.py │ │ ├── mach_o.ksy │ │ ├── mach_o.py │ │ ├── microsoft_pe.ksy │ │ └── microsoft_pe.py │ ├── mac.py │ └── win.py ├── logger.py ├── main.py ├── package.py ├── process.py ├── repo.py ├── vendor │ ├── __init__.py │ ├── kaitaistruct.py │ └── semantic_version │ │ ├── __init__.py │ │ ├── base.py │ │ └── django_fields.py └── virtualenv_utils.py └── uninstaller.py /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | exclude=kaitai,installer_inliner.py,semantic_version,docs 3 | max-line-length=160 -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ## Ignore Visual Studio temporary files, build results, and 2 | ## files generated by popular Visual Studio add-ons. 
3 | ## 4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore 5 | 6 | # User-specific files 7 | *.suo 8 | *.user 9 | *.userosscache 10 | *.sln.docstates 11 | 12 | # User-specific files (MonoDevelop/Xamarin Studio) 13 | *.userprefs 14 | 15 | # Build results 16 | [Dd]ebug/ 17 | [Dd]ebugPublic/ 18 | [Rr]elease/ 19 | [Rr]eleases/ 20 | x64/ 21 | x86/ 22 | bld/ 23 | [Bb]in/ 24 | [Oo]bj/ 25 | [Ll]og/ 26 | 27 | # Visual Studio 2015 cache/options directory 28 | .vs/ 29 | # Uncomment if you have tasks that create the project's static files in wwwroot 30 | #wwwroot/ 31 | 32 | # MSTest test Results 33 | [Tt]est[Rr]esult*/ 34 | [Bb]uild[Ll]og.* 35 | 36 | # NUNIT 37 | *.VisualState.xml 38 | TestResult.xml 39 | 40 | # Build Results of an ATL Project 41 | [Dd]ebugPS/ 42 | [Rr]eleasePS/ 43 | dlldata.c 44 | 45 | # .NET Core 46 | project.lock.json 47 | project.fragment.lock.json 48 | artifacts/ 49 | **/Properties/launchSettings.json 50 | 51 | *_i.c 52 | *_p.c 53 | *_i.h 54 | *.ilk 55 | *.meta 56 | *.obj 57 | *.pch 58 | *.pdb 59 | *.pgc 60 | *.pgd 61 | *.rsp 62 | *.sbr 63 | *.tlb 64 | *.tli 65 | *.tlh 66 | *.tmp 67 | *.tmp_proj 68 | *.log 69 | *.vspscc 70 | *.vssscc 71 | .builds 72 | *.pidb 73 | *.svclog 74 | *.scc 75 | 76 | # Chutzpah Test files 77 | _Chutzpah* 78 | 79 | # Visual C++ cache files 80 | ipch/ 81 | *.aps 82 | *.ncb 83 | *.opendb 84 | *.opensdf 85 | *.sdf 86 | *.cachefile 87 | *.VC.db 88 | *.VC.VC.opendb 89 | 90 | # Visual Studio profiler 91 | *.psess 92 | *.vsp 93 | *.vspx 94 | *.sap 95 | 96 | # TFS 2012 Local Workspace 97 | $tf/ 98 | 99 | # Guidance Automation Toolkit 100 | *.gpState 101 | 102 | # ReSharper is a .NET coding add-in 103 | _ReSharper*/ 104 | *.[Rr]e[Ss]harper 105 | *.DotSettings.user 106 | 107 | # JustCode is a .NET coding add-in 108 | .JustCode 109 | 110 | # TeamCity is a build add-in 111 | _TeamCity* 112 | 113 | # DotCover is a Code Coverage Tool 114 | *.dotCover 115 | 116 | # Visual Studio code coverage results 117 
| *.coverage 118 | *.coveragexml 119 | 120 | # NCrunch 121 | _NCrunch_* 122 | .*crunch*.local.xml 123 | nCrunchTemp_* 124 | 125 | # MightyMoose 126 | *.mm.* 127 | AutoTest.Net/ 128 | 129 | # Web workbench (sass) 130 | .sass-cache/ 131 | 132 | # Installshield output folder 133 | [Ee]xpress/ 134 | 135 | # DocProject is a documentation generator add-in 136 | DocProject/buildhelp/ 137 | DocProject/Help/*.HxT 138 | DocProject/Help/*.HxC 139 | DocProject/Help/*.hhc 140 | DocProject/Help/*.hhk 141 | DocProject/Help/*.hhp 142 | DocProject/Help/Html2 143 | DocProject/Help/html 144 | 145 | # Click-Once directory 146 | publish/ 147 | 148 | # Publish Web Output 149 | *.[Pp]ublish.xml 150 | *.azurePubxml 151 | # TODO: Comment the next line if you want to checkin your web deploy settings 152 | # but database connection strings (with potential passwords) will be unencrypted 153 | *.pubxml 154 | *.publishproj 155 | 156 | # Microsoft Azure Web App publish settings. Comment the next line if you want to 157 | # checkin your Azure Web App publish settings, but sensitive information contained 158 | # in these scripts will be unencrypted 159 | PublishScripts/ 160 | 161 | # NuGet Packages 162 | *.nupkg 163 | # The packages folder can be ignored because of Package Restore 164 | **/packages/* 165 | # except build/, which is used as an MSBuild target. 
166 | !**/packages/build/ 167 | # Uncomment if necessary however generally it will be regenerated when needed 168 | #!**/packages/repositories.config 169 | # NuGet v3's project.json files produces more ignoreable files 170 | *.nuget.props 171 | *.nuget.targets 172 | 173 | # Microsoft Azure Build Output 174 | csx/ 175 | *.build.csdef 176 | 177 | # Microsoft Azure Emulator 178 | ecf/ 179 | rcf/ 180 | 181 | # Windows Store app package directories and files 182 | AppPackages/ 183 | BundleArtifacts/ 184 | Package.StoreAssociation.xml 185 | _pkginfo.txt 186 | 187 | # Visual Studio cache files 188 | # files ending in .cache can be ignored 189 | *.[Cc]ache 190 | # but keep track of directories ending in .cache 191 | !*.[Cc]ache/ 192 | 193 | # Others 194 | ClientBin/ 195 | ~$* 196 | *~ 197 | *.dbmdl 198 | *.dbproj.schemaview 199 | *.jfm 200 | *.pfx 201 | *.publishsettings 202 | node_modules/ 203 | orleans.codegen.cs 204 | 205 | # Since there are multiple workflows, uncomment next line to ignore bower_components 206 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) 207 | #bower_components/ 208 | 209 | # RIA/Silverlight projects 210 | Generated_Code/ 211 | 212 | # Backup & report files from converting an old project file 213 | # to a newer Visual Studio version. 
Backup files are not needed, 214 | # because we have git ;-) 215 | _UpgradeReport_Files/ 216 | Backup*/ 217 | UpgradeLog*.XML 218 | UpgradeLog*.htm 219 | 220 | # SQL Server files 221 | *.mdf 222 | *.ldf 223 | 224 | # Business Intelligence projects 225 | *.rdl.data 226 | *.bim.layout 227 | *.bim_*.settings 228 | 229 | # Microsoft Fakes 230 | FakesAssemblies/ 231 | 232 | # GhostDoc plugin setting file 233 | *.GhostDoc.xml 234 | 235 | # Node.js Tools for Visual Studio 236 | .ntvs_analysis.dat 237 | 238 | # Visual Studio 6 build log 239 | *.plg 240 | 241 | # Visual Studio 6 workspace options file 242 | *.opt 243 | 244 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 245 | *.vbw 246 | 247 | # Visual Studio LightSwitch build output 248 | **/*.HTMLClient/GeneratedArtifacts 249 | **/*.DesktopClient/GeneratedArtifacts 250 | **/*.DesktopClient/ModelManifest.xml 251 | **/*.Server/GeneratedArtifacts 252 | **/*.Server/ModelManifest.xml 253 | _Pvt_Extensions 254 | 255 | # Paket dependency manager 256 | .paket/paket.exe 257 | paket-files/ 258 | 259 | # FAKE - F# Make 260 | .fake/ 261 | 262 | # JetBrains Rider 263 | .idea/ 264 | *.sln.iml 265 | 266 | # CodeRush 267 | .cr/ 268 | 269 | # Python Tools for Visual Studio (PTVS) 270 | __pycache__/ 271 | *.pyc 272 | 273 | # Cake - Uncomment if you are using it 274 | # tools/ 275 | 276 | .vscode -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Jinmo 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the 
Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Package manager for IDA Pro 2 | 3 | > WARNING: This project is still between alpha and beta state. Feel free to report bugs if this is not working! 4 | 5 | ## How to install 6 | 7 | Execute the script below in IDAPython console (minified [`installer.py`](https://github.com/Jinmo/idapkg/raw/master/installer.py).) 
8 | 9 | ``` 10 | import zipfile,tempfile,sys,os,threading,shutil,importlib 11 | def install():P=os.path;tag='v0.1.4';n=tempfile.NamedTemporaryFile(delete=False,suffix='.zip');n.close();print('Started downloading idapkg...');importlib.import_module('urllib.request' if sys.version_info.major==3 else 'urllib').urlretrieve('https://github.com/Jinmo/idapkg/archive/%s.zip'%tag,n.name);f=open(n.name,'rb+');f.seek(0,os.SEEK_END);f.truncate(f.tell()-0x28);f.close();z=zipfile.ZipFile(n.name);base=z.namelist()[0];sys.path.append(P.join(n.name,base));from pkg.config import g;import pkg.main as main;packages_path=g['path']['packages'];z.extractall(packages_path);z.close();dest=P.join(packages_path,'idapkg');P.isdir(dest)and shutil.rmtree(dest);os.rename(P.join(packages_path,base),dest);main.update_pythonrc();main.init_environment(False);print('Installation success! Please restart IDA to use idapkg.');os.unlink(n.name) 12 | threading.Thread(target=install).start() 13 | ``` 14 | 15 | Then you can access related actions via command palette (Ctrl+Shift+P on windows/mac/linux, or Command+Shift+P on mac) after restarting IDA Pro. 16 | 17 | ### Testing latest changes 18 | 19 | To test the master branch, you can replace `tag='v...'` into `tag='master'`. 20 | 21 | ## What file is created 22 | 23 | `~(Your home directory)/idapkg`, and some lines in idapythonrc.py will be created. 24 | 25 | ``` 26 | idapkg/ 27 | packages/ 28 | python/ 29 | config.json 30 | ``` 31 | 32 | ### packages/ 33 | 34 | When a package is installed, `packages/` is created and further added to `IDAUSR` variable. This enables following folders to be loaded by IDA Pro. 35 | 36 | ``` 37 | 38 | plugins/ 39 | procs/ 40 | loaders/ 41 | til/ 42 | sig/ 43 | ids/ 44 | ``` 45 | 46 | ### python/ - virtualenv 47 | 48 | To manage PIP packages easily, this creates a virtualenv and creates `pip`, `easy_install` and other virtalenv-related files and activates the environment. 
49 | 50 | TL;DR If you run `pip install`, they are installed into `python/lib/*` (`Lib` on windows, all same.) 51 | 52 | ### config.json 53 | 54 | In fact, all paths above are configurable! 55 | 56 | ```json 57 | { 58 | "path": { 59 | "virtualenv": "...\\idapkg\\python", 60 | "packages": "...\\idapkg\\packages" 61 | }, 62 | "repos": [ 63 | "https://api.idapkg.com", 64 | "github:Jinmo/idapkg-repo/master" 65 | ] 66 | } 67 | ``` 68 | 69 | And you can use your private repo for fetching packages. The api server will be uploaded soon! 70 | 71 | ## Writing a package 72 | 73 | See [Writing a package (link)](https://idapkg.com/getting-started). 74 | 75 | ## TODO 76 | 77 | Currently finding way to reliably and generally [update `IDAUSR` variable on all platforms](https://github.com/Jinmo/idapkg/blob/master/pkg/internal_api/win.py). Currently only supporting Windows and Mac OS X. -------------------------------------------------------------------------------- /docs/.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.egg 3 | *.so 4 | *.swp 5 | 6 | .dir-locals.el 7 | .cache/ 8 | .idea 9 | .mypy_cache/ 10 | .pytest_cache/ 11 | .ropeproject/ 12 | TAGS 13 | .tags 14 | .tox/ 15 | .tx/ 16 | .venv/ 17 | .coverage 18 | htmlcov 19 | .DS_Store 20 | sphinx/pycode/Grammar*pickle 21 | distribute-* 22 | 23 | env/ 24 | build/ 25 | dist/ 26 | Sphinx.egg-info/ 27 | doc/_build/ 28 | doc/locale/ 29 | tests/.coverage 30 | tests/build/ 31 | utils/regression_test.js 32 | 33 | node_modules/ 34 | source/_build -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 
5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SOURCEDIR = source 8 | BUILDDIR = build 9 | 10 | # Put it first so that "make" without argument is like "make help". 11 | help: 12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 13 | 14 | .PHONY: help Makefile 15 | 16 | # Catch-all target: route all unknown targets to Sphinx using the new 17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 18 | %: Makefile 19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 
23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/source/PyQt5/QtCore.py: -------------------------------------------------------------------------------- 1 | class QObject(object): 2 | pass 3 | 4 | class QCoreApplication(object): 5 | pass 6 | 7 | class QEvent(object): 8 | pass 9 | 10 | pyqtSignal = lambda: True -------------------------------------------------------------------------------- /docs/source/PyQt5/QtWidgets.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Jinmo/idapkg/5d6af9bd59c5dc886d68335119fae41491f06ea7/docs/source/PyQt5/QtWidgets.py -------------------------------------------------------------------------------- /docs/source/PyQt5/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Jinmo/idapkg/5d6af9bd59c5dc886d68335119fae41491f06ea7/docs/source/PyQt5/__init__.py -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration file for the Sphinx documentation builder. 4 | # 5 | # This file does only contain a selection of the most common options. For a 6 | # full list see the documentation: 7 | # http://www.sphinx-doc.org/en/master/config 8 | 9 | # -- Path setup -------------------------------------------------------------- 10 | 11 | # If extensions (or modules to document with autodoc) are in another directory, 12 | # add these directories to sys.path here. 
If the directory is relative to the 13 | # documentation root, use os.path.abspath to make it absolute, like shown here. 14 | # 15 | import os 16 | import sys 17 | sys.path.insert(0, os.path.abspath('../../')) 18 | sys.path.insert(0, os.path.abspath('.')) 19 | 20 | 21 | # -- Project information ----------------------------------------------------- 22 | 23 | project = u'idapkg' 24 | copyright = u'2019, jinmo' 25 | author = u'jinmo' 26 | 27 | # The short X.Y version 28 | version = u'' 29 | # The full version, including alpha/beta/rc tags 30 | release = u'' 31 | 32 | 33 | # -- General configuration --------------------------------------------------- 34 | 35 | # If your documentation needs a minimal Sphinx version, state it here. 36 | # 37 | # needs_sphinx = '1.0' 38 | 39 | # Add any Sphinx extension module names here, as strings. They can be 40 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 41 | # ones. 42 | extensions = [ 43 | 'sphinx.ext.autodoc', 44 | 'sphinx.ext.intersphinx' 45 | ] 46 | 47 | intersphinx_mapping = {'python': ('https://docs.python.org/2', None)} 48 | 49 | # Add any paths that contain templates here, relative to this directory. 50 | templates_path = ['_templates'] 51 | 52 | # The suffix(es) of source filenames. 53 | # You can specify multiple suffix as a list of string: 54 | # 55 | # source_suffix = ['.rst', '.md'] 56 | source_suffix = '.rst' 57 | 58 | # The master toctree document. 59 | master_doc = 'index' 60 | 61 | # The language for content autogenerated by Sphinx. Refer to documentation 62 | # for a list of supported languages. 63 | # 64 | # This is also used if you do content translation via gettext catalogs. 65 | # Usually you set "language" from the command line for these cases. 66 | language = None 67 | 68 | # List of patterns, relative to source directory, that match files and 69 | # directories to ignore when looking for source files. 70 | # This pattern also affects html_static_path and html_extra_path. 
71 | exclude_patterns = [] 72 | 73 | # The name of the Pygments (syntax highlighting) style to use. 74 | pygments_style = None 75 | 76 | 77 | # -- Options for HTML output ------------------------------------------------- 78 | 79 | # The theme to use for HTML and HTML Help pages. See the documentation for 80 | # a list of builtin themes. 81 | # 82 | html_theme = 'sphinx_rtd_theme' 83 | 84 | # Theme options are theme-specific and customize the look and feel of a theme 85 | # further. For a list of options available for each theme, see the 86 | # documentation. 87 | # 88 | # html_theme_options = {} 89 | 90 | # Add any paths that contain custom static files (such as style sheets) here, 91 | # relative to this directory. They are copied after the builtin static files, 92 | # so a file named "default.css" will overwrite the builtin "default.css". 93 | html_static_path = ['_static'] 94 | 95 | # Custom sidebar templates, must be a dictionary that maps document names 96 | # to template names. 97 | # 98 | # The default sidebars (for documents that don't match any pattern) are 99 | # defined by theme itself. Builtin themes are using these templates by 100 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 101 | # 'searchbox.html']``. 102 | # 103 | # html_sidebars = {} 104 | 105 | 106 | # -- Options for HTMLHelp output --------------------------------------------- 107 | 108 | # Output file base name for HTML help builder. 109 | htmlhelp_basename = 'idapkgdoc' 110 | 111 | 112 | # -- Options for LaTeX output ------------------------------------------------ 113 | 114 | latex_elements = { 115 | # The paper size ('letterpaper' or 'a4paper'). 116 | # 117 | # 'papersize': 'letterpaper', 118 | 119 | # The font size ('10pt', '11pt' or '12pt'). 120 | # 121 | # 'pointsize': '10pt', 122 | 123 | # Additional stuff for the LaTeX preamble. 
124 | # 125 | # 'preamble': '', 126 | 127 | # Latex figure (float) alignment 128 | # 129 | # 'figure_align': 'htbp', 130 | } 131 | 132 | # Grouping the document tree into LaTeX files. List of tuples 133 | # (source start file, target name, title, 134 | # author, documentclass [howto, manual, or own class]). 135 | latex_documents = [ 136 | (master_doc, 'idapkg.tex', u'idapkg Documentation', 137 | u'jinmo', 'manual'), 138 | ] 139 | 140 | 141 | # -- Options for manual page output ------------------------------------------ 142 | 143 | # One entry per manual page. List of tuples 144 | # (source start file, name, description, authors, manual section). 145 | man_pages = [ 146 | (master_doc, 'idapkg', u'idapkg Documentation', 147 | [author], 1) 148 | ] 149 | 150 | 151 | # -- Options for Texinfo output ---------------------------------------------- 152 | 153 | # Grouping the document tree into Texinfo files. List of tuples 154 | # (source start file, target name, title, author, 155 | # dir menu entry, description, category) 156 | texinfo_documents = [ 157 | (master_doc, 'idapkg', u'idapkg Documentation', 158 | author, 'idapkg', 'One line description of project.', 159 | 'Miscellaneous'), 160 | ] 161 | 162 | 163 | # -- Options for Epub output ------------------------------------------------- 164 | 165 | # Bibliographic Dublin Core info. 166 | epub_title = project 167 | 168 | # The unique identifier of the text. This can be a ISBN number 169 | # or the project homepage. 170 | # 171 | # epub_identifier = '' 172 | 173 | # A unique identification for the text. 174 | # 175 | # epub_uid = '' 176 | 177 | # A list of files that should not be packed into the epub file. 
178 | epub_exclude_files = ['search.html'] 179 | -------------------------------------------------------------------------------- /docs/source/getting-started.rst: -------------------------------------------------------------------------------- 1 | Getting Started: Writing your plugins 2 | ------------------------------------- 3 | 4 | The package format is same as IDA loads the plugin, except info.json. 5 | 6 | Package format 7 | <<<<<<<<<<<<< 8 | 9 | 1. plugins/ : Plugins directory 10 | ================================ 11 | 12 | 13 | ... and procs/ has processor modules, loaders/ has loader modules. 14 | Extension is important: 15 | 16 | - .py: idapython 17 | - .dll/dylib/so: native plugins 18 | - 64.dll/64.dylib/64.so: native plugins for EA64 19 | - .idc: idc 20 | 21 | Type libraries, FLIRT signatures, and known function types can be bundled 22 | into til/ sig/ ids/. 23 | Please note that these directories are also in IDA Pro's installed folder. 24 | 25 | 2. info.json (required) 26 | ================================ 27 | 28 | You can bundle your IDA plugin with `info.json`, and `README.md` (if needed). 29 | 30 | .. code-block :: json 31 | 32 | { 33 | "_id": "my-plugin", 34 | "name": "Community Headers", 35 | "version": "1.0.0", 36 | "description": "Loads C/C++ header from online" 37 | } 38 | 39 | 3. README.md 40 | ================================ 41 | 42 | .. code-block :: md 43 | 44 | My Plugin 45 | === 46 | 47 | Awesome description here. 48 | 49 | Testing & Uploading 50 | <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 51 | 52 | Place your package folder under `idapkg/packages` 53 | or `"path" > "packages"` entry on config.json, 54 | then the package will be loaded after restarting IDA 55 | or executing `pkg.refresh()`. 56 | 57 | Alternatively, you can execute :code:`pkg.local("package name or path")`. 58 | 59 | Debugging the installer 60 | ======================= 61 | 62 | Installer scripts are executed first time, and should raise exception if fails. 63 | 64 | .. 
code-block:: python 65 | 66 | p = pkg.local("name or path") 67 | p.install() 68 | 69 | `Finally, you can zip and upload your package at /upload `_. The package can be managed via the repo. 70 | 71 | Optional fields on :code:`info.json` 72 | ------------------------------------- 73 | 74 | \_id (actual path), name, version, description are needed. 75 | 76 | installers: Installation scripts 77 | <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 78 | 79 | .. code-block:: javascript 80 | 81 | "installers": [ 82 | "installers/pip.py" 83 | ] 84 | 85 | They must be python scripts. The entries are executed in order. 86 | 87 | Before execution, `os.chdir(package_root)` is done. 88 | `__file__` is also provided. 89 | Raise exception to abort installation, and the files will be removed. 90 | 91 | For installing pip packages, see below. 92 | 93 | Example: Python dependencies 94 | ================================ 95 | 96 | idapkg creates and uses virtualenv at `~/idapkg/python.` 97 | pip for this env is also available. 98 | 99 | .. code-block:: python 100 | 101 | import os 102 | import pkg.env as env 103 | 104 | if env.os == 'linux' and env.version < 7: 105 | assert not os.system('apt-get install -y php7.2-cli') # Bonus! 106 | 107 | assert not os.system('pip install -r requirements.txt') 108 | 109 | 110 | `pkg.env` module is from idapkg, and it has useful variables too. 111 | 112 | - `env.os`: operating system, one of ('win', 'mac', 'linux') 113 | - `env.ea`: current ea, one of (32, 64) 114 | - `env.version`: python Decimal object for IDA Pro's version 115 | (ex. `Decimal(6.95)`) 116 | - `env.version_info`: namedtuple with version details 117 | (ex. `VersionPair(major=7, minor=0, micro=171130)`) 118 | 119 | For `pkg.*` references, see `pkg module` section of `API docs `_. 120 | 121 | dependencies: Dependencies between packages 122 | <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 123 | 124 | A package can have dependency list. 125 | The loading order is also sorted regarding to dependencies. 
126 | 127 | .. code-block:: javascript 128 | 129 | "dependencies": { 130 | "ifred": "*" 131 | } 132 | 133 | keywords: Package keywords 134 | <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 135 | 136 | Array of words that represents your package. 137 | Note that `procs`, `plugins`, and some words are 138 | automatically added depending on the content. 139 | 140 | .. code-block:: javascript 141 | 142 | "keywords": ["theme"] 143 | 144 | homepage: Your project homepage 145 | <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 146 | 147 | Add website to package information page. 148 | 149 | .. code-block:: javascript 150 | 151 | "homepage": "https://your_site.com" 152 | 153 | Additional notes 154 | -------------------------------- 155 | 156 | `idapkg/packages/` is added to :py:data:`sys.path` at startup, 157 | so placing python modules inside the package root enables importing 158 | your packages in IDAPython. 159 | 160 | -------------------------------------------------------------------------------- /docs/source/ida_diskio.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Jinmo/idapkg/5d6af9bd59c5dc886d68335119fae41491f06ea7/docs/source/ida_diskio.py -------------------------------------------------------------------------------- /docs/source/ida_kernwin.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Jinmo/idapkg/5d6af9bd59c5dc886d68335119fae41491f06ea7/docs/source/ida_kernwin.py -------------------------------------------------------------------------------- /docs/source/ida_loader.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Jinmo/idapkg/5d6af9bd59c5dc886d68335119fae41491f06ea7/docs/source/ida_loader.py -------------------------------------------------------------------------------- /docs/source/idaapi.py: -------------------------------------------------------------------------------- 1 
| def idadir(suffix): 2 | return suffix -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to idapkg's documentation! 2 | ================================== 3 | 4 | idapkg is a package manager for IDA Pro utilizing 5 | `IDAUSR env `_. 6 | 7 | A Package is a collection of plugins, processor, loader modules, and til/sig/ids files. They can be downloaded from `public `_ / `private `_ repos. A package's directory structure is like below: 8 | 9 | .. code-block :: diff 10 | 11 | Required: 12 | + info.json 13 | 14 | Optional for IDA: 15 | + plugins/ 16 | ... 17 | + procs/ 18 | ... 19 | + loaders/ 20 | ... 21 | + til/ sig/ ids/ 22 | ... 23 | 24 | Optional for package: 25 | + README.md 26 | 27 | .. toctree:: 28 | :maxdepth: 1 29 | :caption: Contents: 30 | 31 | getting-started 32 | pkg 33 | 34 | 35 | 36 | Indices and tables 37 | ================== 38 | 39 | * :ref:`genindex` 40 | * :ref:`modindex` 41 | -------------------------------------------------------------------------------- /docs/source/pkg.rst: -------------------------------------------------------------------------------- 1 | pkg module 2 | =========== 3 | 4 | .. automodule:: pkg.commands 5 | :members: 6 | 7 | pkg.package: Package-related classes 8 | ############################################ 9 | 10 | .. automodule:: pkg.package 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | pkg.repo: Repository information 16 | ############################################ 17 | 18 | .. automodule:: pkg.repo 19 | :members: 20 | :show-inheritance: 21 | 22 | pkg.config: config.json as :code:`g` objects 23 | ############################################ 24 | 25 | .. automodule:: pkg.config 26 | 27 | pkg.env: IDA version, and EA 28 | ############################################ 29 | 30 | .. 
automodule:: pkg.env 31 | :members: os, ea, version, version_info 32 | :show-inheritance: 33 | 34 | pkg.process: process launchers with output redirected 35 | ##################################################### 36 | 37 | .. automodule:: pkg.process 38 | :members: Popen, system 39 | 40 | -------------------------------------------------------------------------------- /info.json: -------------------------------------------------------------------------------- 1 | { 2 | "_id": "idapkg", 3 | "version": "0.1.4", 4 | "name": "idapkg", 5 | "description": "idapkg Core package", 6 | "uninstallers": [ 7 | "uninstaller.py" 8 | ] 9 | } 10 | -------------------------------------------------------------------------------- /installer.py: -------------------------------------------------------------------------------- 1 | import zipfile 2 | import tempfile 3 | import sys 4 | import os 5 | import threading 6 | import shutil 7 | import importlib 8 | 9 | 10 | def install(): 11 | tag = 'v0.1.4' 12 | 13 | n = tempfile.NamedTemporaryFile(delete=False, suffix='.zip') 14 | n.close() 15 | 16 | print('Started downloading idapkg...') 17 | importlib.import_module('urllib.request' if sys.version_info.major == 3 else 'urllib').urlretrieve( 18 | 'https://github.com/Jinmo/idapkg/archive/%s.zip' % tag, n.name) 19 | 20 | f = open(n.name, 'rb+') 21 | f.seek(0, os.SEEK_END) 22 | f.truncate(f.tell() - 0x28) 23 | f.close() 24 | 25 | z = zipfile.ZipFile(n.name) 26 | base = z.namelist()[0] 27 | 28 | sys.path.append(os.path.join(n.name, base)) 29 | 30 | from pkg.config import g 31 | import pkg.main as main 32 | 33 | packages_path = g['path']['packages'] 34 | z.extractall(packages_path) 35 | z.close() 36 | 37 | dest = os.path.join(packages_path, 'idapkg') 38 | 39 | os.path.isdir(dest) and shutil.rmtree(dest) 40 | os.rename(os.path.join(packages_path, base), dest) 41 | 42 | main.update_pythonrc() 43 | main.init_environment(False) 44 | 45 | print('Installation success! 
Please restart IDA to use idapkg.') 46 | os.unlink(n.name) 47 | 48 | 49 | threading.Thread(target=install).start() 50 | -------------------------------------------------------------------------------- /pkg/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = '0.1.4' 2 | 3 | import sys 4 | 5 | from .commands import * 6 | 7 | # expose 'pkg' in global namespace 8 | __builtins__[__name__] = sys.modules[__name__] 9 | -------------------------------------------------------------------------------- /pkg/actions/__init__.py: -------------------------------------------------------------------------------- 1 | import threading 2 | 3 | 4 | def register_action(name, shortcut=''): 5 | """Helper for registering actions""" 6 | 7 | def handler(f): 8 | # 1) Create the handler class 9 | class MyHandler(ida_kernwin.action_handler_t): 10 | def __init__(self): 11 | ida_kernwin.action_handler_t.__init__(self) 12 | 13 | def activate(self, _ctx): 14 | t = threading.Thread(target=f) 15 | t.start() 16 | return 1 17 | 18 | # This action is always available. 19 | def update(self, _ctx): 20 | return ida_kernwin.AST_ENABLE_ALWAYS 21 | 22 | # 2) Describe the action 23 | action_desc = ida_kernwin.action_desc_t( 24 | name, # The action name. This acts like an ID and must be unique 25 | name, # The action text. 26 | MyHandler(), # The action handler. 27 | shortcut, # Optional: the action shortcut 28 | name, # Optional: the action tooltip (available in menus/toolbar) 29 | 0) # Optional: the action icon (shows when in menus/toolbars) 30 | 31 | # 3) Register the action 32 | ida_kernwin.register_action(action_desc) 33 | return f 34 | 35 | return handler 36 | 37 | 38 | try: 39 | import ida_kernwin 40 | from . import packagemanager 41 | except ImportError: 42 | # actions are currently supported on ifred only. 
@register_action('Packages: Upgrade Package')
def upgrade_package():
    """Show a palette of installed packages; picking one upgrades it."""
    pkgs = LocalPackage.all()
    actions = [
        (lambda pkg: Action(id=pkg.id, name='%s %s' % (pkg.id, pkg.version),
                            handler=lambda action: _run_in_background(
                                lambda: _upgrade_package(action.id))))(item)
        for item in pkgs]

    # Fixed copy-paste from remove_package: the palette id and prompt
    # previously said 'remove' in the upgrade action.
    show_palette(Palette('upgrade', "Enter package name to upgrade...", actions))
@register_action('Packages: Enable Package')
def enable_package():
    """Show a palette of disabled packages; picking one re-enables it."""
    pkgs = LocalPackage.all(disabled=True)
    actions = [
        (lambda pkg: Action(id=pkg.id, name='%s %s' % (pkg.id, pkg.version),
                            handler=lambda action: _run_in_background(
                                lambda: _enable_package(action.id))))(item)
        for item in pkgs]

    # Fixed copy-paste from disable_package: the palette id and prompt
    # previously said 'disable' in the enable action.
    show_palette(Palette('enable', "Enter package name to enable...", actions))
3 | """ 4 | import re 5 | import threading 6 | 7 | from .config import g 8 | from .package import LocalPackage 9 | from .repo import Repository 10 | from .vendor import semantic_version 11 | 12 | __all__ = ['install', 'remove', 'local', 'remote', 'refresh', 'upgrade'] 13 | 14 | 15 | def _parse_spec(spec): 16 | match = re.match(r"^([a-zA-Z0-9\-][a-zA-Z0-9_\-]{3,214})(.*)$", spec) 17 | name = match.group(1) 18 | version = match.group(2).strip() 19 | 20 | # Validate spec by parsing it 21 | version = '*' if not version else version 22 | semantic_version.Spec(version) 23 | 24 | return name, version 25 | 26 | 27 | def install(spec, repo=None, upgrade=False): 28 | """ 29 | Download and install a package from specified repository. 30 | See :meth:`install_from_repo`. 31 | 32 | :param spec: `name==version`, or just `name` only. 33 | :type spec: str 34 | :param repo: URL of the repository. Default: :code:`g['repos']` 35 | :type repo: list(str) or None 36 | :param upgrade: Upgrade when already installed if True. 37 | """ 38 | 39 | name, version = _parse_spec(spec) 40 | 41 | def _install_from_repositories(repos): 42 | pkg = remote(name, repos) 43 | if pkg is None: 44 | raise Exception('Package not found in all repositories: %r' % name) 45 | 46 | pkg.install(upgrade) 47 | 48 | if repo is None: 49 | repo = g['repos'] 50 | 51 | t = threading.Thread(target=_install_from_repositories, args=(repo,)) 52 | t.start() 53 | return t 54 | 55 | 56 | def remove(name): 57 | """ 58 | Remove a package locally (LocalPackage.remove). 59 | """ 60 | pkg = LocalPackage.by_name(name) 61 | if pkg: 62 | return pkg.remove() 63 | 64 | 65 | def local(name): 66 | """ 67 | Find an installed package (LocalPackage.by_name). 68 | 69 | :returns: None if package is not found, else LocalPackage instance. 70 | :rtype: LocalPackage 71 | """ 72 | return LocalPackage.by_name(name) 73 | 74 | 75 | def remote(name, repo=None): 76 | """ 77 | Find a remote package from given repos. 
def refresh():
    """
    Rescan installed packages and (re)load each of them.
    """
    installed = LocalPackage.all()
    for package in installed:
        package.load()
    return True
def basedir():
    """Return the idapkg root directory (``~/idapkg``)."""
    return os.path.expanduser(os.path.join('~', 'idapkg'))


def config_path():
    """Return the path of the user's config file (``~/idapkg/config.json``)."""
    return os.path.join(basedir(), 'config.json')


def _idapkg_dir(*suffixes):
    """Join *suffixes* onto :func:`basedir`, creating the directory if missing."""
    path = os.path.join(basedir(), *suffixes)
    if not os.path.isdir(path):
        os.makedirs(path)
    return path


def _load_config():
    """Parse and return config.json; raises IOError/ValueError on failure."""
    # Fixed: close the file handle deterministically instead of leaking it
    # until garbage collection.
    with open(config_path(), 'r') as config_file:
        return json.load(config_file)


def _save_config(data):
    """Write *data* to config.json, pretty-printed with 4-space indent."""
    with open(config_path(), 'w') as config_file:
        json.dump(data, config_file, indent=4)


def _normalized_type(obj):
    """Collapse all string types (str/unicode) to ``basestring``; else type(obj)."""
    if isinstance(obj, basestring):
        return basestring
    return type(obj)
_normalized_type(reference[key]) 81 | if type_tar != type_ref: 82 | changed = True 83 | obj[key] = copy.deepcopy(value) 84 | print('Type is different (%r): %r (saved) vs %r, replacing with initial value %r' 85 | % ('/'.join(path), type_tar, type_ref, value)) 86 | if isinstance(obj[key], dict): 87 | changed_, obj[key] = _fix_missing_config(obj[key], value, path + [key]) 88 | changed = changed or changed_ 89 | 90 | return changed, obj 91 | 92 | 93 | __initial_config = { 94 | 'path': { 95 | 'virtualenv': _idapkg_dir('python'), 96 | 'packages': _idapkg_dir('packages') 97 | }, 98 | 'repos': [ 99 | 'https://api.idapkg.com' 100 | ], 101 | 'idausr_native_bases': { 102 | current_os: { 103 | version_info.str(): [None, None] 104 | } 105 | }, 106 | 'ignored_packages': [] 107 | } 108 | 109 | # Step 1. create configuration 110 | try: 111 | g = _load_config() 112 | config_changed, g = _fix_missing_config(g, __initial_config) 113 | if config_changed: 114 | _save_config(g) 115 | except (IOError, ValueError): 116 | # save initial config 117 | print('Generating initial config at', config_path()) 118 | g = copy.deepcopy(__initial_config) 119 | _save_config(__initial_config) 120 | 121 | # Step 2. add sys.path 122 | sys.path.append(g['path']['packages']) 123 | -------------------------------------------------------------------------------- /pkg/downloader.py: -------------------------------------------------------------------------------- 1 | """ 2 | HTTP downloader as minimal as possible. 3 | I'm not sure if its codebase is minimal enough. 
4 | """ 5 | 6 | import shutil 7 | import tempfile 8 | 9 | from .compat import ( 10 | HTTPSConnection, HTTPConnection, urlparse, urljoin, 11 | CannotSendRequest, ResponseNotReady, RemoteDisconnected) 12 | 13 | # Supported protocols 14 | SCHEME_MAP = { 15 | 'https': HTTPSConnection, 16 | 'http': HTTPConnection 17 | } 18 | 19 | # Default value of max retry count for one fetch 20 | RETRY_COUNT = 3 21 | 22 | # Keep-alive connections 23 | CACHED_CONNECTIONS = {} 24 | 25 | 26 | def _fetch(orig_url, timeout, retry=RETRY_COUNT): 27 | url = urlparse(orig_url) 28 | cls = SCHEME_MAP[url.scheme] 29 | 30 | if not retry: 31 | raise Exception("Max retries exceeded.") 32 | 33 | kwargs = {} 34 | if timeout is not None: 35 | kwargs['timeout'] = timeout 36 | 37 | key = (url.scheme, url.netloc) 38 | if key in CACHED_CONNECTIONS: 39 | conn = CACHED_CONNECTIONS[key] 40 | else: 41 | conn = cls(url.netloc, **kwargs) 42 | CACHED_CONNECTIONS[key] = conn 43 | try: 44 | conn.request( 45 | "GET", 46 | ''.join((url.path or '/', '?' 
+ url.query if url.query else '')), 47 | headers={'Connection': 'Keep-Alive'}) 48 | except (CannotSendRequest, OSError): # Keep-alive expired 49 | del CACHED_CONNECTIONS[key] 50 | return _fetch(orig_url, timeout, retry) 51 | try: 52 | res = conn.getresponse() 53 | except (ResponseNotReady, RemoteDisconnected): 54 | # RemoteDisconnected is also triggered when keep-alive is disconnected 55 | # However it's safe to decrement retry count 56 | return _fetch(orig_url, timeout, retry - 1) 57 | 58 | loc = res.getheader("Location", None) 59 | if res.getheader("Connection", "").lower() == "close": 60 | del CACHED_CONNECTIONS[key] 61 | 62 | if res.status // 100 == 3 and loc: 63 | new_url = urljoin(orig_url, loc) 64 | return _fetch(new_url, timeout) 65 | 66 | if res.status // 100 != 2: 67 | raise Exception("HTTP status code: %d %s (from %s)" % (res.status, res.reason, orig_url)) 68 | 69 | return res 70 | 71 | 72 | def download(url, timeout=None, to_file=False): 73 | res = _fetch(url, timeout) 74 | 75 | # Some interfaces like ZipFile need some additional methods. 76 | if to_file: 77 | out_file = tempfile.TemporaryFile() 78 | shutil.copyfileobj(res, out_file) 79 | out_file.seek(0) 80 | return out_file 81 | else: 82 | return res 83 | 84 | 85 | if __name__ == '__main__': 86 | print(repr(download('https://idapkg.com', to_file=True).read())) 87 | -------------------------------------------------------------------------------- /pkg/env.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=invalid-name 2 | """ 3 | :os: operating system. 'win' | 'mac' | 'linux' 4 | :ea: current ea. 32 | 64 5 | :version: Decimal object for IDA Pro's version (ex. :code:`Decimal(6.95)`) 6 | :version_info: 7 | namedtuple with version details 8 | (ex. 
class version_info_cls(collections.namedtuple('VersionPair', 'major minor micro')):
    """IDA version triple (major, minor, micro) with a convenience string form."""

    def str(self):
        """Return the dotted ``major.minor.micro`` representation."""
        return '.'.join(str(part) for part in self)
0 : 345 | ((b6 % 128) << 35) + ((b6 & 0x80 == 0) ? 0 : 346 | ((b7 % 128) << 42) + ((b7 & 0x80 == 0) ? 0 : 347 | ((b8 % 128) << 49) + ((b8 & 0x80 == 0) ? 0 : 348 | ((b9 % 128) << 56) + ((b8 & 0x80 == 0) ? 0 : 349 | ((b10 % 128) << 63)))))))))) 350 | -webide-parse-mode: eager 351 | -webide-representation: "{value:dec}" 352 | segment_command_64: 353 | seq: 354 | - id: segname 355 | type: str 356 | size: 16 357 | pad-right: 0 358 | encoding: ascii 359 | - id: vmaddr 360 | type: u8 361 | - id: vmsize 362 | type: u8 363 | - id: fileoff 364 | type: u8 365 | - id: filesize 366 | type: u8 367 | - id: maxprot 368 | type: vm_prot 369 | - id: initprot 370 | type: vm_prot 371 | - id: nsects 372 | type: u4 373 | - id: flags 374 | type: u4 375 | - id: sections 376 | type: section_64 377 | repeat: expr 378 | repeat-expr: nsects 379 | types: 380 | section_64: 381 | seq: 382 | - id: sect_name 383 | -orig-id: sectname 384 | size: 16 385 | type: str 386 | pad-right: 0 387 | encoding: ascii 388 | - id: seg_name 389 | -orig-id: segname 390 | size: 16 391 | type: str 392 | pad-right: 0 393 | encoding: ascii 394 | - id: addr 395 | type: u8 396 | - id: size 397 | type: u8 398 | - id: offset 399 | type: u4 400 | - id: align 401 | type: u4 402 | - id: reloff 403 | type: u4 404 | - id: nreloc 405 | type: u4 406 | - id: flags 407 | type: u4 408 | - id: reserved1 409 | type: u4 410 | - id: reserved2 411 | type: u4 412 | - id: reserved3 413 | type: u4 414 | instances: 415 | data: 416 | io: _root._io 417 | pos: offset 418 | size: size 419 | -webide-representation: '{sect_name}: offs={offset}, size={size}' 420 | -webide-representation: '{segname} ({initprot}): offs={fileoff}, size={filesize}' 421 | dyld_info_command: 422 | seq: 423 | - id: rebase_off 424 | type: u4 425 | - id: rebase_size 426 | type: u4 427 | - id: bind_off 428 | type: u4 429 | - id: bind_size 430 | type: u4 431 | - id: weak_bind_off 432 | type: u4 433 | - id: weak_bind_size 434 | type: u4 435 | - id: lazy_bind_off 436 | type: u4 
437 | - id: lazy_bind_size 438 | type: u4 439 | - id: export_off 440 | type: u4 441 | - id: export_size 442 | type: u4 443 | -webide-representation: 'rebase={rebase_off}, bind={bind_off}, weakBind={weak_bind_off}, lazyBind={lazy_bind_off}, export={export_off}' 444 | instances: 445 | rebase: 446 | io: _root._io 447 | pos: rebase_off 448 | size: rebase_size 449 | type: rebase_data 450 | bind: 451 | io: _root._io 452 | pos: bind_off 453 | size: bind_size 454 | type: bind_data 455 | lazy_bind: 456 | io: _root._io 457 | pos: lazy_bind_off 458 | size: lazy_bind_size 459 | type: lazy_bind_data 460 | exports: 461 | io: _root._io 462 | pos: export_off 463 | size: export_size 464 | type: export_node 465 | types: 466 | rebase_data: 467 | seq: 468 | - id: items 469 | type: rebase_item 470 | repeat: until 471 | repeat-until: _.opcode == opcode::done 472 | types: 473 | rebase_item: 474 | seq: 475 | - id: opcode_and_immediate 476 | type: u1 477 | - id: uleb 478 | type: uleb128 479 | if: > 480 | opcode == opcode::set_segment_and_offset_uleb or 481 | opcode == opcode::add_address_uleb or 482 | opcode == opcode::do_rebase_uleb_times or 483 | opcode == opcode::do_rebase_add_address_uleb or 484 | opcode == opcode::do_rebase_uleb_times_skipping_uleb 485 | - id: skip 486 | type: uleb128 487 | if: "opcode == opcode::do_rebase_uleb_times_skipping_uleb" 488 | instances: 489 | opcode: 490 | value: "opcode_and_immediate & 0xf0" 491 | enum: opcode 492 | -webide-parse-mode: eager 493 | immediate: 494 | value: "opcode_and_immediate & 0x0f" 495 | -webide-parse-mode: eager 496 | -webide-representation: "{opcode}, imm:{immediate}, uleb:{uleb}, skip:{skip}" 497 | enums: 498 | opcode: 499 | 0x00: done 500 | 0x10: set_type_immediate 501 | 0x20: set_segment_and_offset_uleb 502 | 0x30: add_address_uleb 503 | 0x40: add_address_immediate_scaled 504 | 0x50: do_rebase_immediate_times 505 | 0x60: do_rebase_uleb_times 506 | 0x70: do_rebase_add_address_uleb 507 | 0x80: do_rebase_uleb_times_skipping_uleb 508 | 
bind_item: 509 | seq: 510 | - id: opcode_and_immediate 511 | type: u1 512 | - id: uleb 513 | type: uleb128 514 | if: > 515 | opcode == bind_opcode::set_dylib_ordinal_uleb or 516 | opcode == bind_opcode::set_append_sleb or 517 | opcode == bind_opcode::set_segment_and_offset_uleb or 518 | opcode == bind_opcode::add_address_uleb or 519 | opcode == bind_opcode::do_bind_add_address_uleb or 520 | opcode == bind_opcode::do_bind_uleb_times_skipping_uleb 521 | - id: skip 522 | type: uleb128 523 | if: "opcode == bind_opcode::do_bind_uleb_times_skipping_uleb" 524 | - id: symbol 525 | type: strz 526 | if: "opcode == bind_opcode::set_symbol_trailing_flags_immediate" 527 | encoding: ascii 528 | instances: 529 | opcode: 530 | value: "opcode_and_immediate & 0xf0" 531 | enum: bind_opcode 532 | -webide-parse-mode: eager 533 | immediate: 534 | value: "opcode_and_immediate & 0x0f" 535 | -webide-parse-mode: eager 536 | -webide-representation: "{opcode}, imm:{immediate}, uleb:{uleb}, skip:{skip}, symbol:{symbol}" 537 | bind_data: 538 | seq: 539 | - id: items 540 | type: bind_item 541 | repeat: until 542 | repeat-until: _.opcode == bind_opcode::done 543 | lazy_bind_data: 544 | seq: 545 | - id: items 546 | type: bind_item 547 | repeat: eos 548 | export_node: 549 | seq: 550 | - id: terminal_size 551 | type: uleb128 552 | - id: children_count 553 | type: u1 554 | - id: children 555 | type: child 556 | repeat: expr 557 | repeat-expr: children_count 558 | - id: terminal 559 | size: terminal_size.value 560 | -webide-representation: "{children_count} children, term_size={terminal_size.value}" 561 | types: 562 | child: 563 | seq: 564 | - id: name 565 | type: strz 566 | encoding: ascii 567 | - id: node_offset 568 | type: uleb128 569 | instances: 570 | value: 571 | pos: node_offset.value 572 | type: export_node 573 | -webide-representation: "{name}: {node_offset}" 574 | enums: 575 | bind_opcode: 576 | 0x00: done 577 | 0x10: set_dylib_ordinal_immediate 578 | 0x20: set_dylib_ordinal_uleb 579 | 0x30: 
set_dylib_special_immediate 580 | 0x40: set_symbol_trailing_flags_immediate 581 | 0x50: set_type_immediate 582 | 0x60: set_append_sleb 583 | 0x70: set_segment_and_offset_uleb 584 | 0x80: add_address_uleb 585 | 0x90: do_bind 586 | 0xa0: do_bind_add_address_uleb 587 | 0xb0: do_bind_add_address_immediate_scaled 588 | 0xc0: do_bind_uleb_times_skipping_uleb 589 | symtab_command: 590 | seq: 591 | - id: sym_off 592 | -orig-id: symoff 593 | type: u4 594 | - id: n_syms 595 | -orig-id: nsyms 596 | type: u4 597 | - id: str_off 598 | -orig-id: stroff 599 | type: u4 600 | - id: str_size 601 | -orig-id: strsize 602 | type: u4 603 | instances: 604 | symbols: 605 | io: _root._io 606 | pos: sym_off 607 | type: nlist_64 608 | repeat: expr 609 | repeat-expr: n_syms 610 | strs: 611 | io: _root._io 612 | pos: str_off 613 | type: str_table 614 | size: str_size 615 | -webide-representation: "symbols: {n_syms:dec}, strtab: {str_off}" 616 | types: 617 | str_table: 618 | seq: 619 | - id: unknown 620 | type: u4 621 | - id: items 622 | type: strz 623 | encoding: ascii 624 | repeat: until 625 | repeat-until: _ == "" 626 | nlist_64: 627 | seq: 628 | - id: un 629 | type: u4 630 | - id: type 631 | type: u1 632 | - id: sect 633 | type: u1 634 | - id: desc 635 | type: u2 636 | - id: value 637 | type: u8 638 | -webide-representation: "un={un} type={type} sect={sect} desc={desc} value={value}" 639 | dysymtab_command: 640 | seq: 641 | - id: i_local_sym 642 | -orig-id: ilocalsym 643 | type: u4 644 | - id: n_local_sym 645 | -orig-id: nlocalsym 646 | type: u4 647 | - id: i_ext_def_sym 648 | -orig-id: iextdefsym 649 | type: u4 650 | - id: n_ext_def_sym 651 | -orig-id: nextdefsym 652 | type: u4 653 | - id: i_undef_sym 654 | -orig-id: iundefsym 655 | type: u4 656 | - id: n_undef_sym 657 | -orig-id: nundefsym 658 | type: u4 659 | - id: toc_off 660 | -orig-id: tocoff 661 | type: u4 662 | - id: n_toc 663 | -orig-id: ntoc 664 | type: u4 665 | - id: mod_tab_off 666 | -orig-id: modtaboff 667 | type: u4 668 | - 
id: n_mod_tab 669 | -orig-id: nmodtab 670 | type: u4 671 | - id: ext_ref_sym_off 672 | -orig-id: extrefsymoff 673 | type: u4 674 | - id: n_ext_ref_syms 675 | -orig-id: nextrefsyms 676 | type: u4 677 | - id: indirect_sym_off 678 | -orig-id: indirectsymoff 679 | type: u4 680 | - id: n_indirect_syms 681 | -orig-id: nindirectsyms 682 | type: u4 683 | - id: ext_rel_off 684 | -orig-id: extreloff 685 | type: u4 686 | - id: n_ext_rel 687 | -orig-id: nextrel 688 | type: u4 689 | - id: loc_rel_off 690 | -orig-id: locreloff 691 | type: u4 692 | - id: n_loc_rel 693 | -orig-id: nlocrel 694 | type: u4 695 | instances: 696 | indirect_symbols: 697 | io: _root._io 698 | pos: indirect_sym_off 699 | type: u4 700 | repeat: expr 701 | repeat-expr: n_indirect_syms 702 | lc_str: 703 | seq: 704 | - id: length 705 | -orig-id: offset 706 | type: u4 707 | - id: value 708 | -orig-id: ptr 709 | type: strz 710 | encoding: UTF-8 711 | -webide-representation: '{value}' 712 | dylinker_command: 713 | seq: 714 | - id: name 715 | type: lc_str 716 | -webide-representation: '{name}' 717 | uuid_command: 718 | seq: 719 | - id: uuid 720 | size: 16 721 | -webide-representation: 'uuid={uuid}' 722 | version: 723 | seq: 724 | - id: p1 725 | type: u1 726 | - id: minor 727 | type: u1 728 | - id: major 729 | type: u1 730 | - id: release 731 | type: u1 732 | -webide-representation: '{major:dec}.{minor:dec}' 733 | encryption_info_command: 734 | seq: 735 | - id: cryptoff 736 | type: u4 737 | - id: cryptsize 738 | type: u4 739 | - id: cryptid 740 | type: u4 741 | - id: pad 742 | type: u4 743 | if: _root.magic == magic_type::macho_be_x64 or _root.magic == magic_type::macho_le_x64 744 | twolevel_hints_command: 745 | seq: 746 | - id: offset 747 | type: u4 748 | - id: num_hints 749 | -orig-id: nhints 750 | type: u4 751 | linker_option_command: 752 | seq: 753 | - id: num_strings 754 | -orig-id: count 755 | type: u4 756 | - id: strings 757 | type: strz 758 | encoding: utf-8 759 | repeat: expr 760 | repeat-expr: 
num_strings 761 | sub_command: 762 | seq: 763 | - id: name 764 | type: lc_str 765 | routines_command_64: 766 | seq: 767 | - id: init_address 768 | type: u8 769 | - id: init_module 770 | type: u8 771 | - id: reserved 772 | size: 48 # u8 * 6 773 | routines_command: 774 | seq: 775 | - id: init_address 776 | type: u4 777 | - id: init_module 778 | type: u4 779 | - id: reserved 780 | size: 24 # u4 * 6 781 | version_min_command: 782 | seq: 783 | - id: version 784 | type: version 785 | - id: sdk 786 | type: version 787 | -webide-representation: 'v:{version}, r:{reserved}' 788 | source_version_command: 789 | seq: 790 | - id: version 791 | type: u8 792 | -webide-representation: 'v:{version:dec}' 793 | entry_point_command: 794 | seq: 795 | - id: entry_off 796 | -orig-id: entryoff 797 | type: u8 798 | - id: stack_size 799 | -orig-id: stacksize 800 | type: u8 801 | -webide-representation: 'entry_off={entry_off}, stack_size={stack_size}' 802 | dylib_command: 803 | seq: 804 | - id: name_offset 805 | type: u4 806 | - id: timestamp 807 | type: u4 808 | - id: current_version 809 | type: u4 810 | - id: compatibility_version 811 | type: u4 812 | - id: name 813 | type: strz 814 | encoding: utf-8 815 | -webide-representation: '{name}' 816 | rpath_command: 817 | seq: 818 | - id: path_offset 819 | type: u4 820 | - id: path 821 | type: strz 822 | encoding: utf-8 823 | -webide-representation: '{path}' 824 | linkedit_data_command: 825 | seq: 826 | - id: data_off 827 | -orig-id: dataoff 828 | type: u4 829 | - id: data_size 830 | -orig-id: datasize 831 | type: u4 832 | -webide-representation: 'offs={data_off}, size={data_size}' 833 | code_signature_command: 834 | seq: 835 | - id: data_off 836 | type: u4 837 | - id: data_size 838 | type: u4 839 | instances: 840 | code_signature: 841 | io: _root._io 842 | pos: data_off 843 | type: cs_blob 844 | size: data_size 845 | -webide-representation: 'offs={data_off}, size={data_size}' 846 | cs_blob: 847 | seq: 848 | - id: magic 849 | type: u4be 850 | 
enum: cs_magic 851 | - id: length 852 | type: u4be 853 | - id: body 854 | size: length - 8 855 | type: 856 | switch-on: magic 857 | cases: 858 | 'cs_magic::requirement' : requirement 859 | 'cs_magic::requirements' : requirements 860 | 'cs_magic::code_directory' : code_directory 861 | 'cs_magic::entitlement' : entitlement 862 | 'cs_magic::blob_wrapper' : blob_wrapper 863 | 'cs_magic::embedded_signature': super_blob 864 | 'cs_magic::detached_signature': super_blob 865 | enums: 866 | cs_magic: 867 | 0xfade0c00: requirement # CSMAGIC_REQUIREMENT 868 | 0xfade0c01: requirements # CSMAGIC_REQUIREMENTS 869 | 0xfade0c02: code_directory # CSMAGIC_CODEDIRECTORY 870 | 0xfade7171: entitlement # CSMAGIC_ENTITLEMENT 871 | 0xfade0b01: blob_wrapper # CSMAGIC_BLOBWRAPPER 872 | 0xfade0cc0: embedded_signature # CSMAGIC_EMBEDDED_SIGNATURE 873 | 0xfade0cc1: detached_signature # CSMAGIC_DETACHED_SIGNATURE 874 | types: 875 | code_directory: 876 | seq: 877 | - id: version 878 | type: u4be 879 | - id: flags 880 | type: u4be 881 | - id: hash_offset 882 | type: u4be 883 | - id: ident_offset 884 | type: u4be 885 | - id: n_special_slots 886 | type: u4be 887 | - id: n_code_slots 888 | type: u4be 889 | - id: code_limit 890 | type: u4be 891 | - id: hash_size 892 | type: u1 893 | - id: hash_type 894 | type: u1 895 | - id: spare1 896 | type: u1 897 | - id: page_size 898 | type: u1 899 | - id: spare2 900 | type: u4be 901 | - id: scatter_offset 902 | type: u4be 903 | if: version >= 0x20100 904 | - id: team_id_offset 905 | type: u4be 906 | if: version >= 0x20200 907 | instances: 908 | ident: 909 | pos: ident_offset - 8 910 | type: strz 911 | encoding: utf-8 912 | -webide-parse-mode: eager 913 | team_id: 914 | pos: team_id_offset - 8 915 | type: strz 916 | encoding: utf-8 917 | -webide-parse-mode: eager 918 | hashes: 919 | pos: hash_offset - 8 - hash_size * n_special_slots 920 | repeat: expr 921 | repeat-expr: n_special_slots + n_code_slots 922 | size: hash_size 923 | blob_index: 924 | seq: 925 | - id: 
type 926 | type: u4be 927 | enum: csslot_type 928 | - id: offset 929 | type: u4be 930 | instances: 931 | blob: 932 | pos: offset - 8 933 | io: _parent._io 934 | size-eos: true 935 | type: cs_blob 936 | enums: 937 | csslot_type: 938 | 0: code_directory # CSSLOT_CODEDIRECTORY 939 | 1: info_slot # CSSLOT_INFOSLOT 940 | 2: requirements # CSSLOT_REQUIREMENTS 941 | 3: resource_dir # CSSLOT_RESOURCEDIR 942 | 4: application # CSSLOT_APPLICATION 943 | 5: entitlements # CSSLOT_ENTITLEMENTS 944 | 0x1000: alternate_code_directories # CSSLOT_ALTERNATE_CODEDIRECTORIES 945 | 0x10000: signature_slot # CSSLOT_SIGNATURESLOT 946 | data: 947 | seq: 948 | - id: length 949 | type: u4be 950 | - id: value 951 | size: length 952 | - id: padding 953 | size: 4 - (length & 3) 954 | -webide-representation: "{value}" 955 | match: 956 | seq: 957 | - id: match_op 958 | type: u4be 959 | enum: op 960 | - id: data 961 | type: data 962 | if: 'match_op != op::exists' 963 | enums: 964 | op: 965 | 0: exists 966 | 1: equal 967 | 2: contains 968 | 3: begins_with 969 | 4: ends_with 970 | 5: less_than 971 | 6: greater_than 972 | 7: less_equal 973 | 8: greater_equal 974 | -webide-representation: "{match_op} {data.value:str}" 975 | expr: 976 | seq: 977 | - id: op 978 | type: u4be 979 | enum: op_enum 980 | - id: data 981 | type: 982 | switch-on: op 983 | cases: 984 | #'op_enum::false' : 'false' 985 | #'op_enum::true' : 'true' 986 | 'op_enum::ident' : ident_expr 987 | #'op_enum::apple_anchor' : 'anchor apple' 988 | 'op_enum::anchor_hash' : anchor_hash_expr 989 | 'op_enum::info_key_value' : data 990 | 'op_enum::and_op' : and_expr 991 | 'op_enum::or_op' : or_expr 992 | 'op_enum::cd_hash' : data 993 | 'op_enum::not_op' : expr 994 | 'op_enum::info_key_field' : info_key_field_expr 995 | 'op_enum::cert_field' : cert_field_expr 996 | 'op_enum::trusted_cert' : cert_slot_expr 997 | #'op_enum::trusted_certs' : 'anchor trusted' 998 | 'op_enum::cert_generic' : cert_generic_expr 999 | 'op_enum::apple_generic_anchor': 
apple_generic_anchor_expr 1000 | 'op_enum::entitlement_field' : entitlement_field_expr 1001 | enums: 1002 | op_enum: 1003 | 0: 'false' # unconditionally false 1004 | 1: 'true' # unconditionally true 1005 | 2: ident # match canonical code [string] 1006 | 3: apple_anchor # signed by Apple as Apple's product ("anchor apple") 1007 | 4: anchor_hash # match anchor [cert hash] 1008 | 5: info_key_value # *legacy* - use opInfoKeyField [key; value] 1009 | 6: and_op # binary prefix expr AND expr [expr; expr] 1010 | 7: or_op # binary prefix expr OR expr 1011 | 8: cd_hash # match hash of CodeDirectory directly 1012 | 9: not_op # logical inverse 1013 | 10: info_key_field # Info.plist key field [string; match suffix] 1014 | 11: cert_field # Certificate field [cert index; field name; match suffix] 1015 | 12: trusted_cert # require trust settings to approve one particular cert [cert index] 1016 | 13: trusted_certs # require trust settings to approve the cert chain 1017 | 14: cert_generic # Certificate component by OID [cert index; oid; match suffix] 1018 | 15: apple_generic_anchor # signed by Apple in any capacity ("anchor apple generic") 1019 | 16: entitlement_field # entitlement dictionary field [string; match suffix] 1020 | cert_slot: 1021 | 0xffffffff: anchor_cert 1022 | 0: left_cert 1023 | types: 1024 | ident_expr: 1025 | seq: 1026 | - id: identifier 1027 | type: data 1028 | -webide-representation: "identifier {identifier.value:str}" 1029 | apple_generic_anchor_expr: 1030 | instances: 1031 | value: 1032 | value: '"anchor apple generic"' 1033 | -webide-representation: "anchor apple generic" 1034 | cert_slot_expr: 1035 | seq: 1036 | - id: value 1037 | type: u4be 1038 | enum: cert_slot 1039 | and_expr: 1040 | seq: 1041 | - id: left 1042 | type: expr 1043 | - id: right 1044 | type: expr 1045 | -webide-representation: "({left}) AND ({right})" 1046 | or_expr: 1047 | seq: 1048 | - id: left 1049 | type: expr 1050 | - id: right 1051 | type: expr 1052 | -webide-representation: "({left}) 
OR ({right})" 1053 | anchor_hash_expr: 1054 | seq: 1055 | - id: cert_slot 1056 | type: u4be 1057 | enum: cert_slot 1058 | - id: data 1059 | type: data 1060 | info_key_field_expr: 1061 | seq: 1062 | - id: data 1063 | type: data 1064 | - id: match 1065 | type: match 1066 | entitlement_field_expr: 1067 | seq: 1068 | - id: data 1069 | type: data 1070 | - id: match 1071 | type: match 1072 | cert_field_expr: 1073 | seq: 1074 | - id: cert_slot 1075 | type: u4be 1076 | enum: cert_slot 1077 | - id: data 1078 | type: data 1079 | - id: match 1080 | type: match 1081 | -webide-representation: "{cert_slot}[{data.value:str}] {match}" 1082 | cert_generic_expr: 1083 | seq: 1084 | - id: cert_slot 1085 | type: u4be 1086 | enum: cert_slot 1087 | - id: data 1088 | type: data 1089 | - id: match 1090 | type: match 1091 | -webide-representation: "{cert_slot}[{data.value:hex}] {match}" 1092 | -webide-representation: '{data}' 1093 | requirement: 1094 | seq: 1095 | - id: kind 1096 | type: u4be 1097 | - id: expr 1098 | type: expr 1099 | entitlement: 1100 | seq: 1101 | - id: data 1102 | size-eos: true 1103 | -webide-representation: "{data:str}" 1104 | requirements_blob_index: 1105 | seq: 1106 | - id: type 1107 | type: u4be 1108 | enum: requirement_type 1109 | - id: offset 1110 | type: u4be 1111 | instances: 1112 | value: 1113 | type: cs_blob 1114 | pos: offset - 8 1115 | enums: 1116 | requirement_type: 1117 | 1: host # kSecHostRequirementType 1118 | 2: guest # kSecGuestRequirementType 1119 | 3: designated # kSecDesignatedRequirementtype 1120 | 4: library # kSecLibraryRequirementType 1121 | requirements: 1122 | seq: 1123 | - id: count 1124 | type: u4be 1125 | - id: items 1126 | type: requirements_blob_index 1127 | repeat: expr 1128 | repeat-expr: count 1129 | blob_wrapper: 1130 | seq: 1131 | - id: data 1132 | size-eos: true 1133 | super_blob: 1134 | seq: 1135 | - id: count 1136 | type: u4be 1137 | - id: blobs 1138 | type: blob_index 1139 | repeat: expr 1140 | repeat-expr: count 1141 | 
-------------------------------------------------------------------------------- /pkg/internal_api/kaitai/microsoft_pe.ksy: -------------------------------------------------------------------------------- 1 | meta: 2 | id: microsoft_pe 3 | title: Microsoft PE (Portable Executable) file format 4 | application: Microsoft Windows 5 | file-extension: 6 | - exe 7 | - dll 8 | - sys 9 | xref: 10 | justsolve: Portable_Executable 11 | pronom: 12 | - x-fmt/411 13 | - fmt/899 14 | - fmt/900 15 | wikidata: Q1076355 16 | license: CC0-1.0 17 | ks-version: 0.7 18 | endian: le 19 | doc-ref: http://www.microsoft.com/whdc/system/platform/firmware/PECOFF.mspx 20 | seq: 21 | - id: mz 22 | type: mz_placeholder 23 | instances: 24 | pe: 25 | pos: mz.ofs_pe 26 | type: pe_header 27 | enums: 28 | pe_format: 29 | 0x107: rom_image 30 | 0x10b: pe32 31 | 0x20b: pe32_plus 32 | types: 33 | mz_placeholder: 34 | seq: 35 | - id: magic 36 | contents: "MZ" 37 | - id: data1 38 | size: 0x3a 39 | - id: ofs_pe 40 | type: u4 41 | doc: In PE file, an offset to PE header 42 | pe_header: 43 | seq: 44 | - id: pe_signature 45 | contents: ["PE", 0, 0] 46 | - id: coff_hdr 47 | type: coff_header 48 | - id: optional_hdr 49 | type: optional_header 50 | size: coff_hdr.size_of_optional_header 51 | - id: sections 52 | repeat: expr 53 | repeat-expr: coff_hdr.number_of_sections 54 | type: section 55 | instances: 56 | certificate_table: 57 | pos: optional_hdr.data_dirs.certificate_table.virtual_address 58 | if: optional_hdr.data_dirs.certificate_table.virtual_address != 0 59 | size: optional_hdr.data_dirs.certificate_table.size 60 | type: certificate_table 61 | coff_header: 62 | doc-ref: 3.3. 
COFF File Header (Object and Image) 63 | seq: 64 | - id: machine 65 | type: u2 66 | enum: machine_type 67 | - id: number_of_sections 68 | type: u2 69 | - id: time_date_stamp 70 | type: u4 71 | - id: pointer_to_symbol_table 72 | type: u4 73 | - id: number_of_symbols 74 | type: u4 75 | - id: size_of_optional_header 76 | type: u2 77 | - id: characteristics 78 | type: u2 79 | instances: 80 | symbol_table_size: 81 | value: number_of_symbols * 18 82 | symbol_name_table_offset: 83 | value: pointer_to_symbol_table + symbol_table_size 84 | symbol_name_table_size: 85 | pos: symbol_name_table_offset 86 | type: u4 87 | symbol_table: 88 | pos: pointer_to_symbol_table 89 | type: coff_symbol 90 | repeat: expr 91 | repeat-expr: number_of_symbols 92 | enums: 93 | machine_type: 94 | # 3.3.1. Machine Types 95 | 0x0: unknown 96 | 0x1d3: am33 97 | 0x8664: amd64 98 | 0x1c0: arm 99 | 0xaa64: arm64 100 | 0x1c4: armnt 101 | 0xebc: ebc 102 | 0x14c: i386 103 | 0x200: ia64 104 | 0x9041: m32r 105 | 0x266: mips16 106 | 0x366: mipsfpu 107 | 0x466: mipsfpu16 108 | 0x1f0: powerpc 109 | 0x1f1: powerpcfp 110 | 0x166: r4000 111 | 0x5032: riscv32 112 | 0x5064: riscv64 113 | 0x5128: riscv128 114 | 0x1a2: sh3 115 | 0x1a3: sh3dsp 116 | 0x1a6: sh4 117 | 0x1a8: sh5 118 | 0x1c2: thumb 119 | 0x169: wcemipsv2 120 | # Not mentioned in Microsoft documentation, but widely regarded 121 | 0x184: alpha 122 | coff_symbol: 123 | seq: 124 | - id: name_annoying 125 | type: annoyingstring 126 | size: 8 127 | #- id: name_zeroes 128 | # type: u4 129 | #- id: name_offset 130 | # type: u4 131 | - id: value 132 | type: u4 133 | - id: section_number 134 | type: u2 135 | - id: type 136 | type: u2 137 | - id: storage_class 138 | type: u1 139 | - id: number_of_aux_symbols 140 | type: u1 141 | instances: 142 | #effective_name: 143 | # value: name_zeroes == 0 ? name_from_offset : '"fixme"' 144 | #name_from_offset: 145 | # io: _root._io 146 | # pos: name_zeroes == 0 ? 
_parent.symbol_name_table_offset + name_offset : 0 147 | # type: str 148 | # terminator: 0 149 | # encoding: ascii 150 | section: 151 | value: _root.pe.sections[section_number - 1] 152 | data: 153 | pos: section.pointer_to_raw_data + value 154 | size: 1 155 | annoyingstring: 156 | -webide-representation: '{name}' 157 | instances: 158 | name_zeroes: 159 | pos: 0 160 | type: u4 161 | name_offset: 162 | pos: 4 163 | type: u4 164 | name_from_offset: 165 | io: _root._io 166 | pos: 'name_zeroes == 0 ? _parent._parent.symbol_name_table_offset + name_offset : 0' 167 | type: str 168 | terminator: 0 169 | encoding: ascii 170 | eos-error: false 171 | if: name_zeroes == 0 172 | name_from_short: 173 | pos: 0 174 | type: str 175 | terminator: 0 176 | encoding: ascii 177 | eos-error: false 178 | if: name_zeroes != 0 179 | name: 180 | value: 'name_zeroes == 0 ? name_from_offset : name_from_short' 181 | optional_header: 182 | seq: 183 | - id: std 184 | type: optional_header_std 185 | - id: windows 186 | type: optional_header_windows 187 | - id: data_dirs 188 | type: optional_header_data_dirs 189 | optional_header_std: 190 | seq: 191 | - id: format 192 | type: u2 193 | enum: pe_format 194 | - id: major_linker_version 195 | type: u1 196 | - id: minor_linker_version 197 | type: u1 198 | - id: size_of_code 199 | type: u4 200 | - id: size_of_initialized_data 201 | type: u4 202 | - id: size_of_uninitialized_data 203 | type: u4 204 | - id: address_of_entry_point 205 | type: u4 206 | - id: base_of_code 207 | type: u4 208 | - id: base_of_data 209 | type: u4 210 | if: format == pe_format::pe32 211 | optional_header_windows: 212 | seq: 213 | - id: image_base_32 214 | type: u4 215 | if: _parent.std.format == pe_format::pe32 216 | - id: image_base_64 217 | type: u8 218 | if: _parent.std.format == pe_format::pe32_plus 219 | - id: section_alignment 220 | type: u4 221 | - id: file_alignment 222 | type: u4 223 | - id: major_operating_system_version 224 | type: u2 225 | - id: 
minor_operating_system_version 226 | type: u2 227 | - id: major_image_version 228 | type: u2 229 | - id: minor_image_version 230 | type: u2 231 | - id: major_subsystem_version 232 | type: u2 233 | - id: minor_subsystem_version 234 | type: u2 235 | - id: win32_version_value 236 | type: u4 237 | - id: size_of_image 238 | type: u4 239 | - id: size_of_headers 240 | type: u4 241 | - id: check_sum 242 | type: u4 243 | - id: subsystem 244 | type: u2 245 | enum: subsystem_enum 246 | - id: dll_characteristics 247 | type: u2 248 | - id: size_of_stack_reserve_32 249 | type: u4 250 | if: _parent.std.format == pe_format::pe32 251 | - id: size_of_stack_reserve_64 252 | type: u8 253 | if: _parent.std.format == pe_format::pe32_plus 254 | - id: size_of_stack_commit_32 255 | type: u4 256 | if: _parent.std.format == pe_format::pe32 257 | - id: size_of_stack_commit_64 258 | type: u8 259 | if: _parent.std.format == pe_format::pe32_plus 260 | - id: size_of_heap_reserve_32 261 | type: u4 262 | if: _parent.std.format == pe_format::pe32 263 | - id: size_of_heap_reserve_64 264 | type: u8 265 | if: _parent.std.format == pe_format::pe32_plus 266 | - id: size_of_heap_commit_32 267 | type: u4 268 | if: _parent.std.format == pe_format::pe32 269 | - id: size_of_heap_commit_64 270 | type: u8 271 | if: _parent.std.format == pe_format::pe32_plus 272 | - id: loader_flags 273 | type: u4 274 | - id: number_of_rva_and_sizes 275 | type: u4 276 | enums: 277 | subsystem_enum: 278 | 0: unknown 279 | 1: native 280 | 2: windows_gui 281 | 3: windows_cui 282 | 7: posix_cui 283 | 9: windows_ce_gui 284 | 10: efi_application 285 | 11: efi_boot_service_driver 286 | 12: efi_runtime_driver 287 | 13: efi_rom 288 | 14: xbox 289 | 16: windows_boot_application 290 | optional_header_data_dirs: 291 | seq: 292 | - id: export_table 293 | type: data_dir 294 | - id: import_table 295 | type: data_dir 296 | - id: resource_table 297 | type: data_dir 298 | - id: exception_table 299 | type: data_dir 300 | - id: certificate_table 
301 | type: data_dir 302 | - id: base_relocation_table 303 | type: data_dir 304 | - id: debug 305 | type: data_dir 306 | - id: architecture 307 | type: data_dir 308 | - id: global_ptr 309 | type: data_dir 310 | - id: tls_table 311 | type: data_dir 312 | - id: load_config_table 313 | type: data_dir 314 | - id: bound_import 315 | type: data_dir 316 | - id: iat 317 | type: data_dir 318 | - id: delay_import_descriptor 319 | type: data_dir 320 | - id: clr_runtime_header 321 | type: data_dir 322 | data_dir: 323 | seq: 324 | - id: virtual_address 325 | type: u4 326 | - id: size 327 | type: u4 328 | section: 329 | -webide-representation: "{name}" 330 | seq: 331 | - id: name 332 | type: str 333 | encoding: UTF-8 334 | size: 8 335 | pad-right: 0 336 | - id: virtual_size 337 | type: u4 338 | - id: virtual_address 339 | type: u4 340 | - id: size_of_raw_data 341 | type: u4 342 | - id: pointer_to_raw_data 343 | type: u4 344 | - id: pointer_to_relocations 345 | type: u4 346 | - id: pointer_to_linenumbers 347 | type: u4 348 | - id: number_of_relocations 349 | type: u2 350 | - id: number_of_linenumbers 351 | type: u2 352 | - id: characteristics 353 | type: u4 354 | instances: 355 | body: 356 | pos: pointer_to_raw_data 357 | size: size_of_raw_data 358 | certificate_table: 359 | seq: 360 | - id: items 361 | type: certificate_entry 362 | repeat: eos 363 | certificate_entry: 364 | enums: 365 | certificate_revision: 366 | 0x0100: 367 | id: revision_1_0 368 | doc: | 369 | Version 1, legacy version of the Win_Certificate structure. 370 | It is supported only for purposes of verifying legacy Authenticode signatures 371 | 0x0200: 372 | id: revision_2_0 373 | doc: Version 2 is the current version of the Win_Certificate structure. 
374 | certificate_type: 375 | 0x0001: 376 | id: x509 377 | doc: | 378 | bCertificate contains an X.509 Certificate 379 | Not Supported 380 | 0x0002: 381 | id: pkcs_signed_data 382 | doc: 'bCertificate contains a PKCS#7 SignedData structure' 383 | 0x0003: 384 | id: reserved_1 385 | doc: 'Reserved' 386 | 0x0004: 387 | id: ts_stack_signed 388 | doc: | 389 | Terminal Server Protocol Stack Certificate signing 390 | Not Supported 391 | seq: 392 | - id: length 393 | -orig-id: dwLength 394 | type: u4 395 | doc: Specifies the length of the attribute certificate entry. 396 | - id: revision 397 | -orig-id: wRevision 398 | type: u2 399 | enum: certificate_revision 400 | doc: Contains the certificate version number. 401 | - id: certificate_type 402 | -orig-id: wCertificateType 403 | type: u2 404 | enum: certificate_type 405 | doc: Specifies the type of content in bCertificate 406 | - id: certificate_bytes 407 | -orig-id: bCertificate 408 | size: length - 8 409 | doc: Contains a certificate, such as an Authenticode signature. 410 | doc-ref: 'https://docs.microsoft.com/en-us/windows/desktop/debug/pe-format#the-attribute-certificate-table-image-only' 411 | -------------------------------------------------------------------------------- /pkg/internal_api/kaitai/microsoft_pe.py: -------------------------------------------------------------------------------- 1 | # This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild 2 | 3 | from enum import Enum 4 | 5 | from pkg_resources import parse_version 6 | 7 | from ...vendor.kaitaistruct import __version__ as ks_version, KaitaiStruct, KaitaiStream, BytesIO 8 | 9 | if parse_version(ks_version) < parse_version('0.7'): 10 | raise Exception("Incompatible Kaitai Struct Python API: 0.7 or later is required, but you have %s" % (ks_version)) 11 | 12 | 13 | class MicrosoftPe(KaitaiStruct): 14 | """ 15 | .. 
seealso::
       Source - http://www.microsoft.com/whdc/system/platform/firmware/PECOFF.mspx
    """

    # NOTE(review): this file is generated by kaitai-struct-compiler from
    # microsoft_pe.ksy.  Do not hand-edit parsing logic here; change the
    # .ksy source and regenerate.

    class PeFormat(Enum):
        # Magic value of the optional header: 0x107 = ROM image,
        # 0x10b = PE32, 0x20b = PE32+ (64-bit image).
        rom_image = 263
        pe32 = 267
        pe32_plus = 523

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        # Only the MZ stub is parsed eagerly; the PE header itself is
        # reached lazily through the `pe` property below.
        self.mz = self._root.MzPlaceholder(self._io, self, self._root)

    class CertificateEntry(KaitaiStruct):
        """One WIN_CERTIFICATE attribute-certificate entry.

        .. seealso::
           Source - https://docs.microsoft.com/en-us/windows/desktop/debug/pe-format#the-attribute-certificate-table-image-only
        """

        class CertificateRevision(Enum):
            revision_1_0 = 256  # 0x0100: legacy Authenticode signatures only
            revision_2_0 = 512  # 0x0200: current WIN_CERTIFICATE version

        class CertificateType(Enum):
            x509 = 1              # bCertificate holds an X.509 certificate (not supported)
            pkcs_signed_data = 2  # bCertificate holds a PKCS#7 SignedData structure
            reserved_1 = 3
            ts_stack_signed = 4   # Terminal Server protocol stack signing (not supported)

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.length = self._io.read_u4le()
            self.revision = self._root.CertificateEntry.CertificateRevision(self._io.read_u2le())
            self.certificate_type = self._root.CertificateEntry.CertificateType(self._io.read_u2le())
            # dwLength covers the 8-byte entry header, hence the -8.
            self.certificate_bytes = self._io.read_bytes((self.length - 8))

    class OptionalHeaderWindows(KaitaiStruct):
        """Windows-specific portion of the optional header; field widths
        depend on whether the image is PE32 or PE32+."""

        class SubsystemEnum(Enum):
            unknown = 0
            native = 1
            windows_gui = 2
            windows_cui = 3
            posix_cui = 7
            windows_ce_gui = 9
            efi_application = 10
            efi_boot_service_driver = 11
            efi_runtime_driver = 12
            efi_rom = 13
            xbox = 14
            windows_boot_application = 16

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            # image_base is u4 on PE32 and u8 on PE32+; only one branch fires.
            if self._parent.std.format == self._root.PeFormat.pe32:
                self.image_base_32 = self._io.read_u4le()

            if self._parent.std.format == self._root.PeFormat.pe32_plus:
                self.image_base_64 = self._io.read_u8le()

            self.section_alignment = self._io.read_u4le()
            self.file_alignment = self._io.read_u4le()
            self.major_operating_system_version = self._io.read_u2le()
            self.minor_operating_system_version = self._io.read_u2le()
            self.major_image_version = self._io.read_u2le()
            self.minor_image_version = self._io.read_u2le()
            self.major_subsystem_version = self._io.read_u2le()
            self.minor_subsystem_version = self._io.read_u2le()
            self.win32_version_value = self._io.read_u4le()
            self.size_of_image = self._io.read_u4le()
            self.size_of_headers = self._io.read_u4le()
            self.check_sum = self._io.read_u4le()
            self.subsystem = self._root.OptionalHeaderWindows.SubsystemEnum(self._io.read_u2le())
            self.dll_characteristics = self._io.read_u2le()
            # Stack/heap reserve+commit sizes likewise widen on PE32+.
            if self._parent.std.format == self._root.PeFormat.pe32:
                self.size_of_stack_reserve_32 = self._io.read_u4le()

            if self._parent.std.format == self._root.PeFormat.pe32_plus:
                self.size_of_stack_reserve_64 = self._io.read_u8le()

            if self._parent.std.format == self._root.PeFormat.pe32:
                self.size_of_stack_commit_32 = self._io.read_u4le()

            if self._parent.std.format == self._root.PeFormat.pe32_plus:
                self.size_of_stack_commit_64 = self._io.read_u8le()

            if self._parent.std.format == self._root.PeFormat.pe32:
                self.size_of_heap_reserve_32 = self._io.read_u4le()

            if self._parent.std.format == self._root.PeFormat.pe32_plus:
                self.size_of_heap_reserve_64 = self._io.read_u8le()

            if self._parent.std.format == self._root.PeFormat.pe32:
                self.size_of_heap_commit_32 = self._io.read_u4le()

            if self._parent.std.format == self._root.PeFormat.pe32_plus:
                self.size_of_heap_commit_64 = self._io.read_u8le()

            self.loader_flags = self._io.read_u4le()
            self.number_of_rva_and_sizes = self._io.read_u4le()

    class OptionalHeaderDataDirs(KaitaiStruct):
        """The 16 data-directory entries of the optional header, in the
        fixed order mandated by the PE/COFF spec."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.export_table = self._root.DataDir(self._io, self, self._root)
            self.import_table = self._root.DataDir(self._io, self, self._root)
            self.resource_table = self._root.DataDir(self._io, self, self._root)
            self.exception_table = self._root.DataDir(self._io, self, self._root)
            self.certificate_table = self._root.DataDir(self._io, self, self._root)
            self.base_relocation_table = self._root.DataDir(self._io, self, self._root)
            self.debug = self._root.DataDir(self._io, self, self._root)
            self.architecture = self._root.DataDir(self._io, self, self._root)
            self.global_ptr = self._root.DataDir(self._io, self, self._root)
            self.tls_table = self._root.DataDir(self._io, self, self._root)
            self.load_config_table = self._root.DataDir(self._io, self, self._root)
            self.bound_import = self._root.DataDir(self._io, self, self._root)
            self.iat = self._root.DataDir(self._io, self, self._root)
            self.delay_import_descriptor = self._root.DataDir(self._io, self, self._root)
            self.clr_runtime_header = self._root.DataDir(self._io, self, self._root)

    class DataDir(KaitaiStruct):
        """A single data directory: RVA + size pair."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.virtual_address = self._io.read_u4le()
            self.size = self._io.read_u4le()

    class CoffSymbol(KaitaiStruct):
        """An 18-byte COFF symbol-table record."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            # The 8-byte name field is parsed from its own substream because
            # it is either an inline short name or a (zeroes, offset) pair.
            self._raw_name_annoying = self._io.read_bytes(8)
            io = KaitaiStream(BytesIO(self._raw_name_annoying))
            self.name_annoying = self._root.Annoyingstring(io, self, self._root)
            self.value = self._io.read_u4le()
            self.section_number = self._io.read_u2le()
            self.type = self._io.read_u2le()
            self.storage_class = self._io.read_u1()
            self.number_of_aux_symbols = self._io.read_u1()

        @property
        def section(self):
            # Lazily resolved; section_number is 1-based, hence the -1.
            if hasattr(self, '_m_section'):
                return self._m_section if hasattr(self, '_m_section') else None

            self._m_section = self._root.pe.sections[(self.section_number - 1)]
            return self._m_section if hasattr(self, '_m_section') else None

        @property
        def data(self):
            # One byte at the symbol's location in its section's raw data;
            # memoized in _m_data, stream position restored afterwards.
            if hasattr(self, '_m_data'):
                return self._m_data if hasattr(self, '_m_data') else None

            _pos = self._io.pos()
            self._io.seek((self.section.pointer_to_raw_data + self.value))
            self._m_data = self._io.read_bytes(1)
            self._io.seek(_pos)
            return self._m_data if hasattr(self, '_m_data') else None

    class PeHeader(KaitaiStruct):
        """The "PE\\0\\0" signature, COFF header, optional header and
        section table."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.pe_signature = self._io.ensure_fixed_contents(b"\x50\x45\x00\x00")
            self.coff_hdr = self._root.CoffHeader(self._io, self, self._root)
            # The optional header is size-bounded by the COFF header, so it
            # is parsed from an isolated substream of exactly that length.
            self._raw_optional_hdr = self._io.read_bytes(self.coff_hdr.size_of_optional_header)
            io = KaitaiStream(BytesIO(self._raw_optional_hdr))
            self.optional_hdr = self._root.OptionalHeader(io, self, self._root)
            self.sections = [None] * (self.coff_hdr.number_of_sections)
            for i in range(self.coff_hdr.number_of_sections):
                self.sections[i] = self._root.Section(self._io, self, self._root)

        @property
        def certificate_table(self):
            # Lazily parsed Authenticode certificate table; None when the
            # certificate data directory is absent (RVA == 0).
            # NOTE(review): for this directory the "virtual_address" is
            # actually a file offset per the PE spec, which is why it is
            # seeked directly.
            if hasattr(self, '_m_certificate_table'):
                return self._m_certificate_table if hasattr(self, '_m_certificate_table') else None

            if self.optional_hdr.data_dirs.certificate_table.virtual_address != 0:
                _pos = self._io.pos()
                self._io.seek(self.optional_hdr.data_dirs.certificate_table.virtual_address)
                self._raw__m_certificate_table = self._io.read_bytes(self.optional_hdr.data_dirs.certificate_table.size)
                io = KaitaiStream(BytesIO(self._raw__m_certificate_table))
                self._m_certificate_table = self._root.CertificateTable(io, self, self._root)
                self._io.seek(_pos)

            return self._m_certificate_table if hasattr(self, '_m_certificate_table') else None

    class OptionalHeader(KaitaiStruct):
        """Standard fields, Windows-specific fields and data directories."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.std = self._root.OptionalHeaderStd(self._io, self, self._root)
            self.windows = self._root.OptionalHeaderWindows(self._io, self, self._root)
            self.data_dirs = self._root.OptionalHeaderDataDirs(self._io, self, self._root)

    class Section(KaitaiStruct):
        """A 40-byte section-table entry plus lazy access to its raw body."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            # Name is 8 bytes, right-padded with NULs.
            self.name = (KaitaiStream.bytes_strip_right(self._io.read_bytes(8), 0)).decode(u"UTF-8")
            self.virtual_size = self._io.read_u4le()
            self.virtual_address = self._io.read_u4le()
            self.size_of_raw_data = self._io.read_u4le()
            self.pointer_to_raw_data = self._io.read_u4le()
            self.pointer_to_relocations = self._io.read_u4le()
            self.pointer_to_linenumbers = self._io.read_u4le()
            self.number_of_relocations = self._io.read_u2le()
            self.number_of_linenumbers = self._io.read_u2le()
            self.characteristics = self._io.read_u4le()

        @property
        def body(self):
            # Raw section contents, read lazily from the file at
            # pointer_to_raw_data; memoized in _m_body.
            if hasattr(self, '_m_body'):
                return self._m_body if hasattr(self, '_m_body') else None

            _pos = self._io.pos()
            self._io.seek(self.pointer_to_raw_data)
            self._m_body = self._io.read_bytes(self.size_of_raw_data)
            self._io.seek(_pos)
            return self._m_body if hasattr(self, '_m_body') else None

    class CertificateTable(KaitaiStruct):
        """Sequence of CertificateEntry records until end of substream."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.items = []
            i = 0
            while not self._io.is_eof():
                self.items.append(self._root.CertificateEntry(self._io, self, self._root))
                i += 1

    class MzPlaceholder(KaitaiStruct):
        """Minimal DOS (MZ) stub: just enough to find the PE header offset."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.magic = self._io.ensure_fixed_contents(b"\x4D\x5A")
            self.data1 = self._io.read_bytes(58)
            # e_lfanew: file offset of the PE header.
            self.ofs_pe = self._io.read_u4le()

    class OptionalHeaderStd(KaitaiStruct):
        """Standard (COFF) fields of the optional header."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.format = self._root.PeFormat(self._io.read_u2le())
            self.major_linker_version = self._io.read_u1()
            self.minor_linker_version = self._io.read_u1()
            self.size_of_code = self._io.read_u4le()
            self.size_of_initialized_data = self._io.read_u4le()
            self.size_of_uninitialized_data = self._io.read_u4le()
            self.address_of_entry_point = self._io.read_u4le()
            self.base_of_code = self._io.read_u4le()
            # base_of_data exists only in PE32; PE32+ drops it.
            if self.format == self._root.PeFormat.pe32:
                self.base_of_data = self._io.read_u4le()

    class CoffHeader(KaitaiStruct):
        """
        .. seealso::
           3.3. COFF File Header (Object and Image)
        """

        class MachineType(Enum):
            unknown = 0
            i386 = 332
            r4000 = 358
            wcemipsv2 = 361
            alpha = 388
            sh3 = 418
            sh3dsp = 419
            sh4 = 422
            sh5 = 424
            arm = 448
            thumb = 450
            armnt = 452
            am33 = 467
            powerpc = 496
            powerpcfp = 497
            ia64 = 512
            mips16 = 614
            mipsfpu = 870
            mipsfpu16 = 1126
            ebc = 3772
            riscv32 = 20530
            riscv64 = 20580
            riscv128 = 20776
            amd64 = 34404
            m32r = 36929
            arm64 = 43620

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.machine = self._root.CoffHeader.MachineType(self._io.read_u2le())
            self.number_of_sections = self._io.read_u2le()
            self.time_date_stamp = self._io.read_u4le()
            self.pointer_to_symbol_table = self._io.read_u4le()
            self.number_of_symbols = self._io.read_u4le()
            self.size_of_optional_header = self._io.read_u2le()
            self.characteristics = self._io.read_u2le()

        @property
        def symbol_table_size(self):
            # Each COFF symbol record is 18 bytes.
            if hasattr(self, '_m_symbol_table_size'):
                return self._m_symbol_table_size if hasattr(self, '_m_symbol_table_size') else None

            self._m_symbol_table_size = (self.number_of_symbols * 18)
            return self._m_symbol_table_size if hasattr(self, '_m_symbol_table_size') else None

        @property
        def symbol_name_table_offset(self):
            # The string table immediately follows the symbol table.
            if hasattr(self, '_m_symbol_name_table_offset'):
                return self._m_symbol_name_table_offset if hasattr(self, '_m_symbol_name_table_offset') else None

            self._m_symbol_name_table_offset = (self.pointer_to_symbol_table + self.symbol_table_size)
            return self._m_symbol_name_table_offset if hasattr(self, '_m_symbol_name_table_offset') else None

        @property
        def symbol_name_table_size(self):
            if hasattr(self, '_m_symbol_name_table_size'):
                return self._m_symbol_name_table_size if hasattr(self, '_m_symbol_name_table_size') else None

            _pos = self._io.pos()
            self._io.seek(self.symbol_name_table_offset)
            self._m_symbol_name_table_size = self._io.read_u4le()
            self._io.seek(_pos)
            return self._m_symbol_name_table_size if hasattr(self, '_m_symbol_name_table_size') else None

        @property
        def symbol_table(self):
            # Lazily parsed list of CoffSymbol records.
            if hasattr(self, '_m_symbol_table'):
                return self._m_symbol_table if hasattr(self, '_m_symbol_table') else None

            _pos = self._io.pos()
            self._io.seek(self.pointer_to_symbol_table)
            self._m_symbol_table = [None] * (self.number_of_symbols)
            for i in range(self.number_of_symbols):
                self._m_symbol_table[i] = self._root.CoffSymbol(self._io, self, self._root)

            self._io.seek(_pos)
            return self._m_symbol_table if hasattr(self, '_m_symbol_table') else None

    class Annoyingstring(KaitaiStruct):
        """COFF symbol name: either an inline NUL-padded short name, or,
        when the first 4 bytes are zero, an offset into the string table."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            # All fields are lazy instances; nothing is read eagerly.
            pass

        @property
        def name_from_offset(self):
            # Long-name case: NUL-terminated string in the file-level string
            # table, addressed via the enclosing CoffHeader.
            if hasattr(self, '_m_name_from_offset'):
                return self._m_name_from_offset if hasattr(self, '_m_name_from_offset') else None

            if self.name_zeroes == 0:
                io = self._root._io
                _pos = io.pos()
                io.seek(((self._parent._parent.symbol_name_table_offset + self.name_offset) if self.name_zeroes == 0 else 0))
                self._m_name_from_offset = (io.read_bytes_term(0, False, True, False)).decode(u"ascii")
                io.seek(_pos)

            return self._m_name_from_offset if hasattr(self, '_m_name_from_offset') else None

        @property
        def name_offset(self):
            if hasattr(self, '_m_name_offset'):
                return self._m_name_offset if hasattr(self, '_m_name_offset') else None

            _pos = self._io.pos()
            self._io.seek(4)
            self._m_name_offset = self._io.read_u4le()
            self._io.seek(_pos)
            return self._m_name_offset if hasattr(self, '_m_name_offset') else None

        @property
        def name(self):
            # Dispatch between the short and long name encodings.
            if hasattr(self, '_m_name'):
                return self._m_name if hasattr(self, '_m_name') else None

            self._m_name = (self.name_from_offset if self.name_zeroes == 0 else self.name_from_short)
            return self._m_name if hasattr(self, '_m_name') else None

        @property
        def name_zeroes(self):
            if hasattr(self, '_m_name_zeroes'):
                return self._m_name_zeroes if hasattr(self, '_m_name_zeroes') else None

            _pos = self._io.pos()
            self._io.seek(0)
            self._m_name_zeroes = self._io.read_u4le()
            self._io.seek(_pos)
            return self._m_name_zeroes if hasattr(self, '_m_name_zeroes') else None

        @property
        def name_from_short(self):
            if hasattr(self, '_m_name_from_short'):
                return self._m_name_from_short if hasattr(self, '_m_name_from_short') else None

            if self.name_zeroes != 0:
                _pos = self._io.pos()
                self._io.seek(0)
                self._m_name_from_short = (self._io.read_bytes_term(0, False, True, False)).decode(u"ascii")
                self._io.seek(_pos)

            return self._m_name_from_short if hasattr(self, '_m_name_from_short') else None

    @property
    def pe(self):
        # Lazily parsed PE header at the offset given by the MZ stub.
        if hasattr(self, '_m_pe'):
            return self._m_pe if hasattr(self, '_m_pe') else None

        _pos = self._io.pos()
        self._io.seek(self.mz.ofs_pe)
        self._m_pe = self._root.PeHeader(self._io, self, self._root)
        self._io.seek(_pos)
        return self._m_pe if hasattr(self, '_m_pe') else None
# ---- file: pkg/internal_api/mac.py ----
from __future__ import print_function

from .decoder import decode_lea, RDI
from .kaitai.mach_o import MachO


def find_idausr_offset(ida_path):
    """
    Locate the offset of the IDAUSR-related pointer in a Mach-O IDA binary.

    Scans the ``__text`` section for the ``IDAUSR`` string, finds the
    ``lea rdi, [rip+...]`` instruction referencing it, then follows the next
    ``call`` whose argument register is RDI to recover the target offset.

    :param ida_path: path to the IDA executable (Mach-O, x86-64).
    :returns: virtual address of the located target.
    :raises Exception: when the section, string, or instructions are missing.
    """
    ida = MachO.from_file(ida_path)

    text = None
    string = None

    segments = (cmd.body for cmd in ida.load_commands
                if cmd.type == cmd.type.segment_64)
    sections = (section for segment in segments
                for section in segment.sections)

    for sect in sections:
        if sect.sect_name.lower() == '__text':
            text = sect

        value = sect.data.find(b'IDAUSR')
        if value != -1:
            string = sect.addr + value

    # Original code raised NameError when either lookup failed; fail loudly
    # with a meaningful message instead.
    if text is None:
        raise Exception('__text section not found in %r' % ida_path)
    if string is None:
        raise Exception("'IDAUSR' string not found in %r" % ida_path)

    code = text.data

    def search(visited, offset, size, target):
        # Scan [offset, offset+size] for a `lea` (48 8d) matching `target`.
        # `visited` memoizes already-decoded offsets within one like_yara run
        # (was a module-level `global` before; now plain function state).
        end = offset + size
        if offset < 0 or visited[offset]:
            # negative offsets previously wrapped around via visited[-n]
            return None
        while offset <= end:
            offset = code.find(b'\x48\x8d', offset)
            # find() returning -1 previously fell through and decoded at
            # addr-1; stop the scan instead.
            if offset == -1 or visited[offset]:
                break

            visited[offset] = True
            insn = decode_lea(text.addr + offset,
                              memoryview(code)[offset:offset + 15])
            if insn and target(insn):
                print('Found:', hex(insn.target), insn)
                return insn
            offset += 1
        return None

    def like_yara(delim, target, start=0, end=None):
        # Around each occurrence of `delim`, try decoding a lea that starts
        # up to 30 bytes earlier (instruction boundaries are unknown).
        visited = [False] * len(code)
        if end is None:
            end = len(code)
        cur = code.find(delim, start)
        while cur != -1 and cur < end:
            for i in range(30):
                res = search(visited, cur - i, i, target)
                if res:
                    return res, cur - i
            cur = code.find(delim, cur + 1)
        return None

    # 48 8d 3d = lea rdi, [rip+imm32]; look for the IDAUSR string load.
    func = like_yara(b'\x48\x8d\x3d', lambda insn: insn.target == string)
    if func is None:
        raise Exception('IDAUSR reference not found')

    # e8 = call rel32; find the next RDI-based call after the string load.
    ret = like_yara(b'\xe8',
                    lambda insn: insn.reg == RDI and insn.address != func[0].address,
                    func[1], func[1] + 0x10000)
    if ret is None:
        raise Exception('IDAUSR accessor not found')

    offset = ret[0].target
    print('offset:', hex(offset))
    return offset
# ---- file: pkg/internal_api/win.py ----
from __future__ import print_function

from .decoder import decode_lea, RAX
from .kaitai.microsoft_pe import MicrosoftPe


def find_idausr_offset(ida_path):
    """
    Locate the IDAUSR-related data offset in a PE IDA binary.

    Scans the ``.text`` section for the ``IDAUSR`` string, finds the
    code referencing it, then looks for a subsequent ``lea rax, [...]``
    before a ``ret`` to recover the target, returned relative to the
    image base.

    :param ida_path: path to the IDA executable (PE32+, x86-64).
    :returns: file-relative offset (RVA) of the located target.
    :raises Exception: when the section, string, or instructions are missing.
    """
    ida = MicrosoftPe.from_file(ida_path)

    imagebase = ida.pe.optional_hdr.windows.image_base_64

    text = None
    string = None

    for sect in ida.pe.sections:
        if sect.name == '.text':
            text = sect

        value = sect.body.find(b'IDAUSR')
        if value != -1:
            string = sect.virtual_address + imagebase + value

    # Original code raised NameError when either lookup failed; fail loudly
    # with a meaningful message instead.
    if text is None:
        raise Exception('.text section not found in %r' % ida_path)
    if string is None:
        raise Exception("'IDAUSR' string not found in %r" % ida_path)

    code = text.body
    base = text.virtual_address + imagebase

    def search(visited, offset, size, target):
        # Scan [offset, offset+size] for a `lea` (48 8d) matching `target`.
        # `visited` memoizes already-decoded offsets within one like_yara run
        # (was a module-level `global` before; now plain function state).
        end = offset + size
        if offset < 0 or visited[offset]:
            # negative offsets previously wrapped around via visited[-n]
            return None
        while offset <= end:
            offset = code.find(b'\x48\x8d', offset)
            # find() returning -1 previously fell through and decoded at
            # base-1; stop the scan instead.
            if offset == -1 or visited[offset]:
                break

            visited[offset] = True
            insn = decode_lea(base + offset,
                              memoryview(code)[offset:offset + 15])
            if insn and target(insn):
                print('Found:', hex(insn.target), insn)
                return insn
            offset += 1
        return None

    def like_yara(delim, target, start=0, end=None):
        # Around each occurrence of `delim`, try decoding a lea that starts
        # up to 30 bytes earlier (instruction boundaries are unknown).
        visited = [False] * len(code)
        if end is None:
            end = len(code)
        cur = code.find(delim, start)
        while cur != -1 and cur < end:
            for i in range(30):
                res = search(visited, cur - i, i, target)
                if res:
                    return res, cur - i
            cur = code.find(delim, cur + 1)
        return None

    # 84 c0 = test al, al; anchors the function referencing the string.
    func = like_yara(b'\x84\xc0', lambda insn: insn.target == string)
    if func is None:
        raise Exception('IDAUSR reference not found')
    func_start = func[1]

    # c3 = ret; look for `lea rax, [...]` before a return after the anchor.
    ret = like_yara(b'\xc3', lambda insn: insn.reg == RAX,
                    func_start, func_start + 0x10000)
    if ret is None:
        raise Exception('IDAUSR accessor not found')

    offset = ret[0].target - imagebase
    print('offset:', hex(offset))
    return offset
-------------------------------------------------------------------------------- 1 | """ 2 | Logging module 3 | """ 4 | import logging 5 | import sys 6 | 7 | 8 | def getLogger(name): 9 | """ 10 | Returns a logger. Wrapper of logging.getLogger 11 | """ 12 | logger = logging.getLogger(name) 13 | return logger 14 | 15 | 16 | def _install_handler(): 17 | logger = logging.getLogger('pkg') 18 | if not logger.handlers: 19 | console = logging.StreamHandler(sys.stdout) 20 | formatter = logging.Formatter("%(message)s", None) 21 | console.setFormatter(formatter) 22 | logger.addHandler(console) 23 | logger.setLevel(logging.INFO) 24 | return 25 | 26 | 27 | _install_handler() 28 | -------------------------------------------------------------------------------- /pkg/main.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import ida_diskio 4 | 5 | from . import __version__ 6 | from .config import g 7 | from .logger import getLogger 8 | from .package import LocalPackage 9 | from .virtualenv_utils import prepare_virtualenv 10 | 11 | log = getLogger(__name__) 12 | 13 | RC = b""" 14 | _idapkg_basedir = os.path.expanduser(os.path.join('~', 'idapkg')) 15 | 16 | def init_idapkg(basedir): 17 | "idapythonrc.py is a perfect place to initialize IDAUSR variable" 18 | import os 19 | import sys 20 | import json 21 | 22 | def usage(): 23 | print("idapkg is not installed or corrupted.") 24 | print("please use the installation script below:") 25 | print("https://github.com/Jinmo/idapkg") 26 | 27 | config = os.path.join(basedir, 'config.json') 28 | 29 | if os.path.isfile(config): 30 | try: 31 | with open(config, 'rb') as f: 32 | j = json.load(f) 33 | 34 | packages_path = j['path']['packages'] 35 | idapkg_path = os.path.join(packages_path, 'idapkg') 36 | assert os.path.isdir(idapkg_path), "idapkg package does not exist" 37 | # idapkg doesn't have any plugins. just load to path. 
38 | # XXX: replace this with some package-related routines 39 | 40 | sys.path.append(idapkg_path) 41 | from pkg.main import init_environment 42 | init_environment() 43 | except Exception: 44 | import traceback 45 | traceback.print_exc() 46 | return usage() 47 | else: 48 | return usage() 49 | 50 | init_idapkg(_idapkg_basedir) 51 | del init_idapkg, _idapkg_basedir 52 | """ 53 | 54 | SEP = b'\n# idapkg version: ', b'# idapkg end\n' 55 | 56 | 57 | def update_pythonrc(): 58 | rcpath = os.path.join(ida_diskio.get_user_idadir(), "idapythonrc.py") 59 | sep_with_ver = SEP[0] + __version__.encode() 60 | payload = b'%s\n%s\n%s' % (sep_with_ver, RC.strip(), SEP[1]) 61 | if os.path.isfile(rcpath): 62 | with open(rcpath, 'rb') as f: 63 | py = f.read() 64 | if payload in py: 65 | return 66 | 67 | if all(x in py for x in SEP): 68 | py = py.split(SEP[0], 1) 69 | py = py[0] + py[1].split(SEP[1], 1)[1] 70 | py = payload + py 71 | log.info('Updating idapkg into idapythonrc.py.') 72 | else: 73 | py = payload 74 | log.info('Added idapkg into idapythonrc.py. Will work after restarting!') 75 | 76 | with open(rcpath, 'wb') as f: 77 | f.write(py) 78 | 79 | 80 | def init_environment(_load=True): 81 | """ 82 | Must be called from idapythonrc.py. I didn't test other cases. 83 | """ 84 | log.info("idapkg version %s" % __version__) 85 | 86 | update_pythonrc() 87 | prepare_virtualenv(g['path']['virtualenv']) 88 | 89 | ifred = LocalPackage.by_name('ifred') 90 | if ifred: 91 | ifred.load() 92 | from . import actions 93 | 94 | for pkg in LocalPackage.all(): 95 | pkg.populate_env() 96 | -------------------------------------------------------------------------------- /pkg/package.py: -------------------------------------------------------------------------------- 1 | """ 2 | Package-related classes and methods are in pkg.package module. All constructing arguments are accessible via property. 
3 | """ 4 | 5 | import ctypes 6 | import glob 7 | import json 8 | import os 9 | import random 10 | import runpy 11 | import shutil 12 | import sys 13 | import traceback 14 | import zipfile 15 | 16 | import ida_kernwin 17 | import ida_loader 18 | import ida_diskio 19 | 20 | from .config import g 21 | from .env import ea as current_ea, os as current_os 22 | from .internal_api import invalidate_proccache, get_extlangs, idausr_remove, idausr_add 23 | from .logger import getLogger 24 | from .vendor.semantic_version import Version, Spec 25 | from .virtualenv_utils import FixInterpreter 26 | 27 | __all__ = ["LocalPackage", "InstallablePackage"] 28 | 29 | log = getLogger(__name__) 30 | 31 | 32 | def rename(old, new): 33 | if sys.platform == 'win32': 34 | if not ctypes.windll.kernel32.MoveFileExA(str(old), str(new), 0): 35 | raise WindowsError(ctypes.windll.kernel32.GetLastError()) 36 | else: 37 | return os.rename(old, new) 38 | 39 | 40 | def _get_native_suffix(): 41 | if current_os == 'win': 42 | suffix = '.dll' 43 | elif current_os == 'linux': 44 | suffix = '.so' 45 | elif current_os == 'mac': 46 | suffix = '.dylib' 47 | else: 48 | raise Exception("unknown os: %r" % current_os) 49 | return suffix 50 | 51 | 52 | class LocalPackage(object): 53 | def __init__(self, id, path, version): 54 | self.id = str(id) 55 | self.version = str(version) 56 | 57 | self.path = os.path.normpath(path) 58 | 59 | def remove(self): 60 | """ 61 | Removes a package. 62 | """ 63 | idausr_remove(self.path) 64 | 65 | with FixInterpreter(): 66 | for script in self.info().get('uninstallers', []): 67 | script = os.path.join(self.path, script) 68 | try: 69 | runpy.run_path(script) 70 | except Exception: 71 | # XXX: How can I rollback this? 
72 | traceback.print_exc() 73 | log.warn( 74 | "Uninstallation script %r exited with exception!", script) 75 | 76 | if not LocalPackage._remove_package_dir(self.path): 77 | log.error( 78 | "Package directory is in use and will be removed after restart.") 79 | 80 | # If not modified, the only case this fails is, custom ld.so or windows. 81 | # Latter case is common. 82 | new_path = self.path.rstrip('/\\') + '-removed' 83 | if os.path.exists(new_path): 84 | new_path += '-%x' % random.getrandbits(64) 85 | rename(self.path, new_path) 86 | # XXX: is it good to mutate this object? 87 | self.path = new_path 88 | 89 | log.info("Done!") 90 | 91 | def install(self, remove_on_fail=False): 92 | """ 93 | Run python scripts specified by :code:`installers` field in `info.json`. 94 | 95 | :returns: None 96 | """ 97 | orig_cwd = os.getcwd() 98 | try: 99 | os.chdir(self.path) 100 | info = self.info() 101 | scripts = info.get('installers', []) 102 | if not isinstance(scripts, list): 103 | raise Exception( 104 | '%r: Corrupted package: installers key is not list' % self.id) 105 | with FixInterpreter(): 106 | for script in scripts: 107 | log.info('Executing installer path %r...', script) 108 | script = os.path.join(self.path, script) 109 | runpy.run_path(script) 110 | except Exception: 111 | log.info('Installer failed!') 112 | if remove_on_fail: 113 | self.remove() 114 | raise 115 | finally: 116 | os.chdir(orig_cwd) 117 | 118 | def load(self, force=False): 119 | """ 120 | Actually does :code:`ida_loaders.load_plugin(paths)`, and updates IDAUSR variable. 
121 | """ 122 | if not force and self.path in ida_diskio.get_ida_subdirs(''): 123 | # Already loaded, just update sys.path for python imports 124 | if self.path not in sys.path: 125 | sys.path.append(self.path) 126 | return 127 | 128 | # XXX: find a more efficient way to ensure dependencies 129 | errors = [] 130 | for dependency in self.info().get('dependencies', {}).keys(): 131 | dep = LocalPackage.by_name(dependency) 132 | if not dep: 133 | errors.append('Dependency not found: %r' % dependency) 134 | continue 135 | dep.load() 136 | 137 | if errors: 138 | for error in errors: 139 | log.error(error) 140 | return 141 | 142 | def handler(): 143 | # Load plugins immediately 144 | # processors / loaders will be loaded on demand 145 | if self.path not in sys.path: 146 | sys.path.append(self.path) 147 | 148 | # Update IDAUSR variable 149 | idausr_add(self.path) 150 | 151 | # Immediately load compatible plugins 152 | self._find_loadable_modules('plugins', ida_loader.load_plugin) 153 | 154 | # Find loadable processor modules, and if exists, invalidate cached process list (proccache). 155 | invalidates = [] 156 | self._find_loadable_modules('procs', invalidates.append) 157 | 158 | if invalidates: 159 | invalidate_proccache() 160 | 161 | # Run in main thread 162 | ida_kernwin.execute_sync(handler, ida_kernwin.MFF_FAST) 163 | 164 | def populate_env(self): 165 | """ 166 | A passive version of load; it only populates IDAUSR variable. 167 | It's called at :code:`idapythonrc.py`. 
168 | """ 169 | errors = [] 170 | for dependency in self.info().get('dependencies', {}).keys(): 171 | dep = LocalPackage.by_name(dependency) 172 | if not dep: 173 | errors.append('Dependency not found: %r' % dependency) 174 | continue 175 | dep.populate_env() 176 | 177 | if errors: 178 | for error in errors: 179 | log.error(error) 180 | return 181 | 182 | idausr_add(self.path) 183 | 184 | if self.path not in sys.path: 185 | sys.path.append(self.path) 186 | 187 | def plugins(self): 188 | return self._collect_modules('plugins') 189 | 190 | def loaders(self): 191 | return self._collect_modules('loaders') 192 | 193 | def procs(self): 194 | return self._collect_modules('procs') 195 | 196 | def _collect_modules(self, category): 197 | result = [] 198 | self._find_loadable_modules(category, result.append) 199 | return result 200 | 201 | def _find_loadable_modules(self, subdir, callback): 202 | # Load modules in external languages (.py, .idc, ...) 203 | for suffix in ['.' + x.fileext for x in get_extlangs()]: 204 | expr = os.path.join(self.path, subdir, '*' + suffix) 205 | for path in glob.glob(expr): 206 | callback(str(path)) 207 | 208 | # Load native modules 209 | for suffix in (_get_native_suffix(),): 210 | expr = os.path.join(self.path, subdir, '*' + suffix) 211 | for path in glob.glob(expr): 212 | is64 = path[:-len(suffix)][-2:] == '64' 213 | 214 | if is64 == (current_ea == 64): 215 | callback(str(path)) 216 | 217 | def info(self): 218 | """ 219 | Loads :code:`info.json` and returns a parsed JSON object. 220 | 221 | :rtype: dict 222 | """ 223 | with open(os.path.join(self.path, 'info.json'), 'rb') as _file: 224 | return json.load(_file) 225 | 226 | @staticmethod 227 | def by_name(name, prefix=None): 228 | """ 229 | Returns a package with specified `name`. 
230 | 231 | :rtype: LocalPackage 232 | """ 233 | if prefix is None: 234 | prefix = g['path']['packages'] 235 | 236 | path = os.path.join(prefix, name) 237 | 238 | # check if the folder exists 239 | if not os.path.isdir(path): 240 | return None 241 | 242 | # filter removed package 243 | removed = os.path.join(path, '.removed') 244 | if os.path.isfile(removed): 245 | LocalPackage._remove_package_dir(path) 246 | return None 247 | 248 | info_json = os.path.join(path, 'info.json') 249 | if not os.path.isfile(info_json): 250 | log.warn('Warning: info.json is not found at %r', path) 251 | return None 252 | 253 | with open(info_json, 'rb') as _file: 254 | try: 255 | info = json.load(_file) 256 | except Exception: 257 | traceback.print_exc() 258 | log.warn('Warning: info.json is not valid at %r', path) 259 | return None 260 | 261 | result = LocalPackage( 262 | id=info['_id'], path=path, version=info['version']) 263 | return result 264 | 265 | @staticmethod 266 | def all(disabled=False): 267 | """ 268 | List all packages installed at :code:`g['path']['packages']`. 
269 | 270 | :rtype: list(LocalPackage) 271 | """ 272 | prefix = g['path']['packages'] 273 | 274 | res = os.listdir(prefix) 275 | res = (x for x in res if os.path.isdir(os.path.join(prefix, x))) 276 | res = (LocalPackage.by_name(x) for x in res) 277 | res = (x for x in res if x) 278 | res = [x for x in res if (x.id in g['ignored_packages']) == disabled] 279 | return res 280 | 281 | @staticmethod 282 | def _remove_package_dir(path): 283 | errors = [] 284 | 285 | def onerror(_listdir, _path, exc_info): 286 | log.error("%s: %s", _path, str(exc_info[1])) 287 | errors.append(exc_info[1]) 288 | 289 | shutil.rmtree(path, onerror=onerror) 290 | 291 | if errors: 292 | # Mark for later removal 293 | open(os.path.join(path, '.removed'), 'wb').close() 294 | 295 | return not errors 296 | 297 | def __repr__(self): 298 | return '' % \ 299 | (self.id, self.path, self.version) 300 | 301 | 302 | class InstallablePackage(object): 303 | def __init__(self, id, name, version, description, author, repo): 304 | self.id = str(id) 305 | self.name = name 306 | self.version = str(version) 307 | self.description = description 308 | self.repo = repo 309 | self.author = author 310 | 311 | def install(self, upgrade=False): 312 | """ 313 | Just calls :code:`InstallablePackage.install_from_repo(self.repo, self.id, upgrade)`. 314 | """ 315 | install_from_repo(self.repo, self.id, allow_upgrade=upgrade) 316 | 317 | def __repr__(self): 318 | return '' % \ 319 | (self.id, self.version, self.repo) 320 | 321 | 322 | def install_from_repo(repo, name, version_spec='*', allow_upgrade=False, _visited=None): 323 | """ 324 | This method downloads a package satisfying spec. 325 | 326 | .. note :: 327 | The function waits until all of dependencies are installed. 328 | Run it as separate thread if possible. 
329 | """ 330 | 331 | top_level = _visited is None 332 | _visited = _visited or {} 333 | 334 | if name in _visited: 335 | log.warn("Cyclic dependency found when installing %r <-> %r", 336 | name, _visited) 337 | return 338 | 339 | prev = LocalPackage.by_name(name) 340 | 341 | _version_spec = Spec(version_spec) 342 | satisfies_local = prev and Version(prev.version) in _version_spec 343 | 344 | if allow_upgrade or not satisfies_local: 345 | log.debug("Fetching releases for %r from %r...", name, repo) 346 | 347 | releases = repo.releases(name) 348 | if not releases: 349 | error = "Release not found on remote repository: %r on %r (error: %r)" % ( 350 | name, repo, releases['error']) 351 | raise Exception(error) 352 | 353 | releases = [release for release in releases 354 | if Version(release['version']) in _version_spec] 355 | 356 | if not releases: 357 | error = "Release satisfying the condition %r %r not found on remote repository %r" % ( 358 | name, version_spec, repo) 359 | raise Exception(error) 360 | downloading = None if ( 361 | prev and releases[-1]['version'] == prev.version) else releases[-1]['version'] 362 | else: 363 | downloading = None 364 | 365 | if downloading: 366 | log.info('Collecting %s...', name) 367 | data = repo.download(name, downloading) 368 | f = zipfile.ZipFile(data, 'r') 369 | 370 | # No /: topmost files 371 | # One /: topmost folders 372 | topmost_files = [path for path in f.namelist() if path.count('/') == 0] 373 | # From ZipInfo.is_dir() in Python 3.x 374 | topmost_folders = [path for path in f.namelist() if path.endswith('/')] 375 | common_prefix = topmost_folders[0] if len(topmost_files) == 0 and len(topmost_folders) == 1 else "" 376 | 377 | info = json.load(f.open(common_prefix + 'info.json')) 378 | packages_path = g['path']['packages'] 379 | install_path = os.path.join(packages_path, info["_id"]) 380 | 381 | # this ensures os.path.exists(install_path) == False 382 | # TODO: should we unload a already-loaded plugin? 
383 | if prev: 384 | prev.remove() 385 | assert not os.path.exists(install_path) 386 | 387 | # XXX: edge case? 388 | removed = os.path.join(install_path, '.removed') 389 | if os.path.isfile(removed): 390 | os.unlink(removed) 391 | 392 | log.info('Extracting into %r...', install_path) 393 | if common_prefix: 394 | f.extractall(packages_path) 395 | os.rename(os.path.join(packages_path, common_prefix), install_path) 396 | else: 397 | f.extractall(install_path) 398 | 399 | # Initiate LocalPackage object 400 | pkg = LocalPackage(info['_id'], install_path, info['version']) 401 | else: 402 | pkg = prev 403 | 404 | log.info("Requirement already satisfied: %s%s", 405 | name, '' if version_spec == '*' else version_spec) 406 | 407 | restart_required = pkg.info().get('restart_required', False) 408 | _visited[name] = (pkg.version, restart_required) 409 | 410 | # First, install dependencies 411 | # TODO: add version check 412 | for dep_name, dep_version_spec in pkg.info().get('dependencies', {}).items(): 413 | install_from_repo(repo, dep_name, dep_version_spec, allow_upgrade, _visited) 414 | 415 | # Then, install this package. 416 | if downloading: 417 | pkg.install() 418 | 419 | if not restart_required: 420 | pkg.load() 421 | 422 | if top_level: 423 | log.info("Successfully installed %s", 424 | ' '.join('%s-%s' % (key, value[0]) for key, value in _visited.items())) 425 | 426 | delayed = [(key, value) for key, value in _visited.items() if value[1]] 427 | if delayed: 428 | log.info( 429 | "Plugins in the following packages will be loaded after restarting IDA.") 430 | log.info( 431 | " %s", " ".join('%s-%s' % (key, value[0]) for key, value in delayed)) 432 | 433 | return pkg 434 | -------------------------------------------------------------------------------- /pkg/process.py: -------------------------------------------------------------------------------- 1 | """ 2 | Both method redirects stdout to IDA Pro's console. 
3 | """ 4 | from __future__ import print_function 5 | 6 | import sys 7 | import threading 8 | import time 9 | from subprocess import Popen as _Popen, PIPE, STDOUT 10 | 11 | from PyQt5.QtCore import QCoreApplication 12 | 13 | if sys.version_info.major == 3: 14 | import queue as Queue 15 | else: 16 | import Queue 17 | 18 | 19 | class Popen(_Popen): 20 | """ 21 | Subclass of :py:meth:`subprocess.Popen` that 22 | if stdout is not given, it'll redirect stdout to messages window. 23 | """ 24 | 25 | def __init__(self, *args, **kwargs): 26 | if 'stdout' not in kwargs: 27 | kwargs['stdout'] = PIPE 28 | if 'stderr' not in kwargs: 29 | kwargs['stderr'] = STDOUT 30 | 31 | queue = Queue.Queue() 32 | done = [] 33 | 34 | # Now launch the process 35 | super(Popen, self).__init__(*args, **kwargs) 36 | 37 | t_reader = threading.Thread( 38 | target=self._reader, args=(done, queue,)) 39 | t_receiver = threading.Thread( 40 | target=self._receiver, args=(done, queue,)) 41 | 42 | t_reader.start() 43 | t_receiver.start() 44 | 45 | self.threads = t_reader, t_receiver 46 | else: 47 | # No need to do anything 48 | super(Popen, self).__init__(*args, **kwargs) 49 | 50 | @staticmethod 51 | def _receiver(done, queue): 52 | buff = [] 53 | last_output_time = time.time() 54 | stdout = getattr(sys.stdout, 'buffer', sys.stdout) 55 | while not (done and queue.empty()): 56 | cur_time = time.time() 57 | if last_output_time < cur_time - 0.01: 58 | stdout.write(b''.join(buff).replace(b'\r', b'')) 59 | last_output_time = cur_time 60 | buff[:] = [] 61 | try: 62 | item = queue.get(timeout=0.01) 63 | except Queue.Empty: 64 | continue 65 | buff.append(item) 66 | queue.task_done() 67 | stdout.write(b''.join(buff).replace(b'\r', b'')) 68 | 69 | def _reader(self, done, queue): 70 | while True: 71 | byte = self.stdout.read(1) 72 | if not byte: 73 | done.append(True) 74 | break 75 | queue.put(byte) 76 | 77 | 78 | def system(cmd): 79 | """ 80 | Wrapper around :py:meth:`os.system`, except that output will be 
redirected to messages window. 81 | 82 | :param cmd: Command to execute. 83 | :return: exit status. 84 | :rtype: int 85 | """ 86 | process = Popen(cmd, shell=True) 87 | 88 | # call processEvents() to prevent hang 89 | timeout = 0.01 90 | while all(thread.is_alive() for thread in process.threads): 91 | for thread in process.threads: 92 | thread.join(timeout) 93 | QCoreApplication.processEvents() 94 | 95 | return process.wait() 96 | 97 | 98 | if __name__ == '__main__': 99 | print(system('pip install requests')) 100 | -------------------------------------------------------------------------------- /pkg/repo.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | import json 4 | import traceback 5 | from multiprocessing.pool import ThreadPool 6 | 7 | from .compat import quote 8 | from .config import g 9 | from .downloader import download 10 | from .logger import getLogger 11 | from .package import InstallablePackage 12 | 13 | # Connection timeout 14 | TIMEOUT = 8 15 | 16 | # Max concurrent when fetching multiple repository 17 | MAX_CONCURRENT = 10 18 | 19 | log = getLogger(__name__) 20 | 21 | 22 | class Repository(object): 23 | """ 24 | An instance of this class represents a single repository. 25 | """ 26 | 27 | def get(self, name): 28 | """ 29 | Fetch metadata for single package from the repo. 30 | 31 | :returns: None if package is not found, 32 | else a :class:`~pkg.package.InstallablePackage` object 33 | :rtype: pkg.package.InstallablePackage or None 34 | """ 35 | raise NotImplementedError 36 | 37 | def list(self): 38 | """ 39 | Fetch a list of all packages in the repo. 40 | 41 | :returns: list of InstallablePackage in the repo. 42 | :rtype: list(pkg.package.InstallablePackage) 43 | """ 44 | raise NotImplementedError 45 | 46 | def releases(self, name): 47 | """ 48 | Fetch a list of releases of specified package. 
49 | """ 50 | raise NotImplementedError 51 | 52 | @staticmethod 53 | def from_url(url): 54 | def old_repo(name): 55 | return OldRepository(name) 56 | 57 | def github_repo(name): 58 | assert name.startswith('github:') 59 | return GitHubRepository(name[7:]) 60 | 61 | supported_types = { 62 | 'https': old_repo, 63 | 'http': old_repo, 64 | 'github': github_repo 65 | } 66 | 67 | return supported_types[url.split(':')[0]](url) 68 | 69 | def __repr__(self): 70 | raise NotImplementedError 71 | 72 | 73 | class OldRepository(Repository): 74 | """ 75 | S3-hosted repository. 76 | https://github.com/Jinmo/idapkg-api 77 | """ 78 | 79 | def __init__(self, url, timeout=TIMEOUT): 80 | self.url = url 81 | self.timeout = timeout 82 | 83 | def get(self, name): 84 | endpoint = '/info?id=' + quote(name) 85 | res = download(self.url + endpoint, self.timeout) 86 | if not res: # Network Error 87 | return 88 | 89 | res = json.load(res) 90 | if not res['success']: 91 | return 92 | else: 93 | item = res['data'] 94 | return InstallablePackage( 95 | name=item['name'], id=item['id'], version=item['version'], description=item['description'], 96 | author=item['author'], repo=self) 97 | 98 | def list(self): 99 | endpoint = '/search' 100 | res = download(self.url + endpoint, self.timeout) 101 | try: 102 | if res is None: 103 | raise Exception('connection error') 104 | 105 | res = json.load(res) 106 | assert isinstance(res['data'], list) 107 | 108 | return [ 109 | InstallablePackage( 110 | name=item['name'], id=item['id'], version=item['version'], description=item['description'], 111 | author=item['author'], repo=self) 112 | for item in res['data'] 113 | ] 114 | except ValueError: 115 | log.error('Error fetching repo: %r\n%s', 116 | self.url, traceback.format_exc()) 117 | 118 | def releases(self, name): 119 | endpoint = '/releases?name=' + quote(name) 120 | res = download(self.url + endpoint) 121 | 122 | if res is None: 123 | return None 124 | 125 | releases = json.load(res) 126 | if not 
releases['success']: 127 | raise Exception("Server returned error") 128 | else: 129 | return releases['data'] 130 | 131 | def download(self, name, version): 132 | endpoint = '/download?spec=' + quote(name) + '==' + quote(version) 133 | return download(self.url + endpoint, to_file=True) 134 | 135 | def __repr__(self): 136 | return "" % self.url 137 | 138 | 139 | class GitHubRepository(Repository): 140 | """ 141 | GitHub-hosted repository. 142 | https://github.com/Jinmo/idapkg-repo 143 | """ 144 | API_BLOB = 'https://raw.githubusercontent.com/{0}/{1}' 145 | API_ARCHIVE = 'https://github.com/{0}/archive/{1}.zip' 146 | 147 | def __init__(self, repo, timeout=TIMEOUT): 148 | assert self._is_valid_repo(repo) 149 | self.repo = repo 150 | self.timeout = timeout 151 | 152 | def get(self, name): 153 | endpoint = 'info/{0}.json'.format(quote(name)) 154 | res = download(self.API_BLOB.format(self.repo, endpoint)) 155 | item = json.load(res) 156 | return InstallablePackage( 157 | name=item['name'], id=item['id'], version=item['version'], description=item['description'], 158 | author=item['author'], repo=self) 159 | 160 | def list(self): 161 | res = download(self.API_BLOB.format(self.repo, '/list.json')) 162 | items = json.load(res) 163 | return [ 164 | InstallablePackage( 165 | name=item['name'], id=item['id'], version=item['version'], description=item['description'], 166 | author=item['author'], repo=self) 167 | for item in items 168 | ] 169 | 170 | def releases(self, name): 171 | endpoint = 'releases/{0}.json'.format(quote(name)) 172 | res = download(self.API_BLOB.format(self.repo, endpoint)) 173 | return json.load(res) 174 | 175 | def download(self, name, version): 176 | endpoint = 'releases/{0}.json'.format(quote(name)) 177 | releases = json.load(download(self.API_BLOB.format(self.repo, endpoint))) 178 | for release in releases: 179 | if release['version'] == version: 180 | repo = release['repo'] 181 | commit = release['commit'] 182 | assert self._is_valid_repo(repo) 183 | 
assert self._is_valid_commit(commit) 184 | return download(self.API_ARCHIVE.format(repo, commit), to_file=True) 185 | 186 | raise Exception("release not found! (%s==%s)" % (name, version)) 187 | 188 | @staticmethod 189 | def _is_valid_repo(repo): 190 | if repo.count('/') not in (1, 2): 191 | return False 192 | 193 | if repo.count('/') == 1: 194 | repo += '/master' 195 | 196 | if '..' in repo: 197 | return False 198 | 199 | owner, name, branch_or_commit = repo.split('/') 200 | 201 | if '.' in (owner, name, branch_or_commit): 202 | return False 203 | 204 | # From https://github.com/join: 205 | # Username may only contain alphanumeric characters or single hyphens, and 206 | # cannot begin or end with a hyphen. 207 | if not all('a' <= x <= 'z' or x == '-' for x in owner.lower()): 208 | return False 209 | 210 | if not owner or owner[0] == '-' or owner[-1] == '-': 211 | return False 212 | 213 | # Guesses from https://github.com/new 214 | if not all('a' <= x <= 'z' or x in '.-_' for x in name.lower()): 215 | return False 216 | 217 | # Basic names only 218 | if not all('a' <= x <= 'z' or x in '.-_' for x in branch_or_commit.lower()): 219 | return False 220 | 221 | return True 222 | 223 | @staticmethod 224 | def _is_valid_commit(commit): 225 | return len(commit) == 40 and all(x in '0123456789abcdef' for x in commit) 226 | 227 | def __repr__(self): 228 | return "" % self.repo 229 | 230 | 231 | def get_online_packages(repos=None): 232 | """ 233 | Generates a list of packages from specified repositories. 234 | 235 | :param repos: Array of repository urls (string). Default: g['repos'] 236 | :type repos: list(str) or None 237 | :returns: list(:class:`~pkg.package.InstallablePackage`) from each repos. 
238 | """ 239 | 240 | if repos is None: 241 | repos = g['repos'] 242 | 243 | repos = [Repository.from_url(url) for url in repos] 244 | 245 | pool = ThreadPool(MAX_CONCURRENT) 246 | results = pool.map(lambda repo: repo.list(), repos) 247 | results = [pkgs for pkgs in results if pkgs] 248 | 249 | # flatten results 250 | return [pkg for pkgs in results for pkg in pkgs] 251 | 252 | 253 | if __name__ == '__main__': 254 | print('\n'.join(map(repr, get_online_packages(['github:Jinmo/idapkg-repo/master'])))) 255 | -------------------------------------------------------------------------------- /pkg/vendor/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Jinmo/idapkg/5d6af9bd59c5dc886d68335119fae41491f06ea7/pkg/vendor/__init__.py -------------------------------------------------------------------------------- /pkg/vendor/kaitaistruct.py: -------------------------------------------------------------------------------- 1 | import itertools 2 | import sys 3 | from struct import unpack 4 | from io import BytesIO # noqa 5 | 6 | PY2 = sys.version_info[0] == 2 7 | 8 | # Kaitai Struct runtime streaming API version, defined as per PEP-0396 9 | # standard. 
Used for two purposes: 10 | # 11 | # * .py files generated by ksc from .ksy check that they import proper 12 | # KS runtime library by this version number; 13 | # * distribution utils (setup.py) use this when packaging for PyPI 14 | # 15 | __version__ = '0.8' 16 | 17 | 18 | class KaitaiStruct(object): 19 | def __init__(self, stream): 20 | self._io = stream 21 | 22 | def __enter__(self): 23 | return self 24 | 25 | def __exit__(self, *args, **kwargs): 26 | self.close() 27 | 28 | def close(self): 29 | self._io.close() 30 | 31 | @classmethod 32 | def from_file(cls, filename): 33 | f = open(filename, 'rb') 34 | try: 35 | return cls(KaitaiStream(f)) 36 | except Exception: 37 | # close file descriptor, then reraise the exception 38 | f.close() 39 | raise 40 | 41 | @classmethod 42 | def from_bytes(cls, buf): 43 | return cls(KaitaiStream(BytesIO(buf))) 44 | 45 | @classmethod 46 | def from_io(cls, io): 47 | return cls(KaitaiStream(io)) 48 | 49 | 50 | class KaitaiStream(object): 51 | def __init__(self, io): 52 | self._io = io 53 | self.align_to_byte() 54 | 55 | def __enter__(self): 56 | return self 57 | 58 | def __exit__(self, *args, **kwargs): 59 | self.close() 60 | 61 | def close(self): 62 | self._io.close() 63 | 64 | # ======================================================================== 65 | # Stream positioning 66 | # ======================================================================== 67 | 68 | def is_eof(self): 69 | io = self._io 70 | t = io.read(1) 71 | if t == b'': 72 | return True 73 | else: 74 | io.seek(io.tell() - 1) 75 | return False 76 | 77 | def seek(self, n): 78 | self._io.seek(n) 79 | 80 | def pos(self): 81 | return self._io.tell() 82 | 83 | def size(self): 84 | # Python has no internal File object API function to get 85 | # current file / StringIO size, thus we use the following 86 | # trick. 
87 | io = self._io 88 | # Remember our current position 89 | cur_pos = io.tell() 90 | # Seek to the end of the File object 91 | io.seek(0, 2) 92 | # Remember position, which is equal to the full length 93 | full_size = io.tell() 94 | # Seek back to the current position 95 | io.seek(cur_pos) 96 | return full_size 97 | 98 | # ======================================================================== 99 | # Integer numbers 100 | # ======================================================================== 101 | 102 | # ------------------------------------------------------------------------ 103 | # Signed 104 | # ------------------------------------------------------------------------ 105 | 106 | def read_s1(self): 107 | return unpack('b', self.read_bytes(1))[0] 108 | 109 | # ........................................................................ 110 | # Big-endian 111 | # ........................................................................ 112 | 113 | def read_s2be(self): 114 | return unpack('>h', self.read_bytes(2))[0] 115 | 116 | def read_s4be(self): 117 | return unpack('>i', self.read_bytes(4))[0] 118 | 119 | def read_s8be(self): 120 | return unpack('>q', self.read_bytes(8))[0] 121 | 122 | # ........................................................................ 123 | # Little-endian 124 | # ........................................................................ 125 | 126 | def read_s2le(self): 127 | return unpack('H', self.read_bytes(2))[0] 148 | 149 | def read_u4be(self): 150 | return unpack('>I', self.read_bytes(4))[0] 151 | 152 | def read_u8be(self): 153 | return unpack('>Q', self.read_bytes(8))[0] 154 | 155 | # ........................................................................ 156 | # Little-endian 157 | # ........................................................................ 
158 | 159 | def read_u2le(self): 160 | return unpack('f', self.read_bytes(4))[0] 178 | 179 | def read_f8be(self): 180 | return unpack('>d', self.read_bytes(8))[0] 181 | 182 | # ........................................................................ 183 | # Little-endian 184 | # ........................................................................ 185 | 186 | def read_f4le(self): 187 | return unpack(' 0: 203 | # 1 bit => 1 byte 204 | # 8 bits => 1 byte 205 | # 9 bits => 2 bytes 206 | bytes_needed = ((bits_needed - 1) // 8) + 1 207 | buf = self.read_bytes(bytes_needed) 208 | for byte in buf: 209 | # Python 2 will get "byte" as one-character str, thus 210 | # we need to convert it to integer manually; Python 3 211 | # is fine as is. 212 | if isinstance(byte, str): 213 | byte = ord(byte) 214 | self.bits <<= 8 215 | self.bits |= byte 216 | self.bits_left += 8 217 | 218 | # raw mask with required number of 1s, starting from lowest bit 219 | mask = (1 << n) - 1 220 | # shift mask to align with highest bits available in self.bits 221 | shift_bits = self.bits_left - n 222 | mask <<= shift_bits 223 | # derive reading result 224 | res = (self.bits & mask) >> shift_bits 225 | # clear top bits that we've just read => AND with 1s 226 | self.bits_left -= n 227 | mask = (1 << self.bits_left) - 1 228 | self.bits &= mask 229 | 230 | return res 231 | 232 | # ======================================================================== 233 | # Byte arrays 234 | # ======================================================================== 235 | 236 | def read_bytes(self, n): 237 | if n < 0: 238 | raise ValueError( 239 | "requested invalid %d amount of bytes" % 240 | (n,) 241 | ) 242 | r = self._io.read(n) 243 | if len(r) < n: 244 | raise EOFError( 245 | "requested %d bytes, but got only %d bytes" % 246 | (n, len(r)) 247 | ) 248 | return r 249 | 250 | def read_bytes_full(self): 251 | return self._io.read() 252 | 253 | def read_bytes_term(self, term, include_term, consume_term, eos_error): 
254 | r = b'' 255 | while True: 256 | c = self._io.read(1) 257 | if c == b'': 258 | if eos_error: 259 | raise Exception( 260 | "end of stream reached, but no terminator %d found" % 261 | (term,) 262 | ) 263 | else: 264 | return r 265 | elif ord(c) == term: 266 | if include_term: 267 | r += c 268 | if not consume_term: 269 | self._io.seek(self._io.tell() - 1) 270 | return r 271 | else: 272 | r += c 273 | 274 | def ensure_fixed_contents(self, expected): 275 | actual = self._io.read(len(expected)) 276 | if actual != expected: 277 | raise Exception( 278 | "unexpected fixed contents: got %r, was waiting for %r" % 279 | (actual, expected) 280 | ) 281 | return actual 282 | 283 | @staticmethod 284 | def bytes_strip_right(data, pad_byte): 285 | new_len = len(data) 286 | if PY2: 287 | # data[...] must yield an integer, to compare with integer pad_byte 288 | data = bytearray(data) 289 | 290 | while new_len > 0 and data[new_len - 1] == pad_byte: 291 | new_len -= 1 292 | 293 | return data[:new_len] 294 | 295 | @staticmethod 296 | def bytes_terminate(data, term, include_term): 297 | new_len = 0 298 | max_len = len(data) 299 | if PY2: 300 | # data[...] 
must yield an integer, to compare with integer term 301 | data = bytearray(data) 302 | 303 | while new_len < max_len and data[new_len] != term: 304 | new_len += 1 305 | 306 | if include_term and new_len < max_len: 307 | new_len += 1 308 | 309 | return data[:new_len] 310 | 311 | # ======================================================================== 312 | # Byte array processing 313 | # ======================================================================== 314 | 315 | @staticmethod 316 | def process_xor_one(data, key): 317 | if PY2: 318 | return bytes(bytearray(v ^ key for v in bytearray(data))) 319 | else: 320 | return bytes(v ^ key for v in data) 321 | 322 | @staticmethod 323 | def process_xor_many(data, key): 324 | if PY2: 325 | return bytes(bytearray(a ^ b for a, b in zip(bytearray(data), itertools.cycle(bytearray(key))))) 326 | else: 327 | return bytes(a ^ b for a, b in zip(data, itertools.cycle(key))) 328 | 329 | @staticmethod 330 | def process_rotate_left(data, amount, group_size): 331 | if group_size != 1: 332 | raise Exception( 333 | "unable to rotate group of %d bytes yet" % 334 | (group_size,) 335 | ) 336 | 337 | mask = group_size * 8 - 1 338 | anti_amount = -amount & mask 339 | 340 | r = bytearray(data) 341 | for i in range(len(r)): 342 | r[i] = (r[i] << amount) & 0xff | (r[i] >> anti_amount) 343 | return bytes(r) 344 | -------------------------------------------------------------------------------- /pkg/vendor/semantic_version/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Copyright (c) The python-semanticversion project 3 | # This code is distributed under the two-clause BSD License. 
from .base import compare, match, validate, SimpleSpec, NpmSpec, Spec, SpecItem, Version


# NOTE(review): the author's angle-bracketed e-mail address was stripped by
# the HTML-eating dump tool; restored from upstream python-semanticversion.
__author__ = "Raphaël Barrois <raphael.barrois+semver@polytechnique.org>"
__version__ = '2.8.6.dev0'


# ===== pkg/vendor/semantic_version/django_fields.py =====
# -*- coding: utf-8 -*-
# Copyright (c) The python-semanticversion project
# This code is distributed under the two-clause BSD License.

import warnings

from django.db import models
# NOTE(review): ugettext_lazy was removed in Django 4.0 (gettext_lazy is the
# replacement) -- confirm which Django versions this vendored copy supports.
from django.utils.translation import ugettext_lazy as _

from . import base


class SemVerField(models.CharField):
    """Base CharField that stores a semantic-version object as text."""

    def __init__(self, *args, **kwargs):
        kwargs.setdefault('max_length', 200)
        super(SemVerField, self).__init__(*args, **kwargs)

    def from_db_value(self, value, expression, connection, *args):
        """Convert from the database format.

        This should be the inverse of self.get_prep_value()
        """
        return self.to_python(value)

    def get_prep_value(self, obj):
        return None if obj is None else str(obj)

    def get_db_prep_value(self, value, connection, prepared=False):
        if not prepared:
            value = self.get_prep_value(value)
        return value

    def value_to_string(self, obj):
        value = self.to_python(self.value_from_object(obj))
        return str(value)

    def run_validators(self, value):
        # Validators run against the string form, not the Version object.
        return super(SemVerField, self).run_validators(str(value))


class VersionField(SemVerField):
    default_error_messages = {
        'invalid': _("Enter a valid version number in X.Y.Z format."),
    }
    description = _("Version")

    def __init__(self, *args, **kwargs):
        self.partial = kwargs.pop('partial', False)
        if self.partial:
            warnings.warn(
                "Use of `partial=True` will be removed in 3.0.",
                DeprecationWarning,
                stacklevel=2,
            )
        self.coerce = kwargs.pop('coerce', False)
        super(VersionField, self).__init__(*args, **kwargs)

    def deconstruct(self):
        """Handle django.db.migrations."""
        name, path, args, kwargs = super(VersionField, self).deconstruct()
        kwargs['partial'] = self.partial
        kwargs['coerce'] = self.coerce
        return name, path, args, kwargs

    def to_python(self, value):
        """Converts any value to a base.Version field."""
        if value is None or value == '':
            return value
        if isinstance(value, base.Version):
            return value
        if self.coerce:
            return base.Version.coerce(value, partial=self.partial)
        else:
            return base.Version(value, partial=self.partial)


class SpecField(SemVerField):
    default_error_messages = {
        'invalid': _("Enter a valid version number spec list in ==X.Y.Z,>=A.B.C format."),
    }
    description = _("Version specification list")

    def __init__(self, *args, **kwargs):
        self.syntax = kwargs.pop('syntax', base.DEFAULT_SYNTAX)
        super(SpecField, self).__init__(*args, **kwargs)

    def deconstruct(self):
        """Handle django.db.migrations."""
        name, path, args, kwargs = super(SpecField, self).deconstruct()
        if self.syntax != base.DEFAULT_SYNTAX:
            kwargs['syntax'] = self.syntax
        return name, path, args, kwargs

    def to_python(self, value):
        """Converts any value to a base.Spec field."""
        if value is None or value == '':
            return value
        if isinstance(value, base.BaseSpec):
            return value
        return base.BaseSpec.parse(value, syntax=self.syntax)


# ===== pkg/virtualenv_utils.py =====
import os
import runpy
import subprocess
import sys
import tempfile
from hashlib import sha256

from .logger import getLogger
# (import statement continues on the next dump line: `.process import Popen, system`)
from .process import Popen, system

# extracted from https://pypi.org/simple/virtualenv/
VIRTUALENV_URL = 'https://files.pythonhosted.org/packages/b3/3a' \
    '/3690099fc8f5137a1d879448c49480590bf6f0529eba7b72e3a34ffd8a31/virtualenv-16.7.10-py2.py3-none-any.whl'
HASH = '105893c8dc66b7817691c7371439ec18e3b6c5e323a304b5ed96cdd2e75cc1ec'

log = getLogger(__name__)


def _locate_python_win():
    # On Windows the embedded interpreter lives next to IDA under exec_prefix.
    return os.path.join(sys.exec_prefix, 'python.exe')


def _locate_python():
    """Find a python executable usable for spawning virtualenv."""
    if sys.platform == 'win32':
        executable = _locate_python_win()
    elif sys.platform == 'darwin':
        executable = sys.executable
    elif sys.platform == 'linux':
        # TODO: test linux version
        log.info(
            'Linux virtualenv support is not tested. If this prints "Done!", it\'s working!')
        executable = sys.executable
    else:
        # FIX: was `assert False, ...` -- assertions vanish under `python -O`,
        # which would fall through with `executable` unbound.
        raise RuntimeError("this platform is not supported")
    return executable


class FixInterpreter(object):
    """Context manager that temporarily patches sys.executable,
    subprocess.Popen and os.system so that virtualenv spawns a real
    python interpreter instead of the IDA host process."""

    def __init__(self):
        pass

    def __enter__(self):
        self._executable, sys.executable = sys.executable, _locate_python()
        self._popen, subprocess.Popen = subprocess.Popen, Popen
        self._system, os.system = os.system, system

    def __exit__(self, type_, value, traceback):
        sys.executable = self._executable
        subprocess.Popen = self._popen
        os.system = self._system


def _install_virtualenv(path):
    """Download the pinned virtualenv wheel (sha256-verified), put it on
    sys.path and use it to create an environment at `path`."""
    from .downloader import download

    log.info('Downloading virtualenv from %r ...', VIRTUALENV_URL)
    data = download(VIRTUALENV_URL).read()

    if sha256(data).hexdigest() != HASH:
        raise RuntimeError('virtualenv hash does not match!')

    # NOTE(review): delete=False leaks the temp .zip on disk; it must outlive
    # this `with` block because the wheel is imported from sys.path below.
    with tempfile.NamedTemporaryFile('wb', suffix=".zip", delete=False) as zf:
        zf.write(data)
        zf.flush()
        sys.path.insert(0, zf.name)

    import virtualenv

    with FixInterpreter():
        log.info('Creating environment using virtualenv...')
        virtualenv.create_environment(path, site_packages=True)
        log.info('Done!')


def prepare_virtualenv(path, tried=False):
    """Activate the virtualenv at `path`, installing it first if needed.

    Retries exactly once (`tried=True`) after a fresh install.
    """
    # Normalize path first
    path = os.path.abspath(path)

    try:
        # 1. Run activator in virtualenv
        activator_path = os.path.join(
            path, 'Scripts' if sys.platform == 'win32' else 'bin', 'activate_this.py')

        if not os.path.isfile(activator_path):
            raise ImportError()

        runpy.run_path(activator_path)

        # 2. Check if pip is in the virtualenv
        import pip
        if not os.path.abspath(pip.__file__).startswith(path):
            raise ImportError()

    except ImportError:
        if tried:
            log.error("Failed installing virtualenv!")
            return

        log.info('pip is not found in the virtualenv.')
        log.info('Will install virtualenv at %r...', path)

        # Install and try again
        _install_virtualenv(path)
        prepare_virtualenv(path, True)


# ===== uninstaller.py =====
import os
import ida_diskio

from pkg import __version__
from pkg.main import RC

# Markers delimiting the idapkg bootstrap snippet inside idapythonrc.py.
SEP = b'\n# idapkg version: ', b'# idapkg end\n'


def update_pythonrc():
    """Strip the idapkg bootstrap block from the user's idapythonrc.py."""
    rcpath = os.path.join(ida_diskio.get_user_idadir(), "idapythonrc.py")
    sep_with_ver = SEP[0] + __version__.encode()
    payload = b'%s\n%s\n%s' % (sep_with_ver, RC.strip(), SEP[1])
    if os.path.isfile(rcpath):
        with open(rcpath, 'rb') as f:
            py = f.read()
        if payload in py and all(x in py for x in SEP):
            # Cut out everything between the two SEP markers.
            py = py.split(SEP[0], 1)
            py = py[0] + py[1].split(SEP[1], 1)[1]
        else:
            # NOTE(review): indentation was lost in this dump; as nested here
            # this branch replaces the rc with a *fresh install* payload,
            # which looks wrong for an uninstaller -- confirm the original
            # nesting against upstream before changing behavior.
            py = payload

        print('Removed idapkg from idapythonrc.py. '
              'I hope to see you again! :)')

        print(' You can remove ~/idapkg directory to remove packages and configurations.')

        with open(rcpath, 'wb') as f:
            f.write(py)


update_pythonrc()