├── .gitignore
├── .github
│   └── FUNDING.yml
├── modules
│   └── frida-gadget-ios
│       ├── .npmignore
│       ├── index.d.ts
│       ├── index.js
│       ├── package.json
│       └── download.js
├── meson-scripts
│   ├── BSDmakefile
│   ├── configure
│   ├── Makefile
│   ├── configure.bat
│   └── make.bat
├── .gitmodules
├── progress.py
├── devkit-assets
│   ├── frida-gum-example.vcxproj.filters
│   ├── frida-core-example.vcxproj.filters
│   ├── frida-gumjs-example.vcxproj.filters
│   ├── frida-gum-example.sln
│   ├── frida-core-example.sln
│   ├── frida-gumjs-example.sln
│   ├── frida-gumjs-example-unix.c
│   ├── frida-gumjs-example-windows.c
│   ├── frida-gum-example-unix.c
│   ├── frida-gum-example-windows.c
│   ├── frida-core-example-unix.c
│   ├── frida-core-example-windows.c
│   ├── frida-gum-example.vcxproj
│   ├── frida-core-example.vcxproj
│   └── frida-gumjs-example.vcxproj
├── machine_file.py
├── README.md
├── mkfatmacho.py
├── frida_version.py
├── post-process-oabi.py
├── make-changelog.sh
├── meson_make.py
├── mkdevkit.py
├── env_android.py
├── winenv.py
├── env_apple.py
├── sync-from-upstream.py
├── deps.toml
├── machine_spec.py
├── env_generic.py
├── env.py
├── meson_configure.py
└── devkit.py
/.gitignore:
--------------------------------------------------------------------------------
1 | /__pycache__/
2 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | github: frida
2 |
--------------------------------------------------------------------------------
/modules/frida-gadget-ios/.npmignore:
--------------------------------------------------------------------------------
1 | /*.dylib
2 |
--------------------------------------------------------------------------------
/meson-scripts/BSDmakefile:
--------------------------------------------------------------------------------
1 | all: .DEFAULT
2 |
3 | .DEFAULT:
4 | @gmake ${.MAKEFLAGS} ${.TARGETS}
5 |
6 | .PHONY: all
7 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "meson"]
2 | path = meson
3 | url = https://github.com/frida/meson.git
4 | [submodule "tomlkit"]
5 | path = tomlkit
6 | url = https://github.com/python-poetry/tomlkit.git
7 |
--------------------------------------------------------------------------------
/modules/frida-gadget-ios/index.d.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * Filesystem path to the universal gadget dylib for iOS.
3 | */
4 | export declare const path: string;
5 |
6 | /**
7 | * The gadget's version number as a string, e.g. `"1.2.3"`.
8 | */
9 | export declare const version: string;
10 |
--------------------------------------------------------------------------------
/progress.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import Callable
3 |
4 |
5 | @dataclass
6 | class Progress:
7 | message: str
8 |
9 |
10 | ProgressCallback = Callable[[Progress], None]
11 |
12 |
13 | def print_progress(progress: Progress):
14 | print(f"{progress.message}...", flush=True)
15 |
--------------------------------------------------------------------------------
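
A minimal usage sketch for the module above, assuming `progress.py` is importable directly; the `sync_submodules` helper is hypothetical and only stands in for a real releng task:

```python
# Hypothetical consumer of progress.py; Progress, ProgressCallback and
# print_progress are the names defined above.
from progress import Progress, ProgressCallback, print_progress


def sync_submodules(on_progress: ProgressCallback = print_progress) -> None:
    # Report each phase; print_progress renders it as "<message>..."
    on_progress(Progress("Initializing submodules"))
    # ... the actual work would happen here ...
    on_progress(Progress("Done"))


if __name__ == "__main__":
    sync_submodules()
```
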
/modules/frida-gadget-ios/index.js:
--------------------------------------------------------------------------------
1 | const path = require('path');
2 | const pkg = require('./package.json');
3 |
4 | const pkgDir = path.dirname(require.resolve('.'));
5 | const pkgVersion = pkg.version.split('-')[0];
6 |
7 | module.exports = {
8 | path: path.join(pkgDir, `frida-gadget-${pkgVersion}-ios-universal.dylib`),
9 | version: pkgVersion
10 | };
11 |
--------------------------------------------------------------------------------
/meson-scripts/configure:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | [ -z "$PYTHON" ] && PYTHON=$(which python3 >/dev/null && echo python3 || echo python)
4 |
5 | cd $(dirname $0)
6 |
7 | srcroot=$(pwd)
8 |
9 | if [ ! -f releng/meson/meson.py ]; then
10 | git submodule update --init --recursive --depth 1 || exit $?
11 | fi
12 |
13 | cd - >/dev/null
14 |
15 | exec "$PYTHON" \
16 | -c "import sys; sys.path.insert(0, sys.argv[1]); from releng.meson_configure import main; main()" \
17 | "$srcroot" \
18 | "$@"
19 |
--------------------------------------------------------------------------------
/meson-scripts/Makefile:
--------------------------------------------------------------------------------
1 | PYTHON ?= $(shell which python3 >/dev/null && echo python3 || echo python)
2 |
3 | all $(MAKECMDGOALS):
4 | @$(PYTHON) \
5 | -c "import sys; sys.path.insert(0, sys.argv[1]); from releng.meson_make import main; main()" \
6 | "$(shell pwd)" \
7 | ./build \
8 | $(MAKECMDGOALS)
9 |
10 | git-submodules:
11 | @if [ ! -f releng/meson/meson.py ]; then \
12 | git submodule update --init --recursive --depth 1; \
13 | fi
14 | -include git-submodules
15 |
16 | .PHONY: all $(MAKECMDGOALS)
17 |
--------------------------------------------------------------------------------
/modules/frida-gadget-ios/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "frida-gadget-ios",
3 | "version": "0.0.0",
4 | "description": "Gadget for iOS",
5 | "main": "index.js",
6 | "types": "index.d.ts",
7 | "scripts": {
8 | "install": "node download.js"
9 | },
10 | "repository": {
11 | "type": "git",
12 | "url": "git+https://github.com/frida/frida.git"
13 | },
14 | "keywords": [
15 | "instrumentation",
16 | "hooking",
17 | "ios"
18 | ],
19 | "author": "Frida Developers",
20 | "license": "LGPL-2.0 WITH WxWindows-exception-3.1",
21 | "bugs": {
22 | "url": "https://github.com/frida/frida/issues"
23 | },
24 | "homepage": "https://github.com/frida/frida#readme"
25 | }
26 |
--------------------------------------------------------------------------------
/meson-scripts/configure.bat:
--------------------------------------------------------------------------------
1 | @setlocal
2 | @echo off
3 | rem:: Based on: https://github.com/microsoft/terminal/issues/217#issuecomment-737594785
4 | goto :_start_
5 |
6 | :set_real_dp0
7 | set dp0=%~dp0
8 | set "dp0=%dp0:~0,-1%"
9 | goto :eof
10 |
11 | :_start_
12 | call :set_real_dp0
13 |
14 | if not exist "%dp0%\releng\meson\meson.py" (
15 | pushd "%dp0%" & git submodule update --init --recursive --depth 1 & popd
16 | if %errorlevel% neq 0 exit /b %errorlevel%
17 | )
18 |
19 | endlocal & goto #_undefined_# 2>nul || title %COMSPEC% & python ^
20 | -c "import sys; sys.path.insert(0, sys.argv[1]); from releng.meson_configure import main; main()" ^
21 | "%dp0%" ^
22 | %*
23 |
--------------------------------------------------------------------------------
/meson-scripts/make.bat:
--------------------------------------------------------------------------------
1 | @setlocal
2 | @echo off
3 | rem:: Based on: https://github.com/microsoft/terminal/issues/217#issuecomment-737594785
4 | goto :_start_
5 |
6 | :set_real_dp0
7 | set dp0=%~dp0
8 | set "dp0=%dp0:~0,-1%"
9 | goto :eof
10 |
11 | :_start_
12 | call :set_real_dp0
13 |
14 | if not exist "%dp0%\releng\meson\meson.py" (
15 | pushd "%dp0%" & git submodule update --init --recursive --depth 1 & popd
16 | if %errorlevel% neq 0 exit /b %errorlevel%
17 | )
18 |
19 | endlocal & goto #_undefined_# 2>nul || title %COMSPEC% & python ^
20 | -c "import sys; sys.path.insert(0, sys.argv[1]); from releng.meson_make import main; main()" ^
21 | "%dp0%" ^
22 | .\build ^
23 | %*
24 |
--------------------------------------------------------------------------------
/devkit-assets/frida-gum-example.vcxproj.filters:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
3 |   <ItemGroup>
4 |     <Filter Include="Source Files">
5 |       <UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier>
6 |       <Extensions>cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx</Extensions>
7 |     </Filter>
8 |     <Filter Include="Header Files">
9 |       <UniqueIdentifier>{93995380-89BD-4b04-88EB-625FBE52EBFB}</UniqueIdentifier>
10 |       <Extensions>h;hh;hpp;hxx;hm;inl;inc;xsd</Extensions>
11 |     </Filter>
12 |     <Filter Include="Resource Files">
13 |       <UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
14 |       <Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions>
15 |     </Filter>
16 |   </ItemGroup>
17 |   <ItemGroup>
18 |     <ClCompile Include="frida-gum-example-windows.c">
19 |       <Filter>Source Files</Filter>
20 |     </ClCompile>
21 |   </ItemGroup>
22 |   <ItemGroup>
23 |     <ClInclude Include="frida-gum.h">
24 |       <Filter>Header Files</Filter>
25 |     </ClInclude>
26 |   </ItemGroup>
27 | </Project>
--------------------------------------------------------------------------------
/devkit-assets/frida-core-example.vcxproj.filters:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
3 |   <ItemGroup>
4 |     <Filter Include="Source Files">
5 |       <UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier>
6 |       <Extensions>cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx</Extensions>
7 |     </Filter>
8 |     <Filter Include="Header Files">
9 |       <UniqueIdentifier>{93995380-89BD-4b04-88EB-625FBE52EBFB}</UniqueIdentifier>
10 |       <Extensions>h;hh;hpp;hxx;hm;inl;inc;xsd</Extensions>
11 |     </Filter>
12 |     <Filter Include="Resource Files">
13 |       <UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
14 |       <Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions>
15 |     </Filter>
16 |   </ItemGroup>
17 |   <ItemGroup>
18 |     <ClCompile Include="frida-core-example-windows.c">
19 |       <Filter>Source Files</Filter>
20 |     </ClCompile>
21 |   </ItemGroup>
22 |   <ItemGroup>
23 |     <ClInclude Include="frida-core.h">
24 |       <Filter>Header Files</Filter>
25 |     </ClInclude>
26 |   </ItemGroup>
27 | </Project>
--------------------------------------------------------------------------------
/devkit-assets/frida-gumjs-example.vcxproj.filters:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
3 |   <ItemGroup>
4 |     <Filter Include="Source Files">
5 |       <UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier>
6 |       <Extensions>cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx</Extensions>
7 |     </Filter>
8 |     <Filter Include="Header Files">
9 |       <UniqueIdentifier>{93995380-89BD-4b04-88EB-625FBE52EBFB}</UniqueIdentifier>
10 |       <Extensions>h;hh;hpp;hxx;hm;inl;inc;xsd</Extensions>
11 |     </Filter>
12 |     <Filter Include="Resource Files">
13 |       <UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
14 |       <Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions>
15 |     </Filter>
16 |   </ItemGroup>
17 |   <ItemGroup>
18 |     <ClCompile Include="frida-gumjs-example-windows.c">
19 |       <Filter>Source Files</Filter>
20 |     </ClCompile>
21 |   </ItemGroup>
22 |   <ItemGroup>
23 |     <ClInclude Include="frida-gumjs.h">
24 |       <Filter>Header Files</Filter>
25 |     </ClInclude>
26 |   </ItemGroup>
27 | </Project>
--------------------------------------------------------------------------------
/machine_file.py:
--------------------------------------------------------------------------------
1 | from configparser import ConfigParser
2 | from pathlib import Path
3 | from typing import Dict, List, Optional, Sequence, Union
4 |
5 |
6 | def load(mfile: Path) -> Optional[Dict[str, Union[str, List[str]]]]:
7 | config = ConfigParser()
8 | config.read(mfile)
9 |
10 | hidden_constants = {
11 | "true": True,
12 | "false": False,
13 | }
14 |
15 | items = {}
16 | if config.has_section("constants"):
17 | for name, raw_value in config.items("constants"):
18 | items[name] = eval(raw_value, hidden_constants, items)
19 |
20 | for section_name, section in config.items():
21 | if section_name in ("DEFAULT", "constants"):
22 | continue
23 | for name, raw_value in section.items():
24 | value = eval(raw_value, hidden_constants, items)
25 | if section_name == "binaries" and isinstance(value, str):
26 | value = [value]
27 | items[name] = value
28 |
29 | if len(items) == 0:
30 | return None
31 |
32 | return items
33 |
34 |
35 | def bool_to_meson(b: bool) -> str:
36 | return "true" if b else "false"
37 |
38 |
39 | def strv_to_meson(strv: Sequence[str]) -> str:
40 | return "[" + ", ".join(map(str_to_meson, strv)) + "]"
41 |
42 |
43 | def str_to_meson(s: str) -> str:
44 | return "'" + s + "'"
45 |
--------------------------------------------------------------------------------
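
A self-contained sketch of what `load()` returns for a typical machine file; the file contents below are illustrative rather than taken from this repo:

```python
# Round-trip sketch for machine_file.load() and the *_to_meson helpers,
# assuming machine_file.py is importable directly.
from pathlib import Path
from tempfile import TemporaryDirectory

from machine_file import bool_to_meson, load, strv_to_meson

EXAMPLE = """\
[constants]
common_flags = ['-target', 'arm64-apple-ios']

[binaries]
c = 'clang'

[properties]
needs_exe_wrapper = true
"""

with TemporaryDirectory() as tmp:
    mfile = Path(tmp) / "machine.txt"
    mfile.write_text(EXAMPLE)
    items = load(mfile)

assert items["common_flags"] == ["-target", "arm64-apple-ios"]  # constants are eval'd first
assert items["c"] == ["clang"]             # "binaries" values are normalized to lists
assert items["needs_exe_wrapper"] is True  # true/false map to Python booleans

print(strv_to_meson(items["c"]))                  # ['clang']
print(bool_to_meson(items["needs_exe_wrapper"]))  # true
```
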
/README.md:
--------------------------------------------------------------------------------
1 | # releng
2 |
3 | Build system infrastructure to ensure fast and consistent builds across projects.
4 |
5 | Intended to be used as a git submodule at `/releng` in projects.
6 |
7 | ## Setting up a new project
8 |
9 | 1. Set up the repo:
10 |
11 | ```sh
12 | $ git init my-project
13 | $ cd my-project
14 | $ git submodule add https://github.com/frida/releng.git
15 | $ cp releng/meson-scripts/* .
16 | $ echo -e '/build/\n/deps/' > .gitignore
17 | ```
18 |
19 | 2. Create `meson.build` containing:
20 |
21 | ```meson
22 | project('my-project', 'vala', version: '1.0.0')
23 | executable('hello', 'hello.vala', dependencies: dependency('glib-2.0'))
24 | ```
25 |
26 | 3. Create `hello.vala` containing:
27 |
28 | ```vala
29 | int main (string[] args) {
30 | print ("Hello World from Vala!\n");
31 | return 0;
32 | }
33 | ```
34 |
35 | 4. Build and run:
36 |
37 | ```sh
38 | $ make
39 | $ ./build/hello
40 | Hello World from Vala!
41 | $
42 | ```
43 |
44 | ## Cross-compiling
45 |
46 | ### iOS
47 |
48 | ```sh
49 | $ ./configure --host=ios-arm64
50 | $ make
51 | ```
52 |
53 | ### Android
54 |
55 | ```sh
56 | $ ./configure --host=android-arm64
57 | $ make
58 | ```
59 |
60 | ### Raspberry Pi
61 |
62 | ```sh
63 | $ sudo apt-get install g++-arm-linux-gnueabihf
64 | $ ./configure --host=arm-linux-gnueabihf
65 | $ make
66 | ```
67 |
--------------------------------------------------------------------------------
/devkit-assets/frida-gum-example.sln:
--------------------------------------------------------------------------------
1 |
2 | Microsoft Visual Studio Solution File, Format Version 12.00
3 | # Visual Studio 14
4 | VisualStudioVersion = 14.0.25420.1
5 | MinimumVisualStudioVersion = 10.0.40219.1
6 | Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "frida-gum-example", "frida-gum-example.vcxproj", "{9EF5A94D-9907-4E7A-98E5-B03CB1750740}"
7 | EndProject
8 | Global
9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution
10 | Debug|x64 = Debug|x64
11 | Debug|x86 = Debug|x86
12 | Release|x64 = Release|x64
13 | Release|x86 = Release|x86
14 | EndGlobalSection
15 | GlobalSection(ProjectConfigurationPlatforms) = postSolution
16 | {9EF5A94D-9907-4E7A-98E5-B03CB1750740}.Debug|x64.ActiveCfg = Debug|x64
17 | {9EF5A94D-9907-4E7A-98E5-B03CB1750740}.Debug|x64.Build.0 = Debug|x64
18 | {9EF5A94D-9907-4E7A-98E5-B03CB1750740}.Debug|x86.ActiveCfg = Debug|Win32
19 | {9EF5A94D-9907-4E7A-98E5-B03CB1750740}.Debug|x86.Build.0 = Debug|Win32
20 | {9EF5A94D-9907-4E7A-98E5-B03CB1750740}.Release|x64.ActiveCfg = Release|x64
21 | {9EF5A94D-9907-4E7A-98E5-B03CB1750740}.Release|x64.Build.0 = Release|x64
22 | {9EF5A94D-9907-4E7A-98E5-B03CB1750740}.Release|x86.ActiveCfg = Release|Win32
23 | {9EF5A94D-9907-4E7A-98E5-B03CB1750740}.Release|x86.Build.0 = Release|Win32
24 | EndGlobalSection
25 | GlobalSection(SolutionProperties) = preSolution
26 | HideSolutionNode = FALSE
27 | EndGlobalSection
28 | EndGlobal
29 |
--------------------------------------------------------------------------------
/devkit-assets/frida-core-example.sln:
--------------------------------------------------------------------------------
1 |
2 | Microsoft Visual Studio Solution File, Format Version 12.00
3 | # Visual Studio 14
4 | VisualStudioVersion = 14.0.25420.1
5 | MinimumVisualStudioVersion = 10.0.40219.1
6 | Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "frida-core-example", "frida-core-example.vcxproj", "{1A424DA8-5C62-4AEA-A1A9-465359E0C17D}"
7 | EndProject
8 | Global
9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution
10 | Debug|x64 = Debug|x64
11 | Debug|x86 = Debug|x86
12 | Release|x64 = Release|x64
13 | Release|x86 = Release|x86
14 | EndGlobalSection
15 | GlobalSection(ProjectConfigurationPlatforms) = postSolution
16 | {1A424DA8-5C62-4AEA-A1A9-465359E0C17D}.Debug|x64.ActiveCfg = Debug|x64
17 | {1A424DA8-5C62-4AEA-A1A9-465359E0C17D}.Debug|x64.Build.0 = Debug|x64
18 | {1A424DA8-5C62-4AEA-A1A9-465359E0C17D}.Debug|x86.ActiveCfg = Debug|Win32
19 | {1A424DA8-5C62-4AEA-A1A9-465359E0C17D}.Debug|x86.Build.0 = Debug|Win32
20 | {1A424DA8-5C62-4AEA-A1A9-465359E0C17D}.Release|x64.ActiveCfg = Release|x64
21 | {1A424DA8-5C62-4AEA-A1A9-465359E0C17D}.Release|x64.Build.0 = Release|x64
22 | {1A424DA8-5C62-4AEA-A1A9-465359E0C17D}.Release|x86.ActiveCfg = Release|Win32
23 | {1A424DA8-5C62-4AEA-A1A9-465359E0C17D}.Release|x86.Build.0 = Release|Win32
24 | EndGlobalSection
25 | GlobalSection(SolutionProperties) = preSolution
26 | HideSolutionNode = FALSE
27 | EndGlobalSection
28 | EndGlobal
29 |
--------------------------------------------------------------------------------
/devkit-assets/frida-gumjs-example.sln:
--------------------------------------------------------------------------------
1 |
2 | Microsoft Visual Studio Solution File, Format Version 12.00
3 | # Visual Studio 14
4 | VisualStudioVersion = 14.0.25420.1
5 | MinimumVisualStudioVersion = 10.0.40219.1
6 | Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "frida-gumjs-example", "frida-gumjs-example.vcxproj", "{30827938-6C79-4226-8A2F-22154CD45F5B}"
7 | EndProject
8 | Global
9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution
10 | Debug|x64 = Debug|x64
11 | Debug|x86 = Debug|x86
12 | Release|x64 = Release|x64
13 | Release|x86 = Release|x86
14 | EndGlobalSection
15 | GlobalSection(ProjectConfigurationPlatforms) = postSolution
16 | {30827938-6C79-4226-8A2F-22154CD45F5B}.Debug|x64.ActiveCfg = Debug|x64
17 | {30827938-6C79-4226-8A2F-22154CD45F5B}.Debug|x64.Build.0 = Debug|x64
18 | {30827938-6C79-4226-8A2F-22154CD45F5B}.Debug|x86.ActiveCfg = Debug|Win32
19 | {30827938-6C79-4226-8A2F-22154CD45F5B}.Debug|x86.Build.0 = Debug|Win32
20 | {30827938-6C79-4226-8A2F-22154CD45F5B}.Release|x64.ActiveCfg = Release|x64
21 | {30827938-6C79-4226-8A2F-22154CD45F5B}.Release|x64.Build.0 = Release|x64
22 | {30827938-6C79-4226-8A2F-22154CD45F5B}.Release|x86.ActiveCfg = Release|Win32
23 | {30827938-6C79-4226-8A2F-22154CD45F5B}.Release|x86.Build.0 = Release|Win32
24 | EndGlobalSection
25 | GlobalSection(SolutionProperties) = preSolution
26 | HideSolutionNode = FALSE
27 | EndGlobalSection
28 | EndGlobal
29 |
--------------------------------------------------------------------------------
/mkfatmacho.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import os
4 | import shutil
5 | import struct
6 | import sys
7 |
8 |
9 | def make_fat_macho(output_path, input_paths):
10 | """
11 | Used to create a FAT Mach-O when Apple's lipo tool refuses to do so, such as
12 | when needing two arm64e slices to support both the new and the old arm64e ABI.
13 | """
14 | input_slices = []
15 | offset = 0x8000
16 | slice_alignment = 0x4000
17 | for input_path in input_paths:
18 | delta = offset % slice_alignment
19 | if delta != 0:
20 | offset += slice_alignment - delta
21 |
22 | offset_bits = bin(offset)
23 | alignment = len(offset_bits[offset_bits.rfind("1") + 1:])
24 |
25 | f = open(input_path, "rb+")
26 |
27 | f.seek(4)
28 |         cpu_type, cpu_subtype = struct.unpack("<II", f.read(8))
29 |
30 |         f.seek(0, os.SEEK_END)
31 |         size = f.tell()
32 |
33 |         input_slices.append((f, cpu_type, cpu_subtype, offset, size, alignment))
34 |
35 |         offset += size
36 |
37 |     with open(output_path, "wb") as output_file:
38 |         header = struct.pack(">II", 0xcafebabe, len(input_slices))
39 | output_file.write(header)
40 |
41 | for (_, cpu_type, cpu_subtype, offset, size, alignment) in input_slices:
42 | slice_spec = struct.pack(">IIIII", cpu_type, cpu_subtype, offset, size, alignment)
43 | output_file.write(slice_spec)
44 |
45 | for (input_file, _, _, offset, _, _) in input_slices:
46 | input_file.seek(0)
47 | output_file.seek(offset)
48 | shutil.copyfileobj(input_file, output_file)
49 |
50 |
51 | if __name__ == '__main__':
52 | output_path = sys.argv[1]
53 | input_paths = sys.argv[2:]
54 | make_fat_macho(output_path, input_paths)
55 |
--------------------------------------------------------------------------------
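
A usage sketch with placeholder slice filenames, covering both the programmatic entry point and the CLI form handled by the `__main__` block above:

```python
# Combine two single-arch Mach-O dylibs into one FAT binary.
# The slice filenames are placeholders; make_fat_macho() is defined above.
from mkfatmacho import make_fat_macho

make_fat_macho(
    "frida-agent-universal.dylib",       # output: FAT header followed by both slices
    [
        "frida-agent-arm64e-new.dylib",  # e.g. slice built against the new arm64e ABI
        "frida-agent-arm64e-old.dylib",  # e.g. slice built against the old arm64e ABI
    ],
)

# Equivalent CLI invocation:
#   ./mkfatmacho.py frida-agent-universal.dylib frida-agent-arm64e-new.dylib frida-agent-arm64e-old.dylib
```
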
/devkit-assets/frida-gumjs-example-unix.c:
--------------------------------------------------------------------------------
1 | #include "frida-gumjs.h"
2 |
3 | #include <fcntl.h>
4 | #include <string.h>
5 | #include <unistd.h>
6 |
7 | static void on_message (const gchar * message, GBytes * data, gpointer user_data);
8 |
9 | int
10 | main (int argc,
11 | char * argv[])
12 | {
13 | GumScriptBackend * backend;
14 | GCancellable * cancellable = NULL;
15 | GError * error = NULL;
16 | GumScript * script;
17 | GMainContext * context;
18 |
19 | gum_init_embedded ();
20 |
21 | backend = gum_script_backend_obtain_qjs ();
22 |
23 | script = gum_script_backend_create_sync (backend, "example",
24 | "Interceptor.attach(Module.getGlobalExportByName('open'), {\n"
25 | " onEnter(args) {\n"
26 | " console.log(`[*] open(\"${args[0].readUtf8String()}\")`);\n"
27 | " }\n"
28 | "});\n"
29 | "Interceptor.attach(Module.getGlobalExportByName('close'), {\n"
30 | " onEnter(args) {\n"
31 | " console.log(`[*] close(${args[0].toInt32()})`);\n"
32 | " }\n"
33 | "});",
34 | NULL, cancellable, &error);
35 | g_assert (error == NULL);
36 |
37 | gum_script_set_message_handler (script, on_message, NULL, NULL);
38 |
39 | gum_script_load_sync (script, cancellable);
40 |
41 | close (open ("/etc/hosts", O_RDONLY));
42 | close (open ("/etc/fstab", O_RDONLY));
43 |
44 | context = g_main_context_get_thread_default ();
45 | while (g_main_context_pending (context))
46 | g_main_context_iteration (context, FALSE);
47 |
48 | gum_script_unload_sync (script, cancellable);
49 |
50 | g_object_unref (script);
51 |
52 | gum_deinit_embedded ();
53 |
54 | return 0;
55 | }
56 |
57 | static void
58 | on_message (const gchar * message,
59 | GBytes * data,
60 | gpointer user_data)
61 | {
62 | JsonParser * parser;
63 | JsonObject * root;
64 | const gchar * type;
65 |
66 | parser = json_parser_new ();
67 | json_parser_load_from_data (parser, message, -1, NULL);
68 | root = json_node_get_object (json_parser_get_root (parser));
69 |
70 | type = json_object_get_string_member (root, "type");
71 | if (strcmp (type, "log") == 0)
72 | {
73 | const gchar * log_message;
74 |
75 | log_message = json_object_get_string_member (root, "payload");
76 | g_print ("%s\n", log_message);
77 | }
78 | else
79 | {
80 | g_print ("on_message: %s\n", message);
81 | }
82 |
83 | g_object_unref (parser);
84 | }
85 |
--------------------------------------------------------------------------------
/devkit-assets/frida-gumjs-example-windows.c:
--------------------------------------------------------------------------------
1 | /*
2 | * To build, set up your Release configuration like this:
3 | *
4 | * [Runtime Library]
5 | * Multi-threaded (/MT)
6 | *
7 | * Visit https://frida.re to learn more about Frida.
8 | */
9 |
10 | #include "frida-gumjs.h"
11 |
12 | #include <windows.h>
13 |
14 | static void on_message (const gchar * message, GBytes * data, gpointer user_data);
15 |
16 | int
17 | main (int argc,
18 | char * argv[])
19 | {
20 | GumScriptBackend * backend;
21 | GCancellable * cancellable = NULL;
22 | GError * error = NULL;
23 | GumScript * script;
24 | GMainContext * context;
25 |
26 | gum_init_embedded ();
27 |
28 | backend = gum_script_backend_obtain_qjs ();
29 |
30 | script = gum_script_backend_create_sync (backend, "example",
31 | "Interceptor.attach(Process.getModuleByName('user32.dll').getExportByName('MessageBeep'), {\n"
32 | " onEnter(args) {\n"
33 | " console.log(`[*] MessageBeep(${args[0].toInt32()})`);\n"
34 | " }\n"
35 | "});\n"
36 | "Interceptor.attach(Process.getModuleByName('kernel32.dll').getExportByName('Sleep'), {\n"
37 | " onEnter(args) {\n"
38 | " console.log(`[*] Sleep(${args[0].toInt32()})`);\n"
39 | " }\n"
40 | "});",
41 | NULL, cancellable, &error);
42 | g_assert (error == NULL);
43 |
44 | gum_script_set_message_handler (script, on_message, NULL, NULL);
45 |
46 | gum_script_load_sync (script, cancellable);
47 |
48 | MessageBeep (MB_ICONINFORMATION);
49 | Sleep (1);
50 |
51 | context = g_main_context_get_thread_default ();
52 | while (g_main_context_pending (context))
53 | g_main_context_iteration (context, FALSE);
54 |
55 | gum_script_unload_sync (script, cancellable);
56 |
57 | g_object_unref (script);
58 |
59 | gum_deinit_embedded ();
60 |
61 | return 0;
62 | }
63 |
64 | static void
65 | on_message (const gchar * message,
66 | GBytes * data,
67 | gpointer user_data)
68 | {
69 | JsonParser * parser;
70 | JsonObject * root;
71 | const gchar * type;
72 |
73 | parser = json_parser_new ();
74 | json_parser_load_from_data (parser, message, -1, NULL);
75 | root = json_node_get_object (json_parser_get_root (parser));
76 |
77 | type = json_object_get_string_member (root, "type");
78 | if (strcmp (type, "log") == 0)
79 | {
80 | const gchar * log_message;
81 |
82 | log_message = json_object_get_string_member (root, "payload");
83 | g_print ("%s\n", log_message);
84 | }
85 | else
86 | {
87 | g_print ("on_message: %s\n", message);
88 | }
89 |
90 | g_object_unref (parser);
91 | }
92 |
--------------------------------------------------------------------------------
/devkit-assets/frida-gum-example-unix.c:
--------------------------------------------------------------------------------
1 | #include "frida-gum.h"
2 |
3 | #include <fcntl.h>
4 | #include <unistd.h>
5 |
6 | typedef struct _ExampleListenerData ExampleListenerData;
7 | typedef enum _ExampleHookId ExampleHookId;
8 |
9 | struct _ExampleListenerData
10 | {
11 | guint num_calls;
12 | };
13 |
14 | enum _ExampleHookId
15 | {
16 | EXAMPLE_HOOK_OPEN,
17 | EXAMPLE_HOOK_CLOSE
18 | };
19 |
20 | static void example_listener_on_enter (GumInvocationContext * ic, gpointer user_data);
21 | static void example_listener_on_leave (GumInvocationContext * ic, gpointer user_data);
22 |
23 | int
24 | main (int argc,
25 | char * argv[])
26 | {
27 | GumInterceptor * interceptor;
28 | ExampleListenerData * data;
29 | GumInvocationListener * listener;
30 |
31 | gum_init_embedded ();
32 |
33 | interceptor = gum_interceptor_obtain ();
34 |
35 | data = g_new0 (ExampleListenerData, 1);
36 | listener = gum_make_call_listener (example_listener_on_enter, example_listener_on_leave, data, g_free);
37 |
38 | gum_interceptor_begin_transaction (interceptor);
39 | gum_interceptor_attach (interceptor,
40 | GSIZE_TO_POINTER (gum_module_find_global_export_by_name ("open")),
41 | listener,
42 | GSIZE_TO_POINTER (EXAMPLE_HOOK_OPEN),
43 | GUM_ATTACH_FLAGS_NONE);
44 | gum_interceptor_attach (interceptor,
45 | GSIZE_TO_POINTER (gum_module_find_global_export_by_name ("close")),
46 | listener,
47 | GSIZE_TO_POINTER (EXAMPLE_HOOK_CLOSE),
48 | GUM_ATTACH_FLAGS_NONE);
49 | gum_interceptor_end_transaction (interceptor);
50 |
51 | close (open ("/etc/hosts", O_RDONLY));
52 | close (open ("/etc/fstab", O_RDONLY));
53 |
54 | g_print ("[*] listener got %u calls\n", data->num_calls);
55 |
56 | gum_interceptor_detach (interceptor, listener);
57 |
58 | close (open ("/etc/hosts", O_RDONLY));
59 | close (open ("/etc/fstab", O_RDONLY));
60 |
61 | g_print ("[*] listener still has %u calls\n", data->num_calls);
62 |
63 | g_object_unref (listener);
64 | g_object_unref (interceptor);
65 |
66 | gum_deinit_embedded ();
67 |
68 | return 0;
69 | }
70 |
71 | static void
72 | example_listener_on_enter (GumInvocationContext * ic,
73 | gpointer user_data)
74 | {
75 | ExampleListenerData * data = user_data;
76 | ExampleHookId hook_id;
77 |
78 | hook_id = GUM_IC_GET_FUNC_DATA (ic, ExampleHookId);
79 |
80 | switch (hook_id)
81 | {
82 | case EXAMPLE_HOOK_OPEN:
83 | g_print ("[*] open(\"%s\")\n", (const gchar *) gum_invocation_context_get_nth_argument (ic, 0));
84 | break;
85 | case EXAMPLE_HOOK_CLOSE:
86 | g_print ("[*] close(%d)\n", GPOINTER_TO_INT (gum_invocation_context_get_nth_argument (ic, 0)));
87 | break;
88 | }
89 |
90 | data->num_calls++;
91 | }
92 |
93 | static void
94 | example_listener_on_leave (GumInvocationContext * ic,
95 | gpointer user_data)
96 | {
97 | }
98 |
--------------------------------------------------------------------------------
/frida_version.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import argparse
4 | from dataclasses import dataclass
5 | import os
6 | from pathlib import Path
7 | import re
8 | import subprocess
9 | import sys
10 | from typing import List
11 |
12 |
13 | RELENG_DIR = Path(__file__).resolve().parent
14 | ROOT_DIR = RELENG_DIR.parent
15 |
16 |
17 | @dataclass
18 | class FridaVersion:
19 | name: str
20 | major: int
21 | minor: int
22 | micro: int
23 | nano: int
24 | commit: str
25 |
26 |
27 | def main(argv: List[str]):
28 | parser = argparse.ArgumentParser()
29 | parser.add_argument("repo", nargs="?", type=Path, default=ROOT_DIR)
30 | args = parser.parse_args()
31 |
32 | version = detect(args.repo)
33 | print(version.name)
34 |
35 |
36 | def detect(repo: Path) -> FridaVersion:
37 | version_name = "0.0.0"
38 | major = 0
39 | minor = 0
40 | micro = 0
41 | nano = 0
42 | commit = ""
43 |
44 | if not (repo / ".git").exists():
45 | return FridaVersion(version_name, major, minor, micro, nano, commit)
46 |
47 | proc = subprocess.run(
48 | ["git", "describe", "--tags", "--always", "--long"],
49 | cwd=repo,
50 | capture_output=True,
51 | encoding="utf-8",
52 | check=False,
53 | )
54 | description = proc.stdout.strip()
55 |
56 | if "-" not in description:
57 | commit = description
58 | return FridaVersion(version_name, major, minor, micro, nano, commit)
59 |
60 | parts = description.rsplit("-", 2)
61 | if len(parts) != 3:
62 | raise VersionParseError(f"Unexpected format from git describe: {description!r}")
63 |
64 | tag_part, distance_str, commit_part = parts
65 | commit = commit_part.lstrip("g")
66 |
67 | try:
68 | distance = int(distance_str)
69 | except ValueError as exc:
70 | raise VersionParseError(f"Invalid distance in {description!r}") from exc
71 |
72 | nano = distance
73 |
74 | m = re.match(r"^(\d+)\.(\d+)\.(\d+)(?:-(.+))?$", tag_part)
75 | if m is None:
76 | raise VersionParseError(
77 | f"Tag does not match expected semver pattern: {tag_part!r}"
78 | )
79 |
80 | major = int(m.group(1))
81 | minor = int(m.group(2))
82 | micro = int(m.group(3))
83 | suffix = m.group(4)
84 |
85 | if suffix is None:
86 | if distance == 0:
87 | version_name = f"{major}.{minor}.{micro}"
88 | else:
89 | micro += 1
90 | version_name = f"{major}.{minor}.{micro}-dev.{distance - 1}"
91 | else:
92 | base = f"{major}.{minor}.{micro}-{suffix}"
93 | if distance == 0:
94 | version_name = base
95 | else:
96 | version_name = f"{base}-dev.{distance - 1}"
97 |
98 | return FridaVersion(version_name, major, minor, micro, nano, commit)
99 |
100 |
101 | class VersionParseError(Exception):
102 | pass
103 |
104 |
105 | if __name__ == "__main__":
106 | main(sys.argv)
107 |
--------------------------------------------------------------------------------
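
A usage sketch, assuming `frida_version.py` is importable directly; the checkout path and the version strings in the comments are illustrative:

```python
# Query the version of a local checkout programmatically.
from pathlib import Path

from frida_version import detect

version = detect(Path("/path/to/frida"))  # placeholder checkout path
print(version.name)    # e.g. "17.0.2" on a tagged commit,
                       # or "17.0.3-dev.4" when 5 commits past the 17.0.2 tag
print(version.commit)  # abbreviated hash from `git describe`, "" outside a git repo
```
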
/devkit-assets/frida-gum-example-windows.c:
--------------------------------------------------------------------------------
1 | /*
2 | * To build, set up your Release configuration like this:
3 | *
4 | * [Runtime Library]
5 | * Multi-threaded (/MT)
6 | *
7 | * Visit https://frida.re to learn more about Frida.
8 | */
9 |
10 | #include "frida-gum.h"
11 |
12 | #include <windows.h>
13 |
14 | typedef struct _ExampleListenerData ExampleListenerData;
15 | typedef enum _ExampleHookId ExampleHookId;
16 |
17 | struct _ExampleListenerData
18 | {
19 | guint num_calls;
20 | };
21 |
22 | enum _ExampleHookId
23 | {
24 | EXAMPLE_HOOK_MESSAGE_BEEP,
25 | EXAMPLE_HOOK_SLEEP
26 | };
27 |
28 | static void example_listener_on_enter (GumInvocationContext * ic, gpointer user_data);
29 | static void example_listener_on_leave (GumInvocationContext * ic, gpointer user_data);
30 |
31 | int
32 | main (int argc,
33 | char * argv[])
34 | {
35 | GumInterceptor * interceptor;
36 | ExampleListenerData * data;
37 | GumInvocationListener * listener;
38 | GumModule * user32, * kernel32;
39 |
40 | gum_init_embedded ();
41 |
42 | interceptor = gum_interceptor_obtain ();
43 |
44 | data = g_new0 (ExampleListenerData, 1);
45 | listener = gum_make_call_listener (example_listener_on_enter, example_listener_on_leave, data, g_free);
46 |
47 | user32 = gum_process_find_module_by_name ("user32.dll");
48 | kernel32 = gum_process_find_module_by_name ("kernel32.dll");
49 |
50 | gum_interceptor_begin_transaction (interceptor);
51 | gum_interceptor_attach (interceptor,
52 | GSIZE_TO_POINTER (gum_module_find_export_by_name (user32, "MessageBeep")),
53 | listener,
54 | GSIZE_TO_POINTER (EXAMPLE_HOOK_MESSAGE_BEEP),
55 | GUM_ATTACH_FLAGS_NONE);
56 | gum_interceptor_attach (interceptor,
57 | GSIZE_TO_POINTER (gum_module_find_export_by_name (kernel32, "Sleep")),
58 | listener,
59 | GSIZE_TO_POINTER (EXAMPLE_HOOK_SLEEP),
60 | GUM_ATTACH_FLAGS_NONE);
61 | gum_interceptor_end_transaction (interceptor);
62 |
63 | MessageBeep (MB_ICONINFORMATION);
64 | Sleep (1);
65 |
66 | g_print ("[*] listener got %u calls\n", data->num_calls);
67 |
68 | gum_interceptor_detach (interceptor, listener);
69 |
70 | MessageBeep (MB_ICONINFORMATION);
71 | Sleep (1);
72 |
73 | g_print ("[*] listener still has %u calls\n", data->num_calls);
74 |
75 | g_object_unref (kernel32);
76 | g_object_unref (user32);
77 | g_object_unref (listener);
78 | g_object_unref (interceptor);
79 |
80 | gum_deinit_embedded ();
81 |
82 | return 0;
83 | }
84 |
85 | static void
86 | example_listener_on_enter (GumInvocationContext * ic,
87 | gpointer user_data)
88 | {
89 | ExampleListenerData * data = user_data;
90 | ExampleHookId hook_id;
91 |
92 | hook_id = GUM_IC_GET_FUNC_DATA (ic, ExampleHookId);
93 |
94 | switch (hook_id)
95 | {
96 | case EXAMPLE_HOOK_MESSAGE_BEEP:
97 | g_print ("[*] MessageBeep(%u)\n", GPOINTER_TO_UINT (gum_invocation_context_get_nth_argument (ic, 0)));
98 | break;
99 | case EXAMPLE_HOOK_SLEEP:
100 | g_print ("[*] Sleep(%u)\n", GPOINTER_TO_UINT (gum_invocation_context_get_nth_argument (ic, 0)));
101 | break;
102 | }
103 |
104 | data->num_calls++;
105 | }
106 |
107 | static void
108 | example_listener_on_leave (GumInvocationContext * ic,
109 | gpointer user_data)
110 | {
111 | }
112 |
--------------------------------------------------------------------------------
/post-process-oabi.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import argparse
3 | from pathlib import Path
4 | import shutil
5 | import struct
6 | import subprocess
7 | import tarfile
8 | import tempfile
9 | import urllib.request
10 |
11 |
12 | ARM64E_URL = "https://build.frida.re/deps/{version}/sdk-ios-arm64e.tar.xz"
13 |
14 |
15 | class CommandError(Exception):
16 | pass
17 |
18 |
19 | def main():
20 | parser = argparse.ArgumentParser()
21 | parser.add_argument("--bundle", required=True)
22 | parser.add_argument("--host", required=True)
23 | parser.add_argument("--artifact", required=True)
24 | parser.add_argument("--version", required=True)
25 | args = parser.parse_args()
26 |
27 | if args.bundle != "sdk":
28 | raise CommandError("wrong bundle")
29 | if args.host != "ios-arm64eoabi":
30 | raise CommandError("wrong host")
31 |
32 | arm64e_sdk_url = ARM64E_URL.format(version=args.version)
33 |
34 | print(f"Downloading {arm64e_sdk_url}")
35 | with urllib.request.urlopen(arm64e_sdk_url) as response, \
36 | tempfile.NamedTemporaryFile(suffix=".tar.xz") as archive:
37 | shutil.copyfileobj(response, archive)
38 | archive.flush()
39 | arm64e_artifact_path = Path(archive.name)
40 |
41 | with tempfile.TemporaryDirectory() as patched_artifact_dir:
42 | patched_artifact_file = Path(patched_artifact_dir) / "patched.tar.xz"
43 |
44 | with tempfile.TemporaryDirectory() as artifact_extracted_dir, \
45 | tempfile.TemporaryDirectory() as arm64e_extracted_dir:
46 | artifact_extracted_path = Path(artifact_extracted_dir)
47 | arm64e_extracted_path = Path(arm64e_extracted_dir)
48 |
49 | with tarfile.open(arm64e_artifact_path, "r:xz") as arm64e_tar:
50 | arm64e_tar.extractall(arm64e_extracted_path)
51 |
52 | artifact_path = Path(args.artifact)
53 | with tarfile.open(artifact_path, "r:xz") as tar:
54 | tar.extractall(artifact_extracted_path)
55 |
56 | print("Patching libffi.a...")
57 | steal_object(artifact_extracted_path / "lib" / "libffi.a",
58 | arm64e_extracted_path / "lib" / "libffi.a")
59 | with tarfile.open(patched_artifact_file, "w:xz") as patched_tar:
60 | patched_tar.add(artifact_extracted_path, arcname="./")
61 |
62 | print(f"Overwriting {artifact_path}")
63 | shutil.copy(patched_artifact_file, artifact_path)
64 |
65 |
66 | def steal_object(arm64eoabi_libffi_a_path: Path, arm64e_libffi_a_path: Path):
67 | """
68 | Steal just the aarch64_sysv.S.o object file from the arm64e libffi.a in
69 | order to get the CIE info from the future compiler. Then patch the Mach-O
70 | header of the stolen object to match the old arm64e ABI. It works because
71 | the __text section is exactly the same.
72 | """
73 | if not arm64eoabi_libffi_a_path.exists():
74 | raise RuntimeError("input arm64eoabi libffi.a not found")
75 | if not arm64e_libffi_a_path.exists():
76 | raise RuntimeError("input arm64e libffi.a not found")
77 |
78 | with tempfile.TemporaryDirectory() as oabi_dir, tempfile.TemporaryDirectory() as nabi_dir:
79 | perform("ar", "-x", arm64eoabi_libffi_a_path.absolute(), cwd=oabi_dir)
80 | perform("ar", "-x", arm64e_libffi_a_path.absolute(), cwd=nabi_dir)
81 | dst = Path(oabi_dir) / "aarch64_sysv.S.o"
82 | dst.unlink()
83 | shutil.copy(Path(nabi_dir) / "aarch64_sysv.S.o", dst)
84 | with dst.open("rb+") as f:
85 | f.seek(0xb)
86 | f.write(struct.pack("B", 0))
87 |
88 | perform("ar", "-r", arm64eoabi_libffi_a_path.absolute(), dst.name, cwd=oabi_dir)
89 |
90 |
91 | def perform(*args, **kwargs):
92 | print(">", " ".join([str(arg) for arg in args]), flush=True)
93 | return subprocess.run(args, check=True, **kwargs)
94 |
95 |
96 | if __name__ == "__main__":
97 | main()
98 |
--------------------------------------------------------------------------------
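
A sketch of how a packaging job might invoke this step, using the flags defined by the argument parser above; the artifact filename and deps version are placeholders:

```python
# Patch an ios-arm64eoabi SDK bundle in place after it has been built.
import subprocess
import sys

subprocess.run(
    [
        sys.executable, "post-process-oabi.py",
        "--bundle", "sdk",                          # anything else raises CommandError
        "--host", "ios-arm64eoabi",                 # the only host this script accepts
        "--artifact", "sdk-ios-arm64eoabi.tar.xz",  # tarball to patch (placeholder)
        "--version", "20240101",                    # deps version used to fetch sdk-ios-arm64e.tar.xz
    ],
    check=True,
)
```
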
/modules/frida-gadget-ios/download.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 | const gadget = require('.');
3 | const https = require('https');
4 | const path = require('path');
5 | const util = require('util');
6 | const zlib = require('zlib');
7 |
8 | const access = util.promisify(fs.access);
9 | const readdir = util.promisify(fs.readdir);
10 | const rename = util.promisify(fs.rename);
11 | const unlink = util.promisify(fs.unlink);
12 |
13 | async function run() {
14 | await pruneOldVersions();
15 |
16 | if (await alreadyDownloaded())
17 | return;
18 |
19 | await download();
20 | }
21 |
22 | async function alreadyDownloaded() {
23 | try {
24 | await access(gadget.path, fs.constants.F_OK);
25 | return true;
26 | } catch (e) {
27 | return false;
28 | }
29 | }
30 |
31 | async function download() {
32 | const response = await httpsGet(`https://github.com/frida/frida/releases/download/${gadget.version}/frida-gadget-${gadget.version}-ios-universal.dylib.gz`);
33 |
34 | const tempGadgetPath = gadget.path + '.download';
35 | const tempGadgetStream = fs.createWriteStream(tempGadgetPath);
36 | await pump(response, zlib.createGunzip(), tempGadgetStream);
37 |
38 | await rename(tempGadgetPath, gadget.path);
39 | }
40 |
41 | async function pruneOldVersions() {
42 | const gadgetDir = path.dirname(gadget.path);
43 | const currentName = path.basename(gadget.path);
44 | for (const name of await readdir(gadgetDir)) {
45 | if (name.startsWith('frida-gadget-') && name.endsWith('-ios-universal.dylib') && name !== currentName) {
46 | await unlink(path.join(gadgetDir, name));
47 | }
48 | }
49 | }
50 |
51 | function httpsGet(url) {
52 | return new Promise((resolve, reject) => {
53 | let redirects = 0;
54 |
55 | tryGet(url);
56 |
57 | function tryGet(url) {
58 | const request = https.get(url, response => {
59 | tearDown();
60 |
61 | const {statusCode, headers} = response;
62 |
63 | if (statusCode === 200) {
64 | resolve(response);
65 | } else {
66 | response.resume();
67 |
68 | if (statusCode >= 300 && statusCode < 400 && headers.location !== undefined) {
69 | if (redirects === 10) {
70 | reject(new Error('Too many redirects'));
71 | return;
72 | }
73 |
74 | redirects++;
75 | tryGet(headers.location);
76 | } else {
77 | reject(new Error(`Download failed (code=${statusCode})`));
78 | }
79 | }
80 | });
81 |
82 | request.addListener('error', onError);
83 |
84 | function onError(error) {
85 | tearDown();
86 | reject(error);
87 | }
88 |
89 | function tearDown() {
90 | request.removeListener('error', onError);
91 | }
92 | }
93 | });
94 | }
95 |
96 | function pump(...streams) {
97 | return new Promise((resolve, reject) => {
98 | let done = false;
99 |
100 | streams.forEach(stream => {
101 | stream.addListener('error', onError);
102 | });
103 |
104 | for (let i = 0; i !== streams.length - 1; i++) {
105 | const cur = streams[i];
106 | const next = streams[i + 1];
107 | cur.pipe(next);
108 | }
109 |
110 | const last = streams[streams.length - 1];
111 | last.addListener('finish', onFinish);
112 |
113 | function onFinish() {
114 | if (done)
115 | return;
116 | done = true;
117 |
118 | tearDown();
119 | resolve();
120 | }
121 |
122 | function onError(error) {
123 | if (done)
124 | return;
125 | done = true;
126 |
127 | tearDown();
128 | reject(error);
129 | }
130 |
131 | function tearDown() {
132 | last.removeListener('finish', onFinish);
133 |
134 | streams.forEach(stream => {
135 | stream.removeListener('error', onError);
136 | stream.destroy();
137 | });
138 | }
139 | });
140 | }
141 |
142 | run().catch(onError);
143 |
144 | function onError(error) {
145 | console.error(error.message);
146 | process.exitCode = 1;
147 | }
148 |
--------------------------------------------------------------------------------
/make-changelog.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | submodules="frida-gum frida-core frida-python frida-node frida-qml frida-clr"
4 |
5 | from=$1
6 | to=$2
7 | if [ -z "$from" -o -z "$to" ]; then
8 |   echo "usage: $0 <from> <to>"
9 | exit 1
10 | fi
11 | range=${from}..${to}
12 |
13 | set -e
14 |
15 | intdir="$(mktemp -d /tmp/make-changelog.XXXXXX)"
16 | scratch=$intdir/scratch
17 | summary=$intdir/summary
18 | log=$intdir/log
19 | authors=$intdir/authors
20 |
21 | cleanup ()
22 | {
23 | rm -rf "$intdir"
24 | }
25 | trap cleanup EXIT
26 |
27 | summarize_repo_changes ()
28 | {
29 | local repo=$1
30 | local from=$2
31 | local to=$3
32 | local type=$4
33 |
34 | echo
35 | echo "## $repo"
36 | echo " > https://github.com/frida/$repo/compare/$from...$to"
37 | git \
38 | --no-pager \
39 | log \
40 | "--format=%ae<%an<%s" \
41 | --no-decorate \
42 | --color=never \
43 | ${from}..${to} \
44 | | sort -t "<" -k 3 \
45 | | egrep -v "\\bci: " \
46 | | egrep -v "\\b\\bsubmodules: Bump releng" \
47 | | egrep -v "\\bsubprojects: Bump outdated" \
48 | | egrep -v "\\btest: " \
49 | | egrep -v "\\btests: " \
50 | > "$summary" || true
51 | if [ "$repo" == "frida" ]; then
52 | grep -Ev "(submodules: Bump outdated|deps: Bump)" "$summary" > "${summary}-filtered" || true
53 | mv "${summary}-filtered" "$summary"
54 | fi
55 |
56 | export IFS="<"
57 | cat "$summary" | while read author_email author_name title; do
58 | printf -- '- %s.' $title
59 | if [ $type == internal ] && echo $author_email | grep -Evq "dependabot|oleavr"; then
60 | printf " Thanks [$author_name][]!"
61 | fi
62 | echo
63 | done
64 | unset IFS
65 | }
66 |
67 | append_log ()
68 | {
69 | git --no-pager log --decorate=short --color=never $1 >> "$log"
70 | }
71 |
72 | append_log $range
73 | echo "Released $(head -3 "$log" | grep "^Date: " | cut -c 9-)"
74 | summarize_repo_changes frida $from $to internal
75 |
76 | for module in $submodules; do
77 | mod_range=${from}..${to}
78 | pushd subprojects/$module > /dev/null
79 | append_log $mod_range
80 | summarize_repo_changes $module $from $to internal
81 | if [ $module == frida-gum ]; then
82 | git diff $mod_range -- bindings/gumjs/generate-runtime.py > "$intdir/bridge-changes"
83 | for bridge in $(grep "^-" "$intdir/bridge-changes" | grep -- '-bridge": "' | cut -d '"' -f 2); do
84 | bridge_from=$(grep "^-" "$intdir/bridge-changes" | grep '"'$bridge'": "' | cut -d '"' -f 4)
85 | bridge_to=$(grep "^+" "$intdir/bridge-changes" | grep '"'$bridge'": "' | cut -d '"' -f 4)
86 | pushd ~/src/$bridge > /dev/null
87 | summarize_repo_changes $bridge v$bridge_from v$bridge_to internal
88 | popd > /dev/null
89 | done
90 | fi
91 | popd > /dev/null
92 | done
93 |
94 | git --no-pager diff $range releng/deps.toml > "$scratch"
95 | bumped_deps=$(grep "_version = " "$scratch" \
96 | | grep "^-" \
97 | | cut -c 2- \
98 | | awk '{ print $1 }' \
99 | | grep -Ev "frida_(deps|bootstrap)_version" \
100 | | sed -e 's,_version$,,')
101 | for id in $bumped_deps; do
102 | case $id in
103 | bison|depot_tools|flex|gn|v8_api)
104 | continue
105 | ;;
106 | esac
107 | repo=$(echo $id | tr -s "_" "-")
108 | dep_from=$(grep "${id}_version = " "$scratch" | grep "^-" | awk '{ print $3 }')
109 | dep_to=$(grep "${id}_version = " "$scratch" | grep "^+" | awk '{ print $3 }')
110 | if [ ! -d deps/$repo ]; then
111 | make -f Makefile.sdk.mk deps/.${repo}-stamp
112 | fi
113 | pushd deps/$repo > /dev/null
114 | summarize_repo_changes $repo $dep_from $dep_to external
115 | popd > /dev/null
116 | done
117 |
118 | echo
119 | echo "# Summary"
120 |
121 | releases=$(grep "(tag: " "$log" | wc -l)
122 | grep "^Author: " "$log" | cut -d : -f 2- | cut -c 2- | grep -v dependabot | sort -u > "$authors"
123 | echo
124 | echo "- Releases: $releases"
125 | echo "- Commits: $(grep "^commit " "$log" | wc -l)"
126 | echo "- Contributors: $(wc -l "$authors" | awk '{ print $1 }')"
127 | echo
128 | cat "$authors" | awk '{ print "> " $0 }'
129 |
--------------------------------------------------------------------------------
/meson_make.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import os
3 | from pathlib import Path
4 | import pickle
5 | import shlex
6 | import shutil
7 | import sys
8 | from typing import Callable, Dict, List
9 |
10 | from . import env
11 | from .meson_configure import configure
12 |
13 |
14 | STANDARD_TARGET_NAMES = ["all", "clean", "distclean", "install", "test"]
15 |
16 |
17 | def main():
18 | default_sourcedir = Path(sys.argv.pop(1)).resolve()
19 | sourcedir = Path(os.environ.get("MESON_SOURCE_ROOT", default_sourcedir)).resolve()
20 |
21 | default_builddir = Path(sys.argv.pop(1)).resolve()
22 | builddir = Path(os.environ.get("MESON_BUILD_ROOT", default_builddir)).resolve()
23 |
24 | parser = argparse.ArgumentParser(prog="make")
25 | parser.add_argument("targets",
26 | help="Targets to build, e.g.: " + ", ".join(STANDARD_TARGET_NAMES),
27 | nargs="*",
28 | default="all")
29 | options = parser.parse_args()
30 |
31 | targets = options.targets
32 | if isinstance(targets, str):
33 | targets = [targets]
34 |
35 | try:
36 | make(sourcedir, builddir, targets)
37 | except Exception as e:
38 | print(e, file=sys.stderr)
39 | sys.exit(1)
40 |
41 |
42 | def make(sourcedir: Path,
43 | builddir: Path,
44 | targets: List[str],
45 | environ: Dict[str, str] = os.environ,
46 | call_meson: Callable = env.call_meson):
47 | if not (builddir / "build.ninja").exists():
48 | configure(sourcedir, builddir, environ=environ)
49 |
50 | compile_options = []
51 | if environ.get("V") == "1":
52 | compile_options += ["-v"]
53 |
54 | test_options = shlex.split(environ.get("FRIDA_TEST_OPTIONS", "-v"))
55 |
56 | standard_targets = {
57 | "all": ["compile"] + compile_options,
58 | "clean": ["compile", "--clean"] + compile_options,
59 | "distclean": lambda: distclean(sourcedir, builddir),
60 | "install": ["install"],
61 | "test": ["test"] + test_options,
62 | }
63 |
64 | env_state = pickle.loads((builddir / "frida-env.dat").read_bytes())
65 |
66 | machine_config = env_state["host"]
67 | if machine_config is None:
68 | machine_config = env_state["build"]
69 | meson_env = machine_config.make_merged_environment(environ)
70 | meson_env["FRIDA_ALLOWED_PREBUILDS"] = ",".join(env_state["allowed_prebuilds"])
71 | meson_env["FRIDA_DEPS"] = str(env_state["deps"])
72 |
73 | def do_meson_command(args):
74 | call_meson(args,
75 | use_submodule=env_state["meson"] == "internal",
76 | cwd=builddir,
77 | env=meson_env,
78 | check=True)
79 |
80 | pending_targets = targets.copy()
81 | pending_compile = None
82 |
83 | while pending_targets:
84 | target = pending_targets.pop(0)
85 |
86 | action = standard_targets.get(target, None)
87 | if action is None:
88 | meson_command = "compile"
89 | elif not callable(action):
90 | meson_command = action[0]
91 | else:
92 | meson_command = None
93 |
94 | if meson_command == "compile":
95 | if pending_compile is None:
96 | pending_compile = ["compile"]
97 | if action is not None:
98 | pending_compile += action[1:]
99 | else:
100 | pending_compile += [target]
101 | continue
102 |
103 | if pending_compile is not None:
104 | do_meson_command(pending_compile)
105 | pending_compile = None
106 |
107 | if meson_command is not None:
108 | do_meson_command(action)
109 | else:
110 | action()
111 |
112 | if pending_compile is not None:
113 | do_meson_command(pending_compile)
114 |
115 |
116 | def distclean(sourcedir: Path, builddir: Path):
117 | items_to_delete = []
118 |
119 | if not builddir.is_relative_to(sourcedir):
120 | items_to_delete += list(builddir.iterdir())
121 |
122 | items_to_delete += [
123 | sourcedir / "build",
124 | sourcedir / "deps",
125 | ]
126 |
127 | for item in items_to_delete:
128 | try:
129 | shutil.rmtree(item)
130 | except:
131 | pass
132 |
--------------------------------------------------------------------------------
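
A sketch of driving a build programmatically instead of going through the meson-scripts wrappers, assuming this releng checkout is vendored as the project's `releng` package; the source directory is a placeholder:

```python
# Configure (if needed), compile, and test a project via releng.meson_make.
from pathlib import Path

from releng.meson_make import make

sourcedir = Path("/path/to/my-project")  # placeholder project checkout
builddir = sourcedir / "build"

# A missing build.ninja triggers meson_configure.configure() first; the "all"
# target becomes a single `meson compile`, then "test" runs `meson test`
# with FRIDA_TEST_OPTIONS (default "-v").
make(sourcedir, builddir, ["all", "test"])
```
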
/mkdevkit.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import argparse
4 | from enum import Enum
5 | import hashlib
6 | from pathlib import Path
7 | import subprocess
8 | import sys
9 | import textwrap
10 | from typing import Dict, List, Optional
11 |
12 | REPO_ROOT = Path(__file__).resolve().parent.parent
13 | sys.path.insert(0, str(REPO_ROOT))
14 | from releng import devkit, env, machine_spec
15 |
16 | DepSymbolScope = devkit.DepSymbolScope
17 |
18 |
19 | def main():
20 | raw_args: List[str] = []
21 | ool_optvals: Dict[str, List[str]] = {}
22 | pending_raw_args = sys.argv[1:]
23 | while len(pending_raw_args) > 0:
24 | cur = pending_raw_args.pop(0)
25 | if cur == ">>>":
26 | ool_hash = hashlib.sha256()
27 | ool_strv = []
28 | while True:
29 | cur = pending_raw_args.pop(0)
30 | if cur == "<<<":
31 | break
32 | ool_hash.update(cur.encode("utf-8"))
33 | ool_strv.append(cur)
34 | val_id = "ool:" + ool_hash.hexdigest()
35 | ool_optvals[val_id] = ool_strv
36 | raw_args.append(val_id)
37 | else:
38 | raw_args.append(cur)
39 |
40 | parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter)
41 | parser.add_argument("kit")
42 | parser.add_argument("machine",
43 | type=machine_spec.MachineSpec.parse)
44 | parser.add_argument("outdir",
45 | type=Path)
46 | parser.add_argument("-t", "--thin",
47 | help="build without cross-arch support",
48 | action="store_const",
49 | dest="flavor",
50 | const="_thin",
51 | default="")
52 | parser.add_argument("--dep-symbol-scope",
53 | metavar="S",
54 | type=DepSymbolScope,
55 | choices=list(DepSymbolScope),
56 | default=DepSymbolScope.PREFIXED.value,
57 | help=textwrap.dedent("""\
58 | how to scope symbols from third-party archives:
59 | - prefixed: add the '_frida_' prefix
60 | - original: keep symbols exactly as upstream
61 | """))
62 | parser.add_argument("--cc",
63 | help="C compiler to use",
64 | type=lambda v: parse_array_option_value(v, ool_optvals))
65 | machine_options = dict.fromkeys(["c_args", "lib", "libtool", "ar", "nm", "objcopy", "pkg_config", "pkg_config_path"])
66 | for name in machine_options.keys():
67 | pretty_name = name.replace("_", "-")
68 | parser.add_argument("--" + pretty_name,
69 | help=f"the {pretty_name} to use",
70 | type=lambda v: parse_array_option_value(v, ool_optvals))
71 |
72 | options = parser.parse_args(raw_args)
73 |
74 | kit = options.kit
75 | machine = options.machine
76 | outdir = options.outdir.resolve()
77 | flavor = options.flavor
78 |
79 | cc = options.cc
80 | if cc is not None:
81 | meson_config = {"c": cc}
82 | for k, v in vars(options).items():
83 | if k in machine_options and v is not None:
84 | name = "pkg-config" if k == "pkg_config" else k
85 | meson_config[name] = v
86 | else:
87 | build_dir = REPO_ROOT / "build"
88 |
89 | if flavor == "":
90 | fat_machine_file = env.query_machine_file_path(machine, flavor, build_dir)
91 | if not fat_machine_file.exists() \
92 | and env.query_machine_file_path(machine, "_thin", build_dir).exists():
93 | flavor = "_thin"
94 |
95 | meson_config = env.load_meson_config(machine, flavor, build_dir)
96 | assert meson_config is not None
97 |
98 | try:
99 | app = devkit.CompilerApplication(kit, machine, meson_config, outdir, options.dep_symbol_scope)
100 | app.run()
101 | except subprocess.CalledProcessError as e:
102 | print(e, file=sys.stderr)
103 | if e.output:
104 | print("Stdout:", e.output, file=sys.stderr)
105 | if e.stderr:
106 | print("Stderr:", e.stderr, file=sys.stderr)
107 | sys.exit(1)
108 |
109 |
110 | def parse_array_option_value(val: str, ool_optvals: Dict[str, List[str]]) -> Optional[List[str]]:
111 | if val == "":
112 | return None
113 | if val.startswith("ool:"):
114 | ool_val = ool_optvals.get(val)
115 | if ool_val is not None:
116 | return ool_val
117 | return [val]
118 |
119 |
120 | if __name__ == "__main__":
121 | main()
122 |
--------------------------------------------------------------------------------
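
The `>>> ... <<<` convention is the least obvious part of this script: it lets a caller pass a multi-token value through a single option. A sketch of the mechanics, with a hypothetical compiler command as the out-of-line value:

```python
# How an out-of-line ("ool") option value is encoded and later resolved.
# Command line (hypothetical values):
#   ./mkdevkit.py frida-gum ios-arm64 ./devkit --cc '>>>' xcrun clang -arch arm64 '<<<'
import hashlib

tokens = ["xcrun", "clang", "-arch", "arm64"]

ool_hash = hashlib.sha256()
for token in tokens:
    ool_hash.update(token.encode("utf-8"))
val_id = "ool:" + ool_hash.hexdigest()  # what argparse actually sees as the --cc value

ool_optvals = {val_id: tokens}
# parse_array_option_value() maps the placeholder back to the original argv list:
assert ool_optvals[val_id] == ["xcrun", "clang", "-arch", "arm64"]
```
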
/env_android.py:
--------------------------------------------------------------------------------
1 | from configparser import ConfigParser
2 | from pathlib import Path
3 | import shlex
4 | from typing import Callable, Dict, List, Optional
5 |
6 | from .machine_file import strv_to_meson
7 | from .machine_spec import MachineSpec
8 |
9 |
10 | def init_machine_config(machine: MachineSpec,
11 | build_machine: MachineSpec,
12 | is_cross_build: bool,
13 | environ: Dict[str, str],
14 | toolchain_prefix: Optional[Path],
15 | sdk_prefix: Optional[Path],
16 | call_selected_meson: Callable,
17 | config: ConfigParser,
18 | outpath: List[str],
19 | outenv: Dict[str, str],
20 | outdir: Path):
21 | ndk_found = False
22 | try:
23 | ndk_root = Path(environ["ANDROID_NDK_ROOT"])
24 | if ndk_root.is_absolute():
25 | ndk_props_file = ndk_root / "source.properties"
26 | ndk_found = ndk_props_file.exists()
27 | except:
28 | pass
29 | if not ndk_found:
30 | raise NdkNotFoundError(f"ANDROID_NDK_ROOT must be set to the location of your r{NDK_REQUIRED} NDK")
31 |
32 | if sdk_prefix is not None:
33 | props = ConfigParser()
34 | raw_props = ndk_props_file.read_text(encoding="utf-8")
35 | props.read_string("[source]\n" + raw_props)
36 | rev = props["source"]["Pkg.Revision"]
37 | tokens = rev.split(".")
38 | major_version = int(tokens[0])
39 | if major_version != NDK_REQUIRED:
40 | raise NdkVersionError(f"NDK r{NDK_REQUIRED} is required (found r{major_version}, which is unsupported)")
41 |
42 | android_build_os = "darwin" if build_machine.os == "macos" else build_machine.os
43 | android_build_arch = "x86_64" if build_machine.os in {"macos", "linux"} else build_machine.arch
44 | android_api = 19 if machine.arch in {"x86", "arm"} else 21
45 |
46 | llvm_bindir = ndk_root / "toolchains" / "llvm" / "prebuilt" / f"{android_build_os}-{android_build_arch}" / "bin"
47 |
48 | binaries = config["binaries"]
49 | for (identifier, tool_name, *rest) in NDK_BINARIES:
50 | path = llvm_bindir / f"{tool_name}{build_machine.executable_suffix}"
51 |
52 | argv = [str(path)]
53 | if len(rest) != 0:
54 | argv += rest[0]
55 |
56 | raw_val = strv_to_meson(argv)
57 | if identifier in {"c", "cpp"}:
58 | raw_val += " + common_flags"
59 |
60 | binaries[identifier] = raw_val
61 |
62 | common_flags = [
63 | "-target", f"{machine.cpu}-none-linux-android{android_api}",
64 | ]
65 | c_like_flags = [
66 | "-DANDROID",
67 | "-ffunction-sections",
68 | "-fdata-sections",
69 | ]
70 | cxx_like_flags = []
71 | cxx_link_flags = [
72 | "-static-libstdc++",
73 | ]
74 | linker_flags = [
75 | "-Wl,-z,relro",
76 | "-Wl,-z,noexecstack",
77 | "-Wl,--gc-sections",
78 | ]
79 |
80 | read_envflags = lambda name: shlex.split(environ.get(name, ""))
81 |
82 | common_flags += ARCH_COMMON_FLAGS.get(machine.arch, [])
83 | c_like_flags += ARCH_C_LIKE_FLAGS.get(machine.arch, [])
84 | c_like_flags += read_envflags("CPPFLAGS")
85 | linker_flags += ARCH_LINKER_FLAGS.get(machine.arch, [])
86 | linker_flags += read_envflags("LDFLAGS")
87 |
88 | if android_api < 24:
89 | cxx_like_flags += ["-D_LIBCPP_HAS_NO_OFF_T_FUNCTIONS"]
90 |
91 | constants = config["constants"]
92 | constants["common_flags"] = strv_to_meson(common_flags)
93 | constants["c_like_flags"] = strv_to_meson(c_like_flags)
94 | constants["linker_flags"] = strv_to_meson(linker_flags)
95 | constants["cxx_like_flags"] = strv_to_meson(cxx_like_flags)
96 | constants["cxx_link_flags"] = strv_to_meson(cxx_link_flags)
97 |
98 | options = config["built-in options"]
99 | options["c_args"] = "c_like_flags + " + strv_to_meson(read_envflags("CFLAGS"))
100 | options["cpp_args"] = "c_like_flags + cxx_like_flags + " + strv_to_meson(read_envflags("CXXFLAGS"))
101 | options["c_link_args"] = "linker_flags"
102 | options["cpp_link_args"] = "linker_flags + cxx_link_flags"
103 | options["b_lundef"] = "true"
104 |
105 |
106 | class NdkNotFoundError(Exception):
107 | pass
108 |
109 |
110 | class NdkVersionError(Exception):
111 | pass
112 |
113 |
114 | NDK_REQUIRED = 25
115 |
116 | NDK_BINARIES = [
117 | ("c", "clang"),
118 | ("cpp", "clang++"),
119 | ("ar", "llvm-ar"),
120 | ("nm", "llvm-nm"),
121 | ("ranlib", "llvm-ranlib"),
122 | ("strip", "llvm-strip", ["--strip-all"]),
123 | ("readelf", "llvm-readelf"),
124 | ("objcopy", "llvm-objcopy"),
125 | ("objdump", "llvm-objdump"),
126 | ]
127 |
128 | ARCH_COMMON_FLAGS = {
129 | "x86": [
130 | "-march=pentium4",
131 | ],
132 | "arm": [
133 | "-march=armv7-a",
134 | "-mfloat-abi=softfp",
135 | "-mfpu=vfpv3-d16",
136 | "-mthumb",
137 | ]
138 | }
139 |
140 | ARCH_C_LIKE_FLAGS = {
141 | "x86": [
142 | "-mfpmath=sse",
143 | "-mstackrealign",
144 | ]
145 | }
146 |
147 | ARCH_LINKER_FLAGS = {
148 | "arm": [
149 | "-Wl,--fix-cortex-a8",
150 | ]
151 | }
152 |
--------------------------------------------------------------------------------
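The NDK version check above works around the fact that Android's source.properties is key/value text without a section header, so one is injected before handing it to ConfigParser. A minimal, self-contained sketch of that step (the property contents below are illustrative, not read from a real NDK):

    from configparser import ConfigParser

    # Illustrative source.properties payload; a real NDK ships Pkg.Desc, Pkg.Revision, etc.
    raw_props = "Pkg.Desc = Android NDK\nPkg.Revision = 25.2.9519653\n"

    props = ConfigParser()
    props.read_string("[source]\n" + raw_props)  # inject the missing section header
    major_version = int(props["source"]["Pkg.Revision"].split(".")[0])
    print(major_version)  # 25, which satisfies NDK_REQUIRED
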
/devkit-assets/frida-core-example-unix.c:
--------------------------------------------------------------------------------
1 | #include "frida-core.h"
2 |
3 | #include <stdlib.h>
4 | #include <string.h>
5 |
6 | static void on_detached (FridaSession * session, FridaSessionDetachReason reason, FridaCrash * crash, gpointer user_data);
7 | static void on_message (FridaScript * script, const gchar * message, GBytes * data, gpointer user_data);
8 | static void on_signal (int signo);
9 | static gboolean stop (gpointer user_data);
10 |
11 | static GMainLoop * loop = NULL;
12 |
13 | int
14 | main (int argc,
15 | char * argv[])
16 | {
17 | guint target_pid;
18 | FridaDeviceManager * manager;
19 | GError * error = NULL;
20 | FridaDeviceList * devices;
21 | gint num_devices, i;
22 | FridaDevice * local_device;
23 | FridaSession * session;
24 |
25 | frida_init ();
26 |
27 | if (argc != 2 || (target_pid = atoi (argv[1])) == 0)
28 | {
29 | g_printerr ("Usage: %s <pid>\n", argv[0]);
30 | return 1;
31 | }
32 |
33 | loop = g_main_loop_new (NULL, TRUE);
34 |
35 | signal (SIGINT, on_signal);
36 | signal (SIGTERM, on_signal);
37 |
38 | manager = frida_device_manager_new ();
39 |
40 | devices = frida_device_manager_enumerate_devices_sync (manager, NULL, &error);
41 | g_assert (error == NULL);
42 |
43 | local_device = NULL;
44 | num_devices = frida_device_list_size (devices);
45 | for (i = 0; i != num_devices; i++)
46 | {
47 | FridaDevice * device = frida_device_list_get (devices, i);
48 |
49 | g_print ("[*] Found device: \"%s\"\n", frida_device_get_name (device));
50 |
51 | if (frida_device_get_dtype (device) == FRIDA_DEVICE_TYPE_LOCAL)
52 | local_device = g_object_ref (device);
53 |
54 | g_object_unref (device);
55 | }
56 | g_assert (local_device != NULL);
57 |
58 | frida_unref (devices);
59 | devices = NULL;
60 |
61 | session = frida_device_attach_sync (local_device, target_pid, NULL, NULL, &error);
62 | if (error == NULL)
63 | {
64 | FridaScript * script;
65 | FridaScriptOptions * options;
66 |
67 | g_signal_connect (session, "detached", G_CALLBACK (on_detached), NULL);
68 | if (frida_session_is_detached (session))
69 | goto session_detached_prematurely;
70 |
71 | g_print ("[*] Attached\n");
72 |
73 | options = frida_script_options_new ();
74 | frida_script_options_set_name (options, "example");
75 | frida_script_options_set_runtime (options, FRIDA_SCRIPT_RUNTIME_QJS);
76 |
77 | script = frida_session_create_script_sync (session,
78 | "Interceptor.attach(Module.getExportByName(null, 'open'), {\n"
79 | " onEnter(args) {\n"
80 | " console.log(`[*] open(\"${args[0].readUtf8String()}\")`);\n"
81 | " }\n"
82 | "});\n"
83 | "Interceptor.attach(Module.getExportByName(null, 'close'), {\n"
84 | " onEnter(args) {\n"
85 | " console.log(`[*] close(${args[0].toInt32()})`);\n"
86 | " }\n"
87 | "});",
88 | options, NULL, &error);
89 | g_assert (error == NULL);
90 |
91 | g_clear_object (&options);
92 |
93 | g_signal_connect (script, "message", G_CALLBACK (on_message), NULL);
94 |
95 | frida_script_load_sync (script, NULL, &error);
96 | g_assert (error == NULL);
97 |
98 | g_print ("[*] Script loaded\n");
99 |
100 | if (g_main_loop_is_running (loop))
101 | g_main_loop_run (loop);
102 |
103 | g_print ("[*] Stopped\n");
104 |
105 | frida_script_unload_sync (script, NULL, NULL);
106 | frida_unref (script);
107 | g_print ("[*] Unloaded\n");
108 |
109 | frida_session_detach_sync (session, NULL, NULL);
110 | session_detached_prematurely:
111 | frida_unref (session);
112 | g_print ("[*] Detached\n");
113 | }
114 | else
115 | {
116 | g_printerr ("Failed to attach: %s\n", error->message);
117 | g_error_free (error);
118 | }
119 |
120 | frida_unref (local_device);
121 |
122 | frida_device_manager_close_sync (manager, NULL, NULL);
123 | frida_unref (manager);
124 | g_print ("[*] Closed\n");
125 |
126 | g_main_loop_unref (loop);
127 |
128 | return 0;
129 | }
130 |
131 | static void
132 | on_detached (FridaSession * session,
133 | FridaSessionDetachReason reason,
134 | FridaCrash * crash,
135 | gpointer user_data)
136 | {
137 | gchar * reason_str;
138 |
139 | reason_str = g_enum_to_string (FRIDA_TYPE_SESSION_DETACH_REASON, reason);
140 | g_print ("on_detached: reason=%s crash=%p\n", reason_str, crash);
141 | g_free (reason_str);
142 |
143 | g_idle_add (stop, NULL);
144 | }
145 |
146 | static void
147 | on_message (FridaScript * script,
148 | const gchar * message,
149 | GBytes * data,
150 | gpointer user_data)
151 | {
152 | JsonParser * parser;
153 | JsonObject * root;
154 | const gchar * type;
155 |
156 | parser = json_parser_new ();
157 | json_parser_load_from_data (parser, message, -1, NULL);
158 | root = json_node_get_object (json_parser_get_root (parser));
159 |
160 | type = json_object_get_string_member (root, "type");
161 | if (strcmp (type, "log") == 0)
162 | {
163 | const gchar * log_message;
164 |
165 | log_message = json_object_get_string_member (root, "payload");
166 | g_print ("%s\n", log_message);
167 | }
168 | else
169 | {
170 | g_print ("on_message: %s\n", message);
171 | }
172 |
173 | g_object_unref (parser);
174 | }
175 |
176 | static void
177 | on_signal (int signo)
178 | {
179 | g_idle_add (stop, NULL);
180 | }
181 |
182 | static gboolean
183 | stop (gpointer user_data)
184 | {
185 | g_main_loop_quit (loop);
186 |
187 | return FALSE;
188 | }
189 |
--------------------------------------------------------------------------------
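The on_message() handler above treats every script message as a JSON envelope and special-cases type "log", which is how console.log() output from the injected script arrives. A small Python sketch of the same dispatch, using a made-up payload:

    import json

    # Hypothetical envelope; real ones are emitted by the script loaded above.
    message = '{"type": "log", "level": "info", "payload": "[*] open(\\"/etc/hosts\\")"}'

    root = json.loads(message)
    if root["type"] == "log":
        print(root["payload"])          # mirrors the "log" branch in on_message()
    else:
        print("on_message:", message)
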
/winenv.py:
--------------------------------------------------------------------------------
1 | import json
2 | from operator import attrgetter
3 | import os
4 | from pathlib import Path
5 | import platform
6 | import subprocess
7 | from typing import List, Optional, Tuple
8 | if platform.system() == "Windows":
9 | import winreg
10 |
11 | from .machine_spec import MachineSpec
12 |
13 |
14 | cached_msvs_dir = None
15 | cached_msvc_dir = None
16 | cached_winsdk = None
17 |
18 |
19 | def detect_msvs_installation_dir(toolchain_prefix: Optional[Path]) -> Path:
20 | global cached_msvs_dir
21 | if cached_msvs_dir is None:
22 | vswhere = Path(os.environ.get("ProgramFiles(x86)", os.environ["ProgramFiles"])) \
23 | / "Microsoft Visual Studio" / "Installer" / "vswhere.exe"
24 | if not vswhere.exists():
25 | if toolchain_prefix is None:
26 | raise MissingDependencyError("unable to locate vswhere.exe")
27 | vswhere = toolchain_prefix / "bin" / "vswhere.exe"
28 | installations = json.loads(
29 | subprocess.run([
30 | vswhere,
31 | "-latest",
32 | "-format", "json",
33 | "-property", "installationPath"
34 | ],
35 | capture_output=True,
36 | encoding="utf-8",
37 | check=True).stdout
38 | )
39 | if len(installations) == 0:
40 | raise MissingDependencyError("Visual Studio is not installed")
41 | cached_msvs_dir = Path(installations[0]["installationPath"])
42 | return cached_msvs_dir
43 |
44 |
45 | def detect_msvc_tool_dir(toolchain_prefix: Optional[Path]) -> Path:
46 | global cached_msvc_dir
47 | if cached_msvc_dir is None:
48 | msvs_dir = detect_msvs_installation_dir(toolchain_prefix)
49 | version = sorted((msvs_dir / "VC" / "Tools" / "MSVC").glob("*.*.*"),
50 | key=attrgetter("name"),
51 | reverse=True)[0].name
52 | cached_msvc_dir = msvs_dir / "VC" / "Tools" / "MSVC" / version
53 | return cached_msvc_dir
54 |
55 |
56 | def detect_windows_sdk() -> Tuple[Path, str]:
57 | global cached_winsdk
58 | if cached_winsdk is None:
59 | try:
60 | key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r"SOFTWARE\Microsoft\Windows Kits\Installed Roots")
61 | try:
62 | (install_dir, _) = winreg.QueryValueEx(key, "KitsRoot10")
63 | install_dir = Path(install_dir)
64 | version = sorted((install_dir / "Include").glob("*.*.*"),
65 | key=attrgetter("name"),
66 | reverse=True)[0].name
67 | cached_winsdk = (install_dir, version)
68 | finally:
69 | winreg.CloseKey(key)
70 | except Exception as e:
71 | raise MissingDependencyError("Windows 10 SDK is not installed")
72 | return cached_winsdk
73 |
74 |
75 | def detect_msvs_tool_path(machine: MachineSpec,
76 | build_machine: MachineSpec,
77 | tool: str,
78 | toolchain_prefix: Optional[Path]) -> Path:
79 | return detect_msvc_tool_dir(toolchain_prefix) / "bin" / f"Host{build_machine.msvc_platform}" \
80 | / machine.msvc_platform / tool
81 |
82 |
83 | def detect_msvs_runtime_path(machine: MachineSpec,
84 | build_machine: MachineSpec,
85 | toolchain_prefix: Optional[Path]) -> List[Path]:
86 | msvc_platform = machine.msvc_platform
87 | native_msvc_platform = build_machine.msvc_platform
88 |
89 | msvc_dir = detect_msvc_tool_dir(toolchain_prefix)
90 | msvc_bindir = msvc_dir / "bin" / f"Host{native_msvc_platform}" / msvc_platform
91 |
92 | msvc_dll_dirs = []
93 | if msvc_platform != native_msvc_platform:
94 | msvc_dll_dirs.append(msvc_dir / "bin" / f"Host{native_msvc_platform}" / native_msvc_platform)
95 |
96 | (winsdk_dir, winsdk_version) = detect_windows_sdk()
97 | winsdk_bindir = winsdk_dir / "Bin" / winsdk_version / msvc_platform
98 |
99 | return [winsdk_bindir, msvc_bindir] + msvc_dll_dirs
100 |
101 |
102 | def detect_msvs_include_path(toolchain_prefix: Optional[Path]) -> List[Path]:
103 | msvc_dir = detect_msvc_tool_dir(toolchain_prefix)
104 | vc_dir = detect_msvs_installation_dir(toolchain_prefix) / "VC"
105 |
106 | (winsdk_dir, winsdk_version) = detect_windows_sdk()
107 | winsdk_inc_dirs = [
108 | winsdk_dir / "Include" / winsdk_version / "um",
109 | winsdk_dir / "Include" / winsdk_version / "shared",
110 | ]
111 |
112 | return [
113 | msvc_dir / "include",
114 | msvc_dir / "atlmfc" / "include",
115 | vc_dir / "Auxiliary" / "VS" / "include",
116 | winsdk_dir / "Include" / winsdk_version / "ucrt",
117 | ] + winsdk_inc_dirs
118 |
119 |
120 | def detect_msvs_library_path(machine: MachineSpec,
121 | toolchain_prefix: Optional[Path]) -> List[Path]:
122 | msvc_platform = machine.msvc_platform
123 |
124 | msvc_dir = detect_msvc_tool_dir(toolchain_prefix)
125 | vc_dir = detect_msvs_installation_dir(toolchain_prefix) / "VC"
126 |
127 | (winsdk_dir, winsdk_version) = detect_windows_sdk()
128 | winsdk_lib_dir = winsdk_dir / "Lib" / winsdk_version / "um" / msvc_platform
129 |
130 | return [
131 | msvc_dir / "lib" / msvc_platform,
132 | msvc_dir / "atlmfc" / "lib" / msvc_platform,
133 | vc_dir / "Auxiliary" / "VS" / "lib" / msvc_platform,
134 | winsdk_dir / "Lib" / winsdk_version / "ucrt" / msvc_platform,
135 | winsdk_lib_dir,
136 | ]
137 |
138 |
139 | class MissingDependencyError(Exception):
140 | pass
141 |
--------------------------------------------------------------------------------
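A hypothetical usage sketch of the detection helpers above, for orientation only: it assumes a Windows host with Visual Studio and the Windows 10 SDK installed, and that the module is imported as part of its package (the package name below is an assumption, since winenv.py uses relative imports):

    from releng import winenv  # package name assumed

    msvc_dir = winenv.detect_msvc_tool_dir(toolchain_prefix=None)
    winsdk_dir, winsdk_version = winenv.detect_windows_sdk()
    include_dirs = winenv.detect_msvs_include_path(toolchain_prefix=None)

    print(msvc_dir)                       # ...\VC\Tools\MSVC\<version>
    print(winsdk_dir, winsdk_version)
    print([str(d) for d in include_dirs])
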
/devkit-assets/frida-core-example-windows.c:
--------------------------------------------------------------------------------
1 | /*
2 | * To build, set up your Release configuration like this:
3 | *
4 | * [Runtime Library]
5 | * Multi-threaded (/MT)
6 | *
7 | * Visit https://frida.re to learn more about Frida.
8 | */
9 |
10 | #include "frida-core.h"
11 |
12 | #include <stdlib.h>
13 | #include <string.h>
14 |
15 | static void on_detached (FridaSession * session, FridaSessionDetachReason reason, FridaCrash * crash, gpointer user_data);
16 | static void on_message (FridaScript * script, const gchar * message, GBytes * data, gpointer user_data);
17 | static void on_signal (int signo);
18 | static gboolean stop (gpointer user_data);
19 |
20 | static GMainLoop * loop = NULL;
21 |
22 | int
23 | main (int argc,
24 | char * argv[])
25 | {
26 | guint target_pid;
27 | FridaDeviceManager * manager;
28 | GError * error = NULL;
29 | FridaDeviceList * devices;
30 | gint num_devices, i;
31 | FridaDevice * local_device;
32 | FridaSession * session;
33 |
34 | frida_init ();
35 |
36 | if (argc != 2 || (target_pid = atoi (argv[1])) == 0)
37 | {
38 | g_printerr ("Usage: %s <pid>\n", argv[0]);
39 | return 1;
40 | }
41 |
42 | loop = g_main_loop_new (NULL, TRUE);
43 |
44 | signal (SIGINT, on_signal);
45 | signal (SIGTERM, on_signal);
46 |
47 | manager = frida_device_manager_new ();
48 |
49 | devices = frida_device_manager_enumerate_devices_sync (manager, NULL, &error);
50 | g_assert (error == NULL);
51 |
52 | local_device = NULL;
53 | num_devices = frida_device_list_size (devices);
54 | for (i = 0; i != num_devices; i++)
55 | {
56 | FridaDevice * device = frida_device_list_get (devices, i);
57 |
58 | g_print ("[*] Found device: \"%s\"\n", frida_device_get_name (device));
59 |
60 | if (frida_device_get_dtype (device) == FRIDA_DEVICE_TYPE_LOCAL)
61 | local_device = g_object_ref (device);
62 |
63 | g_object_unref (device);
64 | }
65 | g_assert (local_device != NULL);
66 |
67 | frida_unref (devices);
68 | devices = NULL;
69 |
70 | session = frida_device_attach_sync (local_device, target_pid, NULL, NULL, &error);
71 | if (error == NULL)
72 | {
73 | FridaScript * script;
74 | FridaScriptOptions * options;
75 |
76 | g_signal_connect (session, "detached", G_CALLBACK (on_detached), NULL);
77 | if (frida_session_is_detached (session))
78 | goto session_detached_prematurely;
79 |
80 | g_print ("[*] Attached\n");
81 |
82 | options = frida_script_options_new ();
83 | frida_script_options_set_name (options, "example");
84 | frida_script_options_set_runtime (options, FRIDA_SCRIPT_RUNTIME_QJS);
85 |
86 | script = frida_session_create_script_sync (session,
87 | "Interceptor.attach(Module.getExportByName('kernel32.dll', 'CreateFileW'), {\n"
88 | " onEnter(args) {\n"
89 | " console.log(`[*] CreateFileW(\"${args[0].readUtf16String()}\")`);\n"
90 | " }\n"
91 | "});\n"
92 | "Interceptor.attach(Module.getExportByName('kernel32.dll', 'CloseHandle'), {\n"
93 | " onEnter(args) {\n"
94 | " console.log(`[*] CloseHandle(${args[0]})`);\n"
95 | " }\n"
96 | "});",
97 | options, NULL, &error);
98 | g_assert (error == NULL);
99 |
100 | g_clear_object (&options);
101 |
102 | g_signal_connect (script, "message", G_CALLBACK (on_message), NULL);
103 |
104 | frida_script_load_sync (script, NULL, &error);
105 | g_assert (error == NULL);
106 |
107 | g_print ("[*] Script loaded\n");
108 |
109 | if (g_main_loop_is_running (loop))
110 | g_main_loop_run (loop);
111 |
112 | g_print ("[*] Stopped\n");
113 |
114 | frida_script_unload_sync (script, NULL, NULL);
115 | frida_unref (script);
116 | g_print ("[*] Unloaded\n");
117 |
118 | frida_session_detach_sync (session, NULL, NULL);
119 | session_detached_prematurely:
120 | frida_unref (session);
121 | g_print ("[*] Detached\n");
122 | }
123 | else
124 | {
125 | g_printerr ("Failed to attach: %s\n", error->message);
126 | g_error_free (error);
127 | }
128 |
129 | frida_unref (local_device);
130 |
131 | frida_device_manager_close_sync (manager, NULL, NULL);
132 | frida_unref (manager);
133 | g_print ("[*] Closed\n");
134 |
135 | g_main_loop_unref (loop);
136 |
137 | return 0;
138 | }
139 |
140 | static void
141 | on_detached (FridaSession * session,
142 | FridaSessionDetachReason reason,
143 | FridaCrash * crash,
144 | gpointer user_data)
145 | {
146 | gchar * reason_str;
147 |
148 | reason_str = g_enum_to_string (FRIDA_TYPE_SESSION_DETACH_REASON, reason);
149 | g_print ("on_detached: reason=%s crash=%p\n", reason_str, crash);
150 | g_free (reason_str);
151 |
152 | g_idle_add (stop, NULL);
153 | }
154 |
155 | static void
156 | on_message (FridaScript * script,
157 | const gchar * message,
158 | GBytes * data,
159 | gpointer user_data)
160 | {
161 | JsonParser * parser;
162 | JsonObject * root;
163 | const gchar * type;
164 |
165 | parser = json_parser_new ();
166 | json_parser_load_from_data (parser, message, -1, NULL);
167 | root = json_node_get_object (json_parser_get_root (parser));
168 |
169 | type = json_object_get_string_member (root, "type");
170 | if (strcmp (type, "log") == 0)
171 | {
172 | const gchar * log_message;
173 |
174 | log_message = json_object_get_string_member (root, "payload");
175 | g_print ("%s\n", log_message);
176 | }
177 | else
178 | {
179 | g_print ("on_message: %s\n", message);
180 | }
181 |
182 | g_object_unref (parser);
183 | }
184 |
185 | static void
186 | on_signal (int signo)
187 | {
188 | g_idle_add (stop, NULL);
189 | }
190 |
191 | static gboolean
192 | stop (gpointer user_data)
193 | {
194 | g_main_loop_quit (loop);
195 |
196 | return FALSE;
197 | }
198 |
--------------------------------------------------------------------------------
/env_apple.py:
--------------------------------------------------------------------------------
1 | from configparser import ConfigParser
2 | from pathlib import Path
3 | import shlex
4 | import subprocess
5 | from typing import Callable, Dict, List, Optional
6 |
7 | from .machine_file import strv_to_meson
8 | from .machine_spec import MachineSpec
9 |
10 |
11 | def init_machine_config(machine: MachineSpec,
12 | build_machine: MachineSpec,
13 | is_cross_build: bool,
14 | environ: Dict[str, str],
15 | toolchain_prefix: Optional[Path],
16 | sdk_prefix: Optional[Path],
17 | call_selected_meson: Callable,
18 | config: ConfigParser,
19 | outpath: List[str],
20 | outenv: Dict[str, str],
21 | outdir: Path):
22 | xcenv = {**environ}
23 | if machine.arch == "arm64eoabi":
24 | try:
25 | xcenv["DEVELOPER_DIR"] = (Path(xcenv["XCODE11"]) / "Contents" / "Developer").as_posix()
26 | except KeyError:
27 | raise Xcode11NotFoundError("for arm64eoabi support, XCODE11 must be set to the location of your Xcode 11 app bundle")
28 |
29 | def xcrun(*args):
30 | try:
31 | return subprocess.run(["xcrun"] + list(args),
32 | env=xcenv,
33 | capture_output=True,
34 | encoding="utf-8",
35 | check=True).stdout.strip()
36 | except subprocess.CalledProcessError as e:
37 | raise XCRunError("\n\t| ".join(e.stderr.strip().split("\n")))
38 |
39 | clang_arch = APPLE_CLANG_ARCHS.get(machine.arch, machine.arch)
40 |
41 | os_minver = APPLE_MINIMUM_OS_VERSIONS.get(machine.os_dash_arch,
42 | APPLE_MINIMUM_OS_VERSIONS[machine.os])
43 |
44 | target = f"{clang_arch}-apple-{machine.os}{os_minver}"
45 | if machine.config is not None:
46 | target += "-" + machine.config
47 |
48 | sdk_name = APPLE_SDKS[machine.os_dash_config]
49 | sdk_path = xcrun("--sdk", sdk_name, "--show-sdk-path")
50 |
51 | use_static_libcxx = sdk_prefix is not None \
52 | and (sdk_prefix / "lib" / "c++" / "libc++.a").exists() \
53 | and machine.os != "watchos"
54 |
55 | binaries = config["binaries"]
56 | clang_path = None
57 | for (identifier, tool_name, *rest) in APPLE_BINARIES:
58 | if tool_name.startswith("#"):
59 | binaries[identifier] = binaries[tool_name[1:]]
60 | continue
61 |
62 | path = xcrun("--sdk", sdk_name, "-f", tool_name)
63 | if tool_name == "clang":
64 | clang_path = Path(path)
65 |
66 | argv = [path]
67 | if len(rest) != 0:
68 | argv += rest[0]
69 | if identifier == "cpp" and not use_static_libcxx:
70 | argv += ["-stdlib=libc++"]
71 | if identifier == "swift":
72 | argv += ["-target", target, "-sdk", sdk_path]
73 |
74 | raw_val = str(argv)
75 | if identifier in {"c", "cpp"}:
76 | raw_val += " + common_flags"
77 |
78 | binaries[identifier] = raw_val
79 |
80 | read_envflags = lambda name: shlex.split(environ.get(name, ""))
81 |
82 | c_like_flags = read_envflags("CPPFLAGS")
83 |
84 | linker_flags = ["-Wl,-dead_strip"]
85 | if (clang_path.parent / "ld-classic").exists():
86 | # New linker links with libresolv even if we're not using any symbols from it,
87 | # at least as of Xcode 15.0 beta 7.
88 | linker_flags += ["-Wl,-ld_classic"]
89 | linker_flags += read_envflags("LDFLAGS")
90 |
91 | constants = config["constants"]
92 | constants["common_flags"] = strv_to_meson([
93 | "-target", target,
94 | "-isysroot", sdk_path,
95 | ])
96 | constants["c_like_flags"] = strv_to_meson(c_like_flags)
97 | constants["linker_flags"] = strv_to_meson(linker_flags)
98 |
99 | if use_static_libcxx:
100 | constants["cxx_like_flags"] = strv_to_meson([
101 | "-nostdinc++",
102 | "-isystem" + str(sdk_prefix / "include" / "c++"),
103 | ])
104 | constants["cxx_link_flags"] = strv_to_meson([
105 | "-nostdlib++",
106 | "-L" + str(sdk_prefix / "lib" / "c++"),
107 | "-lc++",
108 | "-lc++abi",
109 | ])
110 | else:
111 | constants["cxx_like_flags"] = strv_to_meson([])
112 | constants["cxx_link_flags"] = strv_to_meson([])
113 |
114 | options = config["built-in options"]
115 | options["c_args"] = "c_like_flags + " + strv_to_meson(read_envflags("CFLAGS"))
116 | options["cpp_args"] = "c_like_flags + cxx_like_flags + " + strv_to_meson(read_envflags("CXXFLAGS"))
117 | options["objc_args"] = "c_like_flags + " + strv_to_meson(read_envflags("OBJCFLAGS"))
118 | options["objcpp_args"] = "c_like_flags + cxx_like_flags + " + strv_to_meson(read_envflags("OBJCXXFLAGS"))
119 | options["c_link_args"] = "linker_flags"
120 | options["cpp_link_args"] = "linker_flags + cxx_link_flags"
121 | options["objc_link_args"] = "linker_flags"
122 | options["objcpp_link_args"] = "linker_flags + cxx_link_flags"
123 | options["b_lundef"] = "true"
124 |
125 |
126 | class XCRunError(Exception):
127 | pass
128 |
129 |
130 | class Xcode11NotFoundError(Exception):
131 | pass
132 |
133 |
134 | APPLE_SDKS = {
135 | "macos": "macosx",
136 | "ios": "iphoneos",
137 | "ios-simulator": "iphonesimulator",
138 | "watchos": "watchos",
139 | "watchos-simulator": "watchsimulator",
140 | "tvos": "appletvos",
141 | "tvos-simulator": "appletvsimulator",
142 | "xros": "xros",
143 | }
144 |
145 | APPLE_CLANG_ARCHS = {
146 | "x86": "i386",
147 | "arm": "armv7",
148 | "arm64eoabi": "arm64e",
149 | }
150 |
151 | APPLE_MINIMUM_OS_VERSIONS = {
152 | "macos": "10.13",
153 | "macos-arm64": "11.0",
154 | "macos-arm64e": "11.0",
155 | "ios": "8.0",
156 | "watchos": "9.0",
157 | "tvos": "13.0",
158 | "xros": "26.0",
159 | }
160 |
161 | APPLE_BINARIES = [
162 | ("c", "clang"),
163 | ("cpp", "clang++"),
164 | ("objc", "#c"),
165 | ("objcpp", "#cpp"),
166 | ("swift", "swiftc"),
167 |
168 | ("ar", "ar"),
169 | ("nm", "llvm-nm"),
170 | ("ranlib", "ranlib"),
171 | ("strip", "strip", ["-Sx"]),
172 | ("libtool", "libtool"),
173 |
174 | ("install_name_tool", "install_name_tool"),
175 | ("otool", "otool"),
176 | ("codesign", "codesign"),
177 | ("lipo", "lipo"),
178 | ]
179 |
--------------------------------------------------------------------------------
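The xcrun() helper above is a thin subprocess wrapper; a standalone sketch of the same probing, assuming a macOS host with Xcode installed:

    import subprocess

    def xcrun(*args):
        # Run xcrun and return its trimmed stdout, as the helper above does.
        return subprocess.run(["xcrun", *args],
                              capture_output=True, encoding="utf-8", check=True).stdout.strip()

    sdk_path = xcrun("--sdk", "macosx", "--show-sdk-path")
    clang_path = xcrun("--sdk", "macosx", "-f", "clang")
    print(sdk_path)
    print(clang_path)
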
/sync-from-upstream.py:
--------------------------------------------------------------------------------
1 | import os
2 | from pathlib import Path
3 | import re
4 | import subprocess
5 | import sys
6 |
7 |
8 | def make_gnome_url(repo_name):
9 | return "https://gitlab.gnome.org/GNOME/{}.git".format(repo_name)
10 |
11 |
12 | upstreams = {
13 | "meson": ("https://github.com/mesonbuild/meson.git", "master"),
14 | "termux-elf-cleaner": "https://github.com/termux/termux-elf-cleaner.git",
15 | "libiconv": "https://git.savannah.gnu.org/git/libiconv.git",
16 | "zlib": ("https://github.com/madler/zlib.git", "develop"),
17 | "brotli": "https://github.com/google/brotli.git",
18 | "minizip": "https://github.com/zlib-ng/minizip-ng.git",
19 | "libffi": "https://github.com/libffi/libffi.git",
20 | "libunwind": "https://github.com/libunwind/libunwind.git",
21 | "glib": make_gnome_url("glib"),
22 | "glib-networking": (make_gnome_url("glib-networking"), "master"),
23 | "libnice": "https://gitlab.freedesktop.org/libnice/libnice.git",
24 | "usrsctp": "https://github.com/sctplab/usrsctp.git",
25 | "libgee": make_gnome_url("libgee"),
26 | "json-glib": make_gnome_url("json-glib"),
27 | "libpsl": "https://github.com/rockdaboot/libpsl.git",
28 | "libxml2": make_gnome_url("libxml2"),
29 | "libsoup": make_gnome_url("libsoup"),
30 | "vala": make_gnome_url("vala"),
31 | "libdwarf": "https://github.com/davea42/libdwarf-code.git",
32 | "xz": "https://git.tukaani.org/xz.git",
33 | "pkg-config": "https://gitlab.freedesktop.org/pkg-config/pkg-config.git",
34 | "quickjs": ("https://github.com/bellard/quickjs.git", "master"),
35 | "gn": "https://gn.googlesource.com/gn",
36 | "v8": "https://chromium.googlesource.com/v8/v8",
37 | "capstone": ("https://github.com/capstone-engine/capstone.git", "v5"),
38 | "tinycc": "https://repo.or.cz/tinycc.git",
39 | }
40 |
41 |
42 | def sync(repo_path):
43 | repo_name = os.path.basename(repo_path)
44 |
45 | patches_path = os.path.join(str(Path.home()), ".frida-sync-" + re.sub(r"[^\w\d]", "-", repo_path.lower()).lstrip("-"))
46 | if os.path.exists(patches_path):
47 | patches = PendingPatches.load(patches_path)
48 |
49 | print("Applying {} pending patches".format(patches.count))
50 | else:
51 | entry = upstreams.get(repo_name, None)
52 | if entry is None:
53 | raise UnknownUpstreamError("Unknown upstream: {}".format(repo_name))
54 | if isinstance(entry, tuple):
55 | upstream_url, upstream_branch = entry
56 | else:
57 | upstream_url = entry
58 | upstream_branch = "main"
59 | upstream_target = "upstream/" + upstream_branch
60 |
61 | print("Synchronizing with {}".format(upstream_url))
62 |
63 | subprocess.run(["git", "checkout", "main"], cwd=repo_path, capture_output=True, check=True)
64 | subprocess.run(["git", "pull"], cwd=repo_path, capture_output=True, check=True)
65 | result = subprocess.run(["git", "status"], cwd=repo_path, capture_output=True, check=True, encoding='utf-8')
66 | if not "working tree clean" in result.stdout:
67 | raise WorkingTreeDirtyError("Working tree is dirty")
68 |
69 | subprocess.run(["git", "remote", "add", "upstream", upstream_url], cwd=repo_path, capture_output=True)
70 | subprocess.run(["git", "fetch", "upstream"], cwd=repo_path, check=True)
71 |
72 | patches, base = list_our_patches(repo_path)
73 | print("We have {} patches on top of upstream".format(patches.count))
74 |
75 | new_entries = list_upstream_changes(repo_path, upstream_target, base)
76 | if len(new_entries) == 0:
77 | print("Already up-to-date")
78 | return
79 |
80 | print("Upstream has {} new commits".format(len(new_entries)))
81 |
82 | print("Merging...")
83 | subprocess.run(["git", "merge", "-s", "ours", upstream_target], cwd=repo_path, capture_output=True, check=True)
84 | subprocess.run(["git", "checkout", "--detach", upstream_target], cwd=repo_path, capture_output=True, check=True)
85 | subprocess.run(["git", "reset", "--soft", "main"], cwd=repo_path, capture_output=True, check=True)
86 | subprocess.run(["git", "checkout", "main"], cwd=repo_path, capture_output=True, check=True)
87 | subprocess.run(["git", "commit", "--amend", "-C", "HEAD"], cwd=repo_path, capture_output=True, check=True)
88 |
89 | patches.save(patches_path)
90 |
91 | while True:
92 | index, cid, message = patches.try_pop()
93 | if index is None:
94 | break
95 |
96 | print("Cherry-picking {}/{}: {}".format(index + 1, patches.count, message))
97 | try:
98 | subprocess.run(["git", "cherry-pick", cid], cwd=repo_path, capture_output=True, encoding='utf-8', check=True)
99 | except subprocess.CalledProcessError as e:
100 | patches.save(patches_path)
101 |
102 | print("\n*** Unable to apply this patch:")
103 | print(e.stderr)
104 | print("Run `git cherry-pick --abort` and re-run script to skip it.")
105 |
106 | return
107 |
108 | os.remove(patches_path)
109 | print("Done!")
110 |
111 | def list_our_patches(repo_path):
112 | items = []
113 | base = None
114 | entries = list_recent_commits(repo_path, "--max-count=1000")
115 | for index, entry in enumerate(entries):
116 | cid, message = entry
117 | if message.startswith("Merge"):
118 | base = entries[index + 1][0]
119 | break
120 | items.append(("pending", cid, message))
121 | items.reverse()
122 | return (PendingPatches(items), base)
123 |
124 | def list_upstream_changes(repo_path, upstream_target, since):
125 | return list(reversed(list_recent_commits(repo_path, since + ".." + upstream_target)))
126 |
127 | def list_recent_commits(repo_path, *args):
128 | result = subprocess.run(["git", "log", "--pretty=oneline", "--abbrev-commit", "--topo-order"] + list(args),
129 | cwd=repo_path, capture_output=True, check=True, encoding='utf-8', errors='surrogateescape')
130 | return [line.split(" ", 1) for line in result.stdout.rstrip().split("\n")]
131 |
132 |
133 | class PendingPatches(object):
134 | def __init__(self, items):
135 | self._items = items
136 |
137 | offset = 0
138 | for status, cid, message in items:
139 | if status == "applied":
140 | offset += 1
141 | else:
142 | break
143 | self._offset = offset
144 |
145 | @property
146 | def count(self):
147 | return len(self._items)
148 |
149 | def try_pop(self):
150 | index = self._offset
151 | if index == len(self._items):
152 | return (None, None, None)
153 |
154 | _, cid, message = self._items[index]
155 | self._items[index] = ("applied", cid, message)
156 | self._offset += 1
157 |
158 | return (index, cid, message)
159 |
160 | @classmethod
161 | def load(cls, path):
162 | with open(path, "r", encoding='utf-8') as f:
163 | data = f.read()
164 |
165 | items = []
166 | for line in data.strip().split("\n"):
167 | status, cid, message = line.split(" ", maxsplit=2)
168 | items.append((status, cid, message))
169 | return PendingPatches(items)
170 |
171 | def save(self, path):
172 | data = "\n".join([" ".join(item) for item in self._items]) + "\n"
173 | with open(path, "w", encoding='utf-8') as f:
174 | f.write(data)
175 |
176 |
177 | class WorkingTreeDirtyError(Exception):
178 | pass
179 |
180 |
181 | class UnknownUpstreamError(Exception):
182 | pass
183 |
184 |
185 | if __name__ == '__main__':
186 | sync(os.path.abspath(sys.argv[1]))
187 |
--------------------------------------------------------------------------------
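The script is invoked as "python sync-from-upstream.py /path/to/checkout", and PendingPatches persists its state as one "status cid message" line per patch. A tiny sketch of that line format (the commit id and message are made up):

    line = "pending 1a2b3c4 Add Frida-specific build tweaks"  # hypothetical entry
    status, cid, message = line.split(" ", maxsplit=2)
    assert (status, cid) == ("pending", "1a2b3c4")
    print(message)  # Add Frida-specific build tweaks
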
/devkit-assets/frida-gum-example.vcxproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Debug
6 | Win32
7 |
8 |
9 | Release
10 | Win32
11 |
12 |
13 | Debug
14 | x64
15 |
16 |
17 | Release
18 | x64
19 |
20 |
21 |
22 | {9EF5A94D-9907-4E7A-98E5-B03CB1750740}
23 | Win32Proj
24 | fridagumexample
25 | 10.0
26 |
27 |
28 |
29 | Application
30 | true
31 | v143
32 | Unicode
33 |
34 |
35 | Application
36 | false
37 | v143
38 | true
39 | Unicode
40 |
41 |
42 | Application
43 | true
44 | v143
45 | Unicode
46 |
47 |
48 | Application
49 | false
50 | v143
51 | true
52 | Unicode
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 | true
74 |
75 |
76 | true
77 |
78 |
79 | false
80 |
81 |
82 | false
83 |
84 |
85 |
86 |
87 |
88 | Level3
89 | Disabled
90 | WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)
91 | MultiThreadedDebug
92 |
93 |
94 | Console
95 | true
96 |
97 |
98 |
99 |
100 |
101 |
102 | Level3
103 | Disabled
104 | _DEBUG;_CONSOLE;%(PreprocessorDefinitions)
105 | MultiThreadedDebug
106 |
107 |
108 | Console
109 | true
110 |
111 |
112 |
113 |
114 | Level3
115 |
116 |
117 | MaxSpeed
118 | true
119 | true
120 | WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)
121 | MultiThreaded
122 |
123 |
124 | Console
125 | true
126 | true
127 | true
128 |
129 |
130 |
131 |
132 | Level3
133 |
134 |
135 | MaxSpeed
136 | true
137 | true
138 | NDEBUG;_CONSOLE;%(PreprocessorDefinitions)
139 | MultiThreaded
140 |
141 |
142 | Console
143 | true
144 | true
145 | true
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
--------------------------------------------------------------------------------
/devkit-assets/frida-core-example.vcxproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Debug
6 | Win32
7 |
8 |
9 | Release
10 | Win32
11 |
12 |
13 | Debug
14 | x64
15 |
16 |
17 | Release
18 | x64
19 |
20 |
21 |
22 | {1A424DA8-5C62-4AEA-A1A9-465359E0C17D}
23 | Win32Proj
24 | fridacoreexample
25 | 10.0
26 |
27 |
28 |
29 | Application
30 | true
31 | v143
32 | Unicode
33 |
34 |
35 | Application
36 | false
37 | v143
38 | true
39 | Unicode
40 |
41 |
42 | Application
43 | true
44 | v143
45 | Unicode
46 |
47 |
48 | Application
49 | false
50 | v143
51 | true
52 | Unicode
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 | true
74 |
75 |
76 | true
77 |
78 |
79 | false
80 |
81 |
82 | false
83 |
84 |
85 |
86 |
87 |
88 | Level3
89 | Disabled
90 | WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)
91 | MultiThreadedDebug
92 |
93 |
94 | Console
95 | true
96 |
97 |
98 |
99 |
100 |
101 |
102 | Level3
103 | Disabled
104 | _DEBUG;_CONSOLE;%(PreprocessorDefinitions)
105 | MultiThreadedDebug
106 |
107 |
108 | Console
109 | true
110 |
111 |
112 |
113 |
114 | Level3
115 |
116 |
117 | MaxSpeed
118 | true
119 | true
120 | WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)
121 | MultiThreaded
122 |
123 |
124 | Console
125 | true
126 | true
127 | true
128 |
129 |
130 |
131 |
132 | Level3
133 |
134 |
135 | MaxSpeed
136 | true
137 | true
138 | NDEBUG;_CONSOLE;%(PreprocessorDefinitions)
139 | MultiThreaded
140 |
141 |
142 | Console
143 | true
144 | true
145 | true
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
--------------------------------------------------------------------------------
/devkit-assets/frida-gumjs-example.vcxproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Debug
6 | Win32
7 |
8 |
9 | Release
10 | Win32
11 |
12 |
13 | Debug
14 | x64
15 |
16 |
17 | Release
18 | x64
19 |
20 |
21 |
22 | {30827938-6C79-4226-8A2F-22154CD45F5B}
23 | Win32Proj
24 | fridagumjsexample
25 | 10.0
26 |
27 |
28 |
29 | Application
30 | true
31 | v143
32 | Unicode
33 |
34 |
35 | Application
36 | false
37 | v143
38 | true
39 | Unicode
40 |
41 |
42 | Application
43 | true
44 | v143
45 | Unicode
46 |
47 |
48 | Application
49 | false
50 | v143
51 | true
52 | Unicode
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 | true
74 |
75 |
76 | true
77 |
78 |
79 | false
80 |
81 |
82 | false
83 |
84 |
85 |
86 |
87 |
88 | Level3
89 | Disabled
90 | WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)
91 | MultiThreadedDebug
92 |
93 |
94 | Console
95 | true
96 |
97 |
98 |
99 |
100 |
101 |
102 | Level3
103 | Disabled
104 | _DEBUG;_CONSOLE;%(PreprocessorDefinitions)
105 | MultiThreadedDebug
106 |
107 |
108 | Console
109 | true
110 |
111 |
112 |
113 |
114 | Level3
115 |
116 |
117 | MaxSpeed
118 | true
119 | true
120 | WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)
121 | MultiThreaded
122 |
123 |
124 | Console
125 | true
126 | true
127 | true
128 |
129 |
130 |
131 |
132 | Level3
133 |
134 |
135 | MaxSpeed
136 | true
137 | true
138 | NDEBUG;_CONSOLE;%(PreprocessorDefinitions)
139 | MultiThreaded
140 |
141 |
142 | Console
143 | true
144 | true
145 | true
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
--------------------------------------------------------------------------------
/deps.toml:
--------------------------------------------------------------------------------
1 | [dependencies]
2 | version = "20250919"
3 | bootstrap_version = "20250801"
4 |
5 | [ninja]
6 | scope = "toolchain"
7 | name = "Ninja"
8 | version = "516800b093d1a2e5589ee1cdd7393dffdf9c702e"
9 | url = "https://github.com/frida/ninja.git"
10 |
11 | [pkg-config]
12 | scope = "toolchain"
13 | name = "pkg-config"
14 | version = "4696795673d1d3dec46b663df48f8cbf66461d14"
15 | url = "https://github.com/frida/pkg-config.git"
16 | dependencies = [
17 | "glib",
18 | ]
19 |
20 | [vala]
21 | scope = "toolchain"
22 | name = "Vala"
23 | version = "9feabf0f8076c33b702d7cba612edfe0c1e45a00"
24 | url = "https://github.com/frida/vala.git"
25 | dependencies = [
26 | "glib",
27 | ]
28 |
29 | [libiconv]
30 | # Needed on non-Windows systems where iconv is not part of the libc.
31 | # On Apple OSes we include it in SDKs to avoid dependencies beyond libSystem.
32 | # We use Apple's implementation in toolchains to make them smaller.
33 | when = """ \
34 | (machine.is_apple and bundle is Bundle.SDK) \
35 | or machine.os in {'android', 'qnx', 'none'} \
36 | or machine.config == 'uclibc' \
37 | """
38 | name = "libiconv"
39 | version = "bbbf4561da4847bf95ce9458da76e072b77cabd1"
40 | url = "https://github.com/frida/libiconv.git"
41 |
42 | [zlib]
43 | name = "zlib"
44 | version = "171a3eacaea8b731ef1fc586e7777b77742e2a1d"
45 | url = "https://github.com/frida/zlib.git"
46 |
47 | [libffi]
48 | name = "libffi"
49 | version = "cca9fc2909f67d340f4a13ae274587eaf273371d"
50 | url = "https://github.com/frida/libffi.git"
51 | options = [
52 | "-Dexe_static_tramp=false",
53 | "-Dtests=false",
54 | ]
55 |
56 | [pcre2]
57 | name = "PCRE2"
58 | version = "b47486922fdc3486499b310dc9cf903449700474"
59 | url = "https://github.com/frida/pcre2.git"
60 | options = [
61 | "-Dgrep=false",
62 | "-Dtest=false",
63 | ]
64 |
65 | [selinux]
66 | when = "machine.os == 'android'"
67 | name = "SELinux Userspace"
68 | version = "6d5513fd8069e9ff9b7aa10970d34457b32970c8"
69 | url = "https://github.com/frida/selinux.git"
70 | options = [
71 | "-Dregex=disabled",
72 | ]
73 |
74 | [glib]
75 | name = "GLib"
76 | version = "9dc59b1b5503789ada22f7699e53256fa3287217"
77 | url = "https://github.com/frida/glib.git"
78 | options = [
79 | "-Dcocoa=disabled",
80 | "-Dselinux=disabled",
81 | "-Dxattr=false",
82 | "-Dlibmount=disabled",
83 | "-Dtests=false",
84 | "--force-fallback-for=pcre",
85 | { value = "-Dglib_debug=disabled", when = "machine.config_is_optimized" },
86 | { value = "-Dglib_assert=false", when = "machine.config_is_optimized" },
87 | { value = "-Dglib_checks=false", when = "machine.config_is_optimized" },
88 | { value = "-Diconv=external", when = """ \
89 | machine.is_apple \
90 | or machine.os in {'android', 'qnx', 'none'} \
91 | or machine.config == 'uclibc' \
92 | """ }
93 | ]
94 | dependencies = [
95 | "pcre2",
96 | "libffi",
97 | "zlib",
98 | { id = "libiconv", when = """ \
99 | (machine.is_apple and bundle is Bundle.SDK) \
100 | or machine.os in {'android', 'qnx', 'none'} \
101 | or machine.config == 'uclibc' \
102 | """ }
103 | ]
104 |
105 | [libdwarf]
106 | when = "machine.os in {'linux', 'android', 'freebsd', 'qnx'}"
107 | name = "libdwarf"
108 | version = "50e3115b340c6a58d2f61af96f120a9d111ac024"
109 | url = "https://github.com/frida/libdwarf.git"
110 | options = [
111 | "-Ddecompression=false",
112 | ]
113 |
114 | [xz]
115 | when = "machine.os != 'none'"
116 | name = "XZ Utils"
117 | version = "e70f5800ab5001c9509d374dbf3e7e6b866c43fe"
118 | url = "https://github.com/frida/xz.git"
119 | options = [
120 | "-Dcli=disabled",
121 | ]
122 |
123 | [brotli]
124 | when = "machine.os != 'none'"
125 | name = "Brotli"
126 | version = "01d9e2922ca878965ebcd71ee8965d2a7aadb47a"
127 | url = "https://github.com/frida/brotli.git"
128 |
129 | [lzfse]
130 | name = "LZFSE"
131 | version = "5cfb7c86919d3c2c636d0d7552b51855a611ba1c"
132 | url = "https://github.com/frida/lzfse.git"
133 |
134 | [minizip-ng]
135 | when = "machine.os != 'none'"
136 | name = "minizip-ng"
137 | version = "dfc1ccc070ff7bb50726c80215cac515253a8ba0"
138 | url = "https://github.com/frida/minizip-ng.git"
139 | options = [
140 | "-Dzlib=enabled",
141 | "-Dlzma=disabled",
142 | ]
143 | dependencies = [
144 | "zlib",
145 | { id = "libiconv", when = """ \
146 | machine.is_apple \
147 | or machine.os in {'android', 'qnx'} \
148 | or machine.config == 'uclibc' \
149 | """ },
150 | ]
151 |
152 | [sqlite]
153 | when = "machine.os != 'none'"
154 | name = "SQLite"
155 | version = "9337327a50008f2d2236112ccb6f44059b1bafbd"
156 | url = "https://github.com/frida/sqlite.git"
157 |
158 | [libunwind]
159 | when = "machine.os in {'linux', 'android', 'freebsd', 'qnx'}"
160 | name = "libunwind"
161 | version = "4d0abea0effd3c80916e70abe38c2a6156596f05"
162 | url = "https://github.com/frida/libunwind.git"
163 | options = [
164 | "-Dgeneric_library=disabled",
165 | "-Dcoredump_library=disabled",
166 | "-Dptrace_library=disabled",
167 | "-Dsetjmp_library=disabled",
168 | "-Dmsabi_support=false",
169 | "-Dminidebuginfo=enabled",
170 | "-Dzlibdebuginfo=enabled",
171 | ]
172 | dependencies = [
173 | "zlib",
174 | "xz",
175 | ]
176 |
177 | [glib-networking]
178 | when = "machine.os != 'none'"
179 | name = "glib-networking"
180 | version = "af4b017028e695528951c749a7096e96359521d8"
181 | url = "https://github.com/frida/glib-networking.git"
182 | options = [
183 | "-Dgnutls=disabled",
184 | "-Dopenssl=enabled",
185 | "-Dlibproxy=disabled",
186 | "-Dgnome_proxy=disabled",
187 | "-Dtests=false",
188 | ]
189 | dependencies = [
190 | "glib",
191 | "openssl",
192 | ]
193 |
194 | [libnice]
195 | when = "machine.os != 'none'"
196 | name = "libnice"
197 | version = "e12567b0a16a0c2eb5dfe5e0782baba8496772ff"
198 | url = "https://github.com/frida/libnice.git"
199 | options = [
200 | "-Dgupnp=disabled",
201 | "-Dgstreamer=disabled",
202 | "-Dcrypto-library=openssl",
203 | "-Dexamples=disabled",
204 | "-Dtests=disabled",
205 | "-Dintrospection=disabled",
206 | ]
207 | dependencies = [
208 | "glib",
209 | "openssl",
210 | ]
211 |
212 | [libusb]
213 | when = "machine.os in {'windows', 'macos', 'linux'}"
214 | name = "libusb"
215 | version = "ffff4bdfe8faa38cecfad5aab106cae923502d55"
216 | url = "https://github.com/frida/libusb.git"
217 |
218 | [lwip]
219 | when = "machine.os != 'none'"
220 | name = "lwIP"
221 | version = "4fe7223c2e80dc328266fb2483e789ee5fad7c79"
222 | url = "https://github.com/frida/lwip.git"
223 | options = [
224 | "-Dipv4=disabled",
225 | "-Dipv6=enabled",
226 | "-Ddns=disabled",
227 | "-Darp=disabled",
228 | "-Dethernet=enabled",
229 | { value = "-Dlwip_debug=disabled", when = "machine.config_is_optimized" },
230 | ]
231 | dependencies = [
232 | "glib",
233 | ]
234 |
235 | [usrsctp]
236 | when = "machine.os != 'none'"
237 | name = "usrsctp"
238 | version = "f459ae9d3700c06e59d709901e92c08e31c6e623"
239 | url = "https://github.com/frida/usrsctp.git"
240 | options = [
241 | "-Dsctp_inet=false",
242 | "-Dsctp_inet6=false",
243 | "-Dsctp_build_programs=false",
244 | ]
245 |
246 | [libgee]
247 | when = "machine.os != 'none'"
248 | name = "libgee"
249 | version = "ad17ed847039469fcc2dc711ecfee2bbf7d2bf87"
250 | url = "https://github.com/frida/libgee.git"
251 | options = [
252 | "-Ddisable-internal-asserts=true",
253 | "-Ddisable-introspection=true",
254 | ]
255 | dependencies = [
256 | "glib",
257 | ]
258 |
259 | [json-glib]
260 | name = "JSON-GLib"
261 | version = "1a39cbe151b02c4192987c8fcc98997a59db2154"
262 | url = "https://github.com/frida/json-glib.git"
263 | options = [
264 | "-Dintrospection=disabled",
265 | "-Dgtk_doc=disabled",
266 | "-Dtests=false",
267 | "-Dnls=disabled",
268 | ]
269 | dependencies = [
270 | "glib",
271 | ]
272 |
273 | [libpsl]
274 | when = "machine.os != 'none'"
275 | name = "libpsl"
276 | version = "b76c0fed2e27353d5fbb067ecdfdf76d2281eb91"
277 | url = "https://github.com/frida/libpsl.git"
278 | options = [
279 | "-Druntime=no",
280 | "-Dbuiltin=false",
281 | "-Dtests=false",
282 | ]
283 |
284 | [libxml2]
285 | when = "machine.os != 'none'"
286 | name = "libxml2"
287 | version = "f09ad5551829b7f2df3666759e701644a0ea8558"
288 | url = "https://github.com/frida/libxml2.git"
289 | options = [
290 | "-Dhttp=disabled",
291 | "-Dlzma=disabled",
292 | "-Dzlib=disabled",
293 | ]
294 |
295 | [ngtcp2]
296 | when = "machine.os != 'none'"
297 | name = "ngtcp2"
298 | version = "828dcaed498b40954e1b496664a3309796968db6"
299 | url = "https://github.com/frida/ngtcp2.git"
300 | dependencies = [
301 | "openssl",
302 | ]
303 |
304 | [nghttp2]
305 | when = "machine.os != 'none'"
306 | name = "nghttp2"
307 | version = "ae13d24ea59c30e36ca53d1b22c4e664588d0445"
308 | url = "https://github.com/frida/nghttp2.git"
309 |
310 | [libsoup]
311 | when = "machine.os != 'none'"
312 | name = "libsoup"
313 | version = "80dc080951c9037aef51a40ffbe4508d3ce98d1b"
314 | url = "https://github.com/frida/libsoup.git"
315 | options = [
316 | "-Dgssapi=disabled",
317 | "-Dntlm=disabled",
318 | "-Dbrotli=enabled",
319 | "-Dtls_check=false",
320 | "-Dintrospection=disabled",
321 | "-Dvapi=disabled",
322 | "-Ddocs=disabled",
323 | "-Dexamples=disabled",
324 | "-Dtests=false",
325 | "-Dsysprof=disabled",
326 | ]
327 | dependencies = [
328 | "glib",
329 | "nghttp2",
330 | "sqlite",
331 | "libpsl",
332 | "brotli",
333 | ]
334 |
335 | [capstone]
336 | name = "Capstone"
337 | version = "27fb5a53bc77baf3c3af7ccd466041418cecca35"
338 | url = "https://github.com/frida/capstone.git"
339 | options = [
340 | "-Darchs=all",
341 | "-Duse_arch_registration=true",
342 | "-Dx86_att_disable=true",
343 | "-Dcli=disabled",
344 | ]
345 |
346 | [quickjs]
347 | name = "QuickJS"
348 | version = "f48e71e5240106dbca136e60f751e206d15cdb3f"
349 | url = "https://github.com/frida/quickjs.git"
350 | options = [
351 | "-Dlibc=false",
352 | "-Dbignum=true",
353 | "-Datomics=disabled",
354 | "-Dstack_check=disabled",
355 | { value = "-Dstack_mode=optimize", when = "machine.os == 'none'" },
356 | ]
357 |
358 | [tinycc]
359 | when = """ \
360 | machine.arch in { \
361 | 'x86', 'x86_64', \
362 | 'arm', 'armbe8', 'armeabi', 'armhf', \
363 | 'arm64', 'arm64e', 'arm64eoabi' \
364 | } \
365 | """
366 | name = "TinyCC"
367 | version = "86c2ba69e97af8f311dff04fcf517b760b3e4491"
368 | url = "https://github.com/frida/tinycc.git"
369 |
370 | [openssl]
371 | when = "machine.os != 'none'"
372 | name = "OpenSSL"
373 | version = "5029c4562d23547d8e29768e1b0de573c52bf3fc"
374 | url = "https://github.com/frida/openssl.git"
375 | options = [
376 | "-Dcli=disabled",
377 | { value = "-Dasm=disabled", when = "machine.config == 'mingw'" }
378 | ]
379 |
380 | [v8]
381 | when = """ \
382 | machine.config != 'mingw' \
383 | and machine.arch != 'arm64beilp32' \
384 | and not machine.arch.startswith('mips') \
385 | and not machine.arch.startswith('powerpc') \
386 | and machine.os not in {'none', 'qnx'} \
387 | """
388 | name = "V8"
389 | version = "669119f601663b73fc01f8eee02cf2f093bbf25b"
390 | url = "https://github.com/frida/v8.git"
391 | options = [
392 | "-Ddebug=false",
393 | "-Dembedder_string=-frida",
394 | "-Dsnapshot_compression=disabled",
395 | "-Dpointer_compression=disabled",
396 | "-Dshared_ro_heap=disabled",
397 | "-Dcppgc_caged_heap=disabled",
398 | ]
399 | dependencies = [
400 | "zlib",
401 | { id = "zlib", for_machine = "build" },
402 | ]
403 |
404 | [libcxx]
405 | when = "machine.is_apple"
406 | name = "libc++"
407 | version = "626b6731a24ed412a70b60b5fdaab3f36632d6f6"
408 | url = "https://github.com/frida/libcxx.git"
409 |
--------------------------------------------------------------------------------
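deps.toml is plain TOML, so its entries can be inspected with any TOML parser. A hypothetical sketch using tomlkit (which this repository vendors as a git submodule), assuming it is run from the repository root:

    from pathlib import Path
    import tomlkit

    manifest = tomlkit.loads(Path("deps.toml").read_text(encoding="utf-8"))
    print(manifest["dependencies"]["version"])  # "20250919"
    print(manifest["glib"]["url"])              # https://github.com/frida/glib.git
    for option in manifest["glib"]["options"]:
        print(option)                           # plain strings or { value, when } tables
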
/machine_spec.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | from dataclasses import dataclass
3 | import platform
4 | import re
5 | import subprocess
6 | from typing import List, Optional
7 |
8 | if platform.system() == "Windows":
9 | import ctypes
10 | from ctypes import wintypes
11 |
12 |
13 | @dataclass
14 | class MachineSpec:
15 | os: str
16 | arch: str
17 | config: Optional[str] = None
18 | triplet: Optional[str] = None
19 |
20 | @staticmethod
21 | def make_from_local_system() -> MachineSpec:
22 | os = detect_os()
23 | arch = detect_arch()
24 | config = None
25 |
26 | if os == "linux":
27 | try:
28 | output = subprocess.run(["ldd", "--version"],
29 | stdout=subprocess.PIPE,
30 | stderr=subprocess.STDOUT,
31 | encoding="utf-8").stdout
32 | if "musl" in output:
33 | config = "musl"
34 | except:
35 | pass
36 |
37 | return MachineSpec(os, arch, config)
38 |
39 | @staticmethod
40 | def parse(raw_spec: str) -> MachineSpec:
41 | os = None
42 | arch = None
43 | config = None
44 | triplet = None
45 |
46 | tokens = raw_spec.split("-")
47 | if len(tokens) in {3, 4}:
48 | arch = tokens[0]
49 | m = TARGET_TRIPLET_ARCH_PATTERN.match(arch)
50 | if m is not None:
51 | kernel = tokens[-2]
52 | system = tokens[-1]
53 |
54 | if kernel == "w64":
55 | os = "windows"
56 | elif kernel == "nto":
57 | os = "qnx"
58 | else:
59 | os = kernel
60 |
61 | if arch[0] == "i":
62 | arch = "x86"
63 | elif arch == "arm":
64 | if system.endswith("eabihf"):
65 | arch = "armhf"
66 | elif os == "qnx" and system.endswith("eabi"):
67 | arch = "armeabi"
68 | elif arch == "armeb":
69 | arch = "armbe8"
70 | elif arch == "aarch64":
71 | arch = "arm64"
72 | elif arch == "aarch64_be":
73 | arch = "arm64be"
74 | if system.endswith("_ilp32"):
75 | arch += "ilp32"
76 |
77 | if system.startswith("musl"):
78 | config = "musl"
79 | elif kernel == "w64":
80 | config = "mingw"
81 |
82 | triplet = raw_spec
83 |
84 | if os is None:
85 | os, arch, *rest = tokens
86 | if rest:
87 | assert len(rest) == 1
88 | config = rest[0]
89 |
90 | return MachineSpec(os, arch, config, triplet)
91 |
92 | def evolve(self,
93 | os: Optional[str] = None,
94 | arch: Optional[str] = None,
95 | config: Optional[str] = None,
96 | triplet: Optional[str] = None) -> MachineSpec:
97 | return MachineSpec(
98 | os if os is not None else self.os,
99 | arch if arch is not None else self.arch,
100 | config if config is not None else self.config,
101 | triplet if triplet is not None else self.triplet,
102 | )
103 |
104 | def default_missing(self, recommended_vscrt: Optional[str] = None) -> MachineSpec:
105 | config = self.config
106 | if config is None and self.toolchain_is_msvc:
107 | if recommended_vscrt is not None:
108 | config = recommended_vscrt
109 | else:
110 | config = "mt"
111 | return self.evolve(config=config)
112 |
113 | def maybe_adapt_to_host(self, host_machine: MachineSpec) -> MachineSpec:
114 | if self.identifier == host_machine.identifier and host_machine.triplet is not None:
115 | return host_machine
116 | if self.os == "windows":
117 | if host_machine.arch in {"x86_64", "x86"}:
118 | return host_machine
119 | if self.arch == host_machine.arch:
120 | return host_machine
121 | return self
122 |
123 | @property
124 | def identifier(self) -> str:
125 | parts = [self.os, self.arch]
126 | if self.config is not None:
127 | parts += [self.config]
128 | return "-".join(parts)
129 |
130 | @property
131 | def os_dash_arch(self) -> str:
132 | return f"{self.os}-{self.arch}"
133 |
134 | @property
135 | def os_dash_config(self) -> str:
136 | parts = [self.os]
137 | if self.config is not None:
138 | parts += [self.config]
139 | return "-".join(parts)
140 |
141 | @property
142 | def config_is_optimized(self) -> bool:
143 | if self.toolchain_is_msvc:
144 | return self.config in {"md", "mt"}
145 | return True
146 |
147 | @property
148 | def meson_optimization_options(self) -> List[str]:
149 | if self.config_is_optimized:
150 | optimization = "s"
151 | ndebug = "true"
152 | else:
153 | optimization = "0"
154 | ndebug = "false"
155 | return [
156 | f"-Doptimization={optimization}",
157 | f"-Db_ndebug={ndebug}",
158 | ]
159 |
160 | @property
161 | def executable_suffix(self) -> str:
162 | return ".exe" if self.os == "windows" else ""
163 |
164 | @property
165 | def msvc_platform(self) -> str:
166 | return "x64" if self.arch == "x86_64" else self.arch
167 |
168 | @property
169 | def is_apple(self) -> bool:
170 | return self.os in {"macos", "ios", "watchos", "tvos", "xros"}
171 |
172 | @property
173 | def system(self) -> str:
174 | return "darwin" if self.is_apple else self.os
175 |
176 | @property
177 | def subsystem(self) -> str:
178 | return self.os_dash_config if self.is_apple else self.os
179 |
180 | @property
181 | def kernel(self) -> str:
182 | return KERNELS.get(self.os, self.os)
183 |
184 | @property
185 | def cpu_family(self) -> str:
186 | arch = self.arch
187 | return CPU_FAMILIES.get(arch, arch)
188 |
189 | @property
190 | def cpu(self) -> str:
191 | arch = self.arch
192 |
193 | mappings_to_search = [
194 | CPU_TYPES_PER_OS_OVERRIDES.get(self.os, {}),
195 | CPU_TYPES,
196 | ]
197 | for m in mappings_to_search:
198 | cpu = m.get(arch, None)
199 | if cpu is not None:
200 | return cpu
201 |
202 | return arch
203 |
204 | @property
205 | def endian(self) -> str:
206 | return "big" if self.arch in BIG_ENDIAN_ARCHS else "little"
207 |
208 | @property
209 | def pointer_size(self) -> int:
210 | arch = self.arch
211 | if arch in {"x86_64", "s390x"}:
212 | return 8
213 | if (arch.startswith("arm64") and not arch.endswith("ilp32")) or arch.startswith("mips64"):
214 | return 8
215 | return 4
216 |
217 | @property
218 | def libdatadir(self) -> str:
219 | return "libdata" if self.os == "freebsd" else "lib"
220 |
221 | @property
222 | def toolchain_is_msvc(self) -> bool:
223 | return self.os == "windows" and self.config != "mingw"
224 |
225 | @property
226 | def toolchain_can_strip(self) -> bool:
227 | return not self.toolchain_is_msvc
228 |
229 | def __eq__(self, other):
230 | if isinstance(other, MachineSpec):
231 | return other.identifier == self.identifier
232 | return False
233 |
234 |
235 | def detect_os() -> str:
236 | os = platform.system().lower()
237 | if os == "darwin":
238 | os = "macos"
239 | return os
240 |
241 |
242 | def detect_arch() -> str:
243 | if platform.system() == "Windows":
244 | return detect_arch_windows()
245 | arch = platform.machine().lower()
246 | return ARCHS.get(arch, arch)
247 |
248 | def detect_arch_windows():
249 | try:
250 | code = detect_arch_windows_modern()
251 | except AttributeError:
252 | code = detect_arch_windows_legacy()
253 | if code == PROCESSOR_ARCHITECTURE_INTEL:
254 | return "x86"
255 | elif code in {PROCESSOR_ARCHITECTURE_AMD64, IMAGE_FILE_MACHINE_AMD64}:
256 | return "x86_64"
257 | elif code in {PROCESSOR_ARCHITECTURE_ARM64, IMAGE_FILE_MACHINE_ARM64}:
258 | return "arm64"
259 | else:
260 | raise RuntimeError(f"unrecognized native architecture code: {code!r}")
261 |
262 | def detect_arch_windows_modern():
263 | kernel32 = ctypes.WinDLL("kernel32", use_last_error=True)
264 |
265 | try:
266 | is_wow64_process = kernel32.IsWow64Process2
267 | except AttributeError:
268 | raise
269 |
270 | is_wow64_process.argtypes = (
271 | wintypes.HANDLE,
272 | ctypes.POINTER(wintypes.WORD),
273 | ctypes.POINTER(wintypes.WORD),
274 | )
275 | is_wow64_process.restype = wintypes.BOOL
276 |
277 | process_machine = wintypes.WORD(0)
278 | native_machine = wintypes.WORD(0)
279 |
280 | ok = is_wow64_process(
281 | kernel32.GetCurrentProcess(),
282 | ctypes.byref(process_machine),
283 | ctypes.byref(native_machine)
284 | )
285 | if not ok:
286 | raise ctypes.WinError(ctypes.get_last_error())
287 |
288 | return native_machine.value
289 |
290 | def detect_arch_windows_legacy():
291 | class SYSTEM_INFO(ctypes.Structure):
292 | _fields_ = [
293 | ("wProcessorArchitecture", wintypes.WORD),
294 | ("wReserved", wintypes.WORD),
295 | ("dwPageSize", wintypes.DWORD),
296 | ("lpMinimumApplicationAddress", ctypes.c_void_p),
297 | ("lpMaximumApplicationAddress", ctypes.c_void_p),
298 | ("dwActiveProcessorMask", ctypes.c_void_p),
299 | ("dwNumberOfProcessors", wintypes.DWORD),
300 | ("dwProcessorType", wintypes.DWORD),
301 | ("dwAllocationGranularity", wintypes.DWORD),
302 | ("wProcessorLevel", wintypes.WORD),
303 | ("wProcessorRevision", wintypes.WORD),
304 | ]
305 |
306 | kernel32 = ctypes.WinDLL("kernel32", use_last_error=True)
307 |
308 | get_native_system_info = kernel32.GetNativeSystemInfo
309 | get_native_system_info.argtypes = (ctypes.POINTER(SYSTEM_INFO),)
310 | get_native_system_info.restype = None
311 |
312 | info = SYSTEM_INFO()
313 | get_native_system_info(ctypes.byref(info))
314 | return info.wProcessorArchitecture
315 |
316 |
317 | ARCHS = {
318 | "amd64": "x86_64",
319 | "armv7l": "armhf",
320 | "aarch64": "arm64",
321 | }
322 |
323 | KERNELS = {
324 | "windows": "nt",
325 |
326 | "macos": "xnu",
327 | "ios": "xnu",
328 | "watchos": "xnu",
329 | "tvos": "xnu",
330 |
331 | "qnx": "nto",
332 | }
333 |
334 | CPU_FAMILIES = {
335 | "armbe8": "arm",
336 | "armeabi": "arm",
337 | "armhf": "arm",
338 | "armv6kz": "arm",
339 |
340 | "arm64": "aarch64",
341 | "arm64be": "aarch64",
342 | "arm64beilp32": "aarch64",
343 | "arm64e": "aarch64",
344 | "arm64eoabi": "aarch64",
345 |
346 | "mipsel": "mips",
347 | "mips64el": "mips64",
348 |
349 | "powerpc": "ppc"
350 | }
351 |
352 | CPU_TYPES = {
353 | "arm": "armv7",
354 | "armbe8": "armv6",
355 | "armhf": "armv7hf",
356 | "armeabi": "armv7eabi",
357 | "armv6kz": "armv6",
358 |
359 | "arm64": "aarch64",
360 | "arm64be": "aarch64",
361 | "arm64beilp32": "aarch64",
362 | "arm64e": "aarch64",
363 | "arm64eoabi": "aarch64",
364 | }
365 |
366 | CPU_TYPES_PER_OS_OVERRIDES = {
367 | "linux": {
368 | "arm": "armv5t",
369 | "armbe8": "armv6t",
370 | "armhf": "armv7a",
371 | "armv6kz": "armv6t",
372 |
373 | "mips": "mips1",
374 | "mipsel": "mips1",
375 |
376 | "mips64": "mips64r2",
377 | "mips64el": "mips64r2",
378 | },
379 | "android": {
380 | "x86": "i686",
381 | },
382 | "qnx": {
383 | "arm": "armv6",
384 | "armeabi": "armv7",
385 | },
386 | }
387 |
388 | BIG_ENDIAN_ARCHS = {
389 | "arm64be",
390 | "arm64beilp32",
391 | "armbe8",
392 | "mips",
393 | "mips64",
394 | "ppc",
395 | "ppc64",
396 | "s390x",
397 | }
398 |
399 | TARGET_TRIPLET_ARCH_PATTERN = re.compile(r"^(i.86|x86_64|arm\w*|aarch64(_be)?|mips\w*|powerpc|s390x)$")
400 |
401 | PROCESSOR_ARCHITECTURE_INTEL = 0
402 | PROCESSOR_ARCHITECTURE_AMD64 = 9
403 | PROCESSOR_ARCHITECTURE_ARM64 = 12
404 |
405 | IMAGE_FILE_MACHINE_AMD64 = 0x8664
406 | IMAGE_FILE_MACHINE_ARM64 = 0xAA64
407 |
--------------------------------------------------------------------------------
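A quick sketch of how the detection helpers and lookup tables above are typically consumed when normalizing the local machine. This is illustrative only; the `platform.machine()` values and the hard-coded `"linux"` key are assumptions, not taken from this file:

import platform

raw = platform.machine().lower()                      # e.g. "amd64", "aarch64", "armv7l"
arch = ARCHS.get(raw, raw)                            # canonical arch: "x86_64", "arm64", "armhf", ...
cpu_family = CPU_FAMILIES.get(arch, arch)             # Meson cpu_family, e.g. "aarch64"
cpu = CPU_TYPES_PER_OS_OVERRIDES.get("linux", {}).get(arch, CPU_TYPES.get(arch, arch))
endian = "big" if arch in BIG_ENDIAN_ARCHS else "little"
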
/env_generic.py:
--------------------------------------------------------------------------------
1 | from collections import OrderedDict
2 | from configparser import ConfigParser
3 | import locale
4 | from pathlib import Path
5 | import shutil
6 | import subprocess
7 | import tempfile
8 | from typing import Callable, Dict, List, Optional, Mapping, Sequence, Tuple
9 |
10 | from . import winenv
11 | from .machine_file import strv_to_meson
12 | from .machine_spec import MachineSpec
13 |
14 |
15 | def init_machine_config(machine: MachineSpec,
16 | build_machine: MachineSpec,
17 | is_cross_build: bool,
18 | environ: Dict[str, str],
19 | toolchain_prefix: Optional[Path],
20 | sdk_prefix: Optional[Path],
21 | call_selected_meson: Callable,
22 | config: ConfigParser,
23 | outpath: List[str],
24 | outenv: Dict[str, str],
25 | outdir: Path):
26 | allow_undefined_symbols = machine.os == "freebsd"
27 |
28 | options = config["built-in options"]
29 | options["c_args"] = "c_like_flags"
30 | options["cpp_args"] = "c_like_flags + cxx_like_flags"
31 | options["c_link_args"] = "linker_flags"
32 | options["cpp_link_args"] = "linker_flags + cxx_link_flags"
33 | options["b_lundef"] = str(not allow_undefined_symbols).lower()
34 |
35 | binaries = config["binaries"]
36 | cc = None
37 | common_flags = []
38 | c_like_flags = []
39 | linker_flags = []
40 | cxx_like_flags = []
41 | cxx_link_flags = []
42 |
43 | triplet = machine.triplet
44 | if triplet is not None:
45 | try:
46 | cc, gcc_binaries = resolve_gcc_binaries(toolprefix=triplet + "-")
47 | binaries.update(gcc_binaries)
48 | except CompilerNotFoundError:
49 | pass
50 |
51 | diagnostics = None
52 | if cc is None:
53 | with tempfile.TemporaryDirectory() as raw_prober_dir:
54 | prober_dir = Path(raw_prober_dir)
55 | machine_file = prober_dir / "machine.txt"
56 |
57 | argv = [
58 | "env2mfile",
59 | "-o", machine_file,
60 | "--native" if machine == build_machine else "--cross",
61 | ]
62 |
63 | if machine != build_machine:
64 | argv += [
65 | "--system", machine.system,
66 | "--subsystem", machine.subsystem,
67 | "--kernel", machine.kernel,
68 | "--cpu-family", machine.cpu_family,
69 | "--cpu", machine.cpu,
70 | "--endian", machine.endian,
71 | ]
72 |
73 | process = call_selected_meson(argv,
74 | cwd=raw_prober_dir,
75 | env=environ,
76 | stdout=subprocess.PIPE,
77 | stderr=subprocess.STDOUT,
78 | encoding=locale.getpreferredencoding())
79 | if process.returncode == 0:
80 | mcfg = ConfigParser()
81 | mcfg.read(machine_file)
82 |
83 | for section in mcfg.sections():
84 | copy = config[section] if section in config else OrderedDict()
85 | for key, val in mcfg.items(section):
86 | if section == "binaries":
87 | argv = eval(val.replace("\\", "\\\\"))
88 | if not Path(argv[0]).is_absolute():
89 | path = shutil.which(argv[0])
90 | if path is None:
91 | raise BinaryNotFoundError(f"unable to locate {argv[0]}")
92 | argv[0] = path
93 | val = strv_to_meson(argv)
94 | if key in {"c", "cpp"}:
95 | val += " + common_flags"
96 | if key in copy and section == "built-in options" and key.endswith("_args"):
97 | val = val + " + " + copy[key]
98 | copy[key] = val
99 | config[section] = copy
100 |
101 | raw_cc = binaries.get("c", None)
102 | if raw_cc is not None:
103 | cc = eval(raw_cc.replace("\\", "\\\\"), None, {"common_flags": []})
104 | else:
105 | diagnostics = process.stdout
106 |
107 | linker_flavor = None
108 |
109 | if cc is not None \
110 | and machine.os == "windows" \
111 | and machine.toolchain_is_msvc:
112 | linker_flavor = detect_linker_flavor(cc)
113 | detected_wrong_toolchain = linker_flavor != "msvc"
114 | if detected_wrong_toolchain:
115 | cc = None
116 | linker_flavor = None
117 |
118 | if cc is None:
119 | if machine.os == "windows":
120 | detect_tool_path = lambda name: winenv.detect_msvs_tool_path(machine, build_machine, name, toolchain_prefix)
121 |
122 | cc = [str(detect_tool_path("cl.exe"))]
123 | lib = [str(detect_tool_path("lib.exe"))]
124 | link = [str(detect_tool_path("link.exe"))]
125 | assembler_name = MSVC_ASSEMBLER_NAMES[machine.arch]
126 | assembler_tool = [str(detect_tool_path(assembler_name + ".exe"))]
127 |
128 | raw_cc = strv_to_meson(cc) + " + common_flags"
129 | binaries["c"] = raw_cc
130 | binaries["cpp"] = raw_cc
131 | binaries["lib"] = strv_to_meson(lib) + " + common_flags"
132 | binaries["link"] = strv_to_meson(link) + " + common_flags"
133 | binaries[assembler_name] = strv_to_meson(assembler_tool) + " + common_flags"
134 |
135 | runtime_dirs = winenv.detect_msvs_runtime_path(machine, build_machine, toolchain_prefix)
136 | outpath.extend(runtime_dirs)
137 |
138 | vs_dir = winenv.detect_msvs_installation_dir(toolchain_prefix)
139 | outenv["VSINSTALLDIR"] = str(vs_dir) + "\\"
140 | outenv["VCINSTALLDIR"] = str(vs_dir / "VC") + "\\"
141 | outenv["Platform"] = machine.msvc_platform
142 | outenv["INCLUDE"] = ";".join([str(path) for path in winenv.detect_msvs_include_path(toolchain_prefix)])
143 | outenv["LIB"] = ";".join([str(path) for path in winenv.detect_msvs_library_path(machine, toolchain_prefix)])
144 | elif machine != build_machine \
145 | and "CC" not in environ \
146 | and "CFLAGS" not in environ \
147 | and machine.os == build_machine.os \
148 | and machine.os == "linux" \
149 | and machine.pointer_size == 4 \
150 | and build_machine.pointer_size == 8:
151 | try:
152 | cc, gcc_binaries = resolve_gcc_binaries()
153 | binaries.update(gcc_binaries)
154 | common_flags += ["-m32"]
155 | except CompilerNotFoundError:
156 | pass
157 |
158 | if cc is None:
159 | suffix = ":\n" + diagnostics if diagnostics is not None else ""
160 | raise CompilerNotFoundError("no C compiler found" + suffix)
161 |
162 | if "cpp" not in binaries:
163 | raise CompilerNotFoundError("no C++ compiler found")
164 |
165 | if linker_flavor is None:
166 | linker_flavor = detect_linker_flavor(cc)
167 |
168 | strip_binary = binaries.get("strip", None)
169 | if strip_binary is not None:
170 | strip_arg = "-Sx" if linker_flavor == "apple" else "--strip-all"
171 | binaries["strip"] = strip_binary[:-1] + f", '{strip_arg}']"
172 |
173 | if linker_flavor == "msvc":
174 | for gnu_tool in ["ar", "as", "ld", "nm", "objcopy", "objdump",
175 | "ranlib", "readelf", "size", "strip", "windres"]:
176 | binaries.pop(gnu_tool, None)
177 |
178 | c_like_flags += [
179 | "/GS-",
180 | "/Gy",
181 | "/Zc:inline",
182 | "/fp:fast",
183 | ]
184 | if machine.arch == "x86":
185 | c_like_flags += ["/arch:SSE2"]
186 |
187 | # Relax C++11 compliance for XP compatibility.
188 | cxx_like_flags += ["/Zc:threadSafeInit-"]
189 | else:
190 | if machine.os == "qnx":
191 | common_flags += ARCH_COMMON_FLAGS_QNX.get(machine.arch, [])
192 | else:
193 | common_flags += ARCH_COMMON_FLAGS_UNIX.get(machine.arch, [])
194 | c_like_flags += ARCH_C_LIKE_FLAGS_UNIX.get(machine.arch, [])
195 |
196 | c_like_flags += [
197 | "-ffunction-sections",
198 | "-fdata-sections",
199 | ]
200 |
201 | if linker_flavor.startswith("gnu-"):
202 | linker_flags += ["-static-libgcc"]
203 | if machine.os != "windows":
204 | linker_flags += ["-Wl,-z,noexecstack"]
205 | if machine.os not in {"windows", "none"}:
206 | linker_flags += ["-Wl,-z,relro"]
207 | cxx_link_flags += ["-static-libstdc++"]
208 |
209 | if linker_flavor == "apple":
210 | linker_flags += ["-Wl,-dead_strip"]
211 | else:
212 | linker_flags += ["-Wl,--gc-sections"]
213 | if linker_flavor == "gnu-gold":
214 | linker_flags += ["-Wl,--icf=all"]
215 |
216 | if machine.os == "none":
217 | linker_flags += ["-specs=nosys.specs"]
218 |
219 | constants = config["constants"]
220 | constants["common_flags"] = strv_to_meson(common_flags)
221 | constants["c_like_flags"] = strv_to_meson(c_like_flags)
222 | constants["linker_flags"] = strv_to_meson(linker_flags)
223 | constants["cxx_like_flags"] = strv_to_meson(cxx_like_flags)
224 | constants["cxx_link_flags"] = strv_to_meson(cxx_link_flags)
225 |
226 |
227 | def resolve_gcc_binaries(toolprefix: str = "") -> Tuple[List[str], Dict[str, str]]:
228 | cc = None
229 | binaries = OrderedDict()
230 |
231 | for identifier in GCC_TOOL_IDS:
232 | name = GCC_TOOL_NAMES.get(identifier, identifier)
233 | full_name = toolprefix + name
234 |
235 | val = shutil.which(full_name)
236 | if val is None:
237 | raise CompilerNotFoundError(f"missing {full_name}")
238 |
239 | # QNX SDP 6.5 gcc-* tools are broken, erroring out with:
240 | # > sorry - this program has been built without plugin support
241 | # We detect this and use the tool without the gcc-* prefix.
242 | if name.startswith("gcc-"):
243 | p = subprocess.run([val, "--version"], capture_output=True)
244 | if p.returncode != 0:
245 | full_name = toolprefix + name[4:]
246 | val = shutil.which(full_name)
247 | if val is None:
248 | raise CompilerNotFoundError(f"missing {full_name}")
249 |
250 | if identifier == "c":
251 | cc = [val]
252 |
253 | extra = " + common_flags" if identifier in {"c", "cpp"} else ""
254 |
255 | binaries[identifier] = strv_to_meson([val]) + extra
256 |
257 | return (cc, binaries)
258 |
259 |
260 | def detect_linker_flavor(cc: List[str]) -> str:
261 | linker_version = subprocess.run(cc + ["-Wl,--version"],
262 | stdout=subprocess.PIPE,
263 | stderr=subprocess.STDOUT,
264 | encoding=locale.getpreferredencoding()).stdout
265 | if "Microsoft " in linker_version:
266 | return "msvc"
267 | if "GNU ld " in linker_version:
268 | return "gnu-ld"
269 | if "GNU gold " in linker_version:
270 | return "gnu-gold"
271 | if linker_version.startswith("LLD ") or "compatible with GNU linkers" in linker_version:
272 | return "lld"
273 | if linker_version.startswith("ld: "):
274 | return "apple"
275 |
276 | excerpt = linker_version.split("\n")[0].rstrip()
277 | raise LinkerDetectionError(f"unknown linker: '{excerpt}'")
278 |
279 |
280 | class CompilerNotFoundError(Exception):
281 | pass
282 |
283 |
284 | class BinaryNotFoundError(Exception):
285 | pass
286 |
287 |
288 | class LinkerDetectionError(Exception):
289 | pass
290 |
291 |
292 | ARCH_COMMON_FLAGS_UNIX = {
293 | "x86": [
294 | "-march=pentium4",
295 | ],
296 | "arm": [
297 | "-march=armv5t",
298 | "-mthumb",
299 | ],
300 | "armbe8": [
301 | "-mcpu=cortex-a72",
302 | "-mthumb",
303 | ],
304 | "armhf": [
305 | "-march=armv7-a",
306 | "-mtune=cortex-a7",
307 | "-mfpu=neon-vfpv4",
308 | "-mthumb",
309 | ],
310 | "armv6kz": [
311 | "-march=armv6kz",
312 | "-mcpu=arm1176jzf-s",
313 | ],
314 | "arm64": [
315 | "-march=armv8-a",
316 | ],
317 | "mips": [
318 | "-march=mips1",
319 | "-mfp32",
320 | ],
321 | "mipsel": [
322 | "-march=mips1",
323 | "-mfp32",
324 | ],
325 | "mips64": [
326 | "-march=mips64r2",
327 | "-mabi=64",
328 | ],
329 | "mips64el": [
330 | "-march=mips64r2",
331 | "-mabi=64",
332 | ],
333 | "s390x": [
334 | "-march=z10",
335 | "-m64",
336 | ],
337 | }
338 |
339 | ARCH_COMMON_FLAGS_QNX = {
340 | "x86": [
341 | "-march=i686",
342 | ],
343 | "arm": [
344 | "-march=armv6",
345 | "-mno-unaligned-access",
346 | ],
347 | "armeabi": [
348 | "-march=armv7-a",
349 | "-mno-unaligned-access",
350 | ],
351 | }
352 |
353 | ARCH_C_LIKE_FLAGS_UNIX = {
354 | "x86": [
355 | "-mfpmath=sse",
356 | "-mstackrealign",
357 | ],
358 | }
359 |
360 | GCC_TOOL_IDS = [
361 | "c",
362 | "cpp",
363 | "ar",
364 | "nm",
365 | "ranlib",
366 | "strip",
367 | "readelf",
368 | "objcopy",
369 | "objdump",
370 | ]
371 |
372 | GCC_TOOL_NAMES = {
373 | "c": "gcc",
374 | "cpp": "g++",
375 | "ar": "gcc-ar",
376 | "nm": "gcc-nm",
377 | "ranlib": "gcc-ranlib",
378 | }
379 |
380 | MSVC_ASSEMBLER_NAMES = {
381 | "x86": "ml",
382 | "x86_64": "ml64",
383 | "arm64": "armasm64",
384 | }
385 |
--------------------------------------------------------------------------------
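A minimal usage sketch for env_generic.py's probing helpers above; the cross prefix "aarch64-linux-gnu-" is only an example value:

try:
    cc, gcc_binaries = resolve_gcc_binaries(toolprefix="aarch64-linux-gnu-")
    flavor = detect_linker_flavor(cc)   # one of "msvc", "gnu-ld", "gnu-gold", "lld", "apple"
except CompilerNotFoundError:
    cc, flavor = None, None             # init_machine_config() then falls back to env2mfile probing
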
/env.py:
--------------------------------------------------------------------------------
1 | from collections import OrderedDict
2 | from configparser import ConfigParser
3 | from dataclasses import dataclass
4 | import os
5 | from pathlib import Path
6 | import platform
7 | import pprint
8 | import shlex
9 | import shutil
10 | import subprocess
11 | import sys
12 | from typing import Callable, Dict, List, Literal, Optional, Tuple
13 |
14 | from . import env_android, env_apple, env_generic, machine_file
15 | from .machine_file import bool_to_meson, str_to_meson, strv_to_meson
16 | from .machine_spec import MachineSpec
17 |
18 |
19 | @dataclass
20 | class MachineConfig:
21 | machine_file: Path
22 | binpath: List[Path]
23 | environ: Dict[str, str]
24 |
25 | def make_merged_environment(self, source_environ: Dict[str, str]) -> Dict[str, str]:
26 | menv = {**source_environ}
27 | menv.update(self.environ)
28 |
29 | if self.binpath:
30 | old_path = menv.get("PATH", "")
31 | old_dirs = old_path.split(os.pathsep) if old_path else []
32 | menv["PATH"] = os.pathsep.join([str(p) for p in self.binpath] + old_dirs)
33 |
34 | return menv
35 |
36 |
37 | DefaultLibrary = Literal["shared", "static"]
38 |
39 |
40 | def call_meson(argv, use_submodule, *args, **kwargs):
41 | return subprocess.run(query_meson_entrypoint(use_submodule) + argv, *args, **kwargs)
42 |
43 |
44 | def query_meson_entrypoint(use_submodule):
45 | if use_submodule:
46 | return [sys.executable, str(INTERNAL_MESON_ENTRYPOINT)]
47 | return ["meson"]
48 |
49 |
50 | def load_meson_config(machine: MachineSpec, flavor: str, build_dir: Path):
51 | return machine_file.load(query_machine_file_path(machine, flavor, build_dir))
52 |
53 |
54 | def query_machine_file_path(machine: MachineSpec, flavor: str, build_dir: Path) -> Path:
55 | return build_dir / f"frida{flavor}-{machine.identifier}.txt"
56 |
57 |
58 | def detect_default_prefix() -> Path:
59 | if platform.system() == "Windows":
60 | return Path(os.environ["ProgramFiles"]) / "Frida"
61 | return Path("/usr/local")
62 |
63 |
64 | def generate_machine_configs(build_machine: MachineSpec,
65 | host_machine: MachineSpec,
66 | environ: Dict[str, str],
67 | toolchain_prefix: Optional[Path],
68 | build_sdk_prefix: Optional[Path],
69 | host_sdk_prefix: Optional[Path],
70 | call_selected_meson: Callable,
71 | default_library: DefaultLibrary,
72 | outdir: Path) -> Tuple[MachineConfig, MachineConfig]:
73 | is_cross_build = host_machine != build_machine
74 |
75 | if is_cross_build:
76 | build_environ = {build_envvar_to_host(k): v for k, v in environ.items() if k not in TOOLCHAIN_ENVVARS}
77 | else:
78 | build_environ = environ
79 |
80 | build_config = \
81 | generate_machine_config(build_machine,
82 | build_machine,
83 | is_cross_build,
84 | build_environ,
85 | toolchain_prefix,
86 | build_sdk_prefix,
87 | call_selected_meson,
88 | default_library,
89 | outdir)
90 |
91 | if is_cross_build:
92 | host_config = generate_machine_config(host_machine,
93 | build_machine,
94 | is_cross_build,
95 | environ,
96 | toolchain_prefix,
97 | host_sdk_prefix,
98 | call_selected_meson,
99 | default_library,
100 | outdir)
101 | else:
102 | host_config = build_config
103 |
104 | return (build_config, host_config)
105 |
106 |
107 | def generate_machine_config(machine: MachineSpec,
108 | build_machine: MachineSpec,
109 | is_cross_build: bool,
110 | environ: Dict[str, str],
111 | toolchain_prefix: Optional[Path],
112 | sdk_prefix: Optional[Path],
113 | call_selected_meson: Callable,
114 | default_library: DefaultLibrary,
115 | outdir: Path) -> MachineConfig:
116 | config = ConfigParser(dict_type=OrderedDict)
117 | config["constants"] = OrderedDict()
118 | config["binaries"] = OrderedDict()
119 | config["built-in options"] = OrderedDict()
120 | config["properties"] = OrderedDict()
121 | config["host_machine"] = OrderedDict([
122 | ("system", str_to_meson(machine.system)),
123 | ("subsystem", str_to_meson(machine.subsystem)),
124 | ("kernel", str_to_meson(machine.kernel)),
125 | ("cpu_family", str_to_meson(machine.cpu_family)),
126 | ("cpu", str_to_meson(machine.cpu)),
127 | ("endian", str_to_meson(machine.endian)),
128 | ])
129 |
130 | binaries = config["binaries"]
131 | builtin_options = config["built-in options"]
132 | properties = config["properties"]
133 |
134 | outpath = []
135 | outenv = OrderedDict()
136 | outdir.mkdir(parents=True, exist_ok=True)
137 |
138 | if machine.is_apple:
139 | impl = env_apple
140 | elif machine.os == "android":
141 | impl = env_android
142 | else:
143 | impl = env_generic
144 |
145 | impl.init_machine_config(machine,
146 | build_machine,
147 | is_cross_build,
148 | environ,
149 | toolchain_prefix,
150 | sdk_prefix,
151 | call_selected_meson,
152 | config,
153 | outpath,
154 | outenv,
155 | outdir)
156 |
157 | if machine.toolchain_is_msvc:
158 | builtin_options["b_vscrt"] = str_to_meson(machine.config)
159 |
160 | pkg_config = None
161 | vala_compiler = None
162 | if toolchain_prefix is not None:
163 | toolchain_bindir = toolchain_prefix / "bin"
164 | exe_suffix = build_machine.executable_suffix
165 |
166 | ninja_binary = toolchain_bindir / f"ninja{exe_suffix}"
167 | if ninja_binary.exists():
168 | outenv["NINJA"] = str(ninja_binary)
169 |
170 | for (tool_name, filename_suffix) in {("gdbus-codegen", ""),
171 | ("gio-querymodules", exe_suffix),
172 | ("glib-compile-resources", exe_suffix),
173 | ("glib-compile-schemas", exe_suffix),
174 | ("glib-genmarshal", ""),
175 | ("glib-mkenums", ""),
176 | ("flex", exe_suffix),
177 | ("bison", exe_suffix),
178 | ("nasm", exe_suffix)}:
179 | tool_path = toolchain_bindir / (tool_name + filename_suffix)
180 | if tool_path.exists():
181 | if tool_name == "bison":
182 | outenv["BISON_PKGDATADIR"] = str(toolchain_prefix / "share" / "bison")
183 | outenv["M4"] = str(toolchain_bindir / f"m4{exe_suffix}")
184 | else:
185 | tool_path = shutil.which(tool_name)
186 | if tool_path is not None:
187 | binaries[tool_name] = strv_to_meson([str(tool_path)])
188 |
189 | pkg_config_binary = toolchain_bindir / f"pkg-config{exe_suffix}"
190 | if not pkg_config_binary.exists():
191 | pkg_config_binary = shutil.which("pkg-config")
192 | if pkg_config_binary is not None:
193 | pkg_config = [
194 | str(pkg_config_binary),
195 | ]
196 | if default_library == "static":
197 | pkg_config += ["--static"]
198 | if sdk_prefix is not None:
199 | pkg_config += [f"--define-variable=frida_sdk_prefix={sdk_prefix}"]
200 | binaries["pkg-config"] = strv_to_meson(pkg_config)
201 |
202 | vala_compiler = detect_toolchain_vala_compiler(toolchain_prefix, build_machine)
203 |
204 | pkg_config_path = shlex.split(environ.get("PKG_CONFIG_PATH", "").replace("\\", "\\\\"))
205 |
206 | if sdk_prefix is not None:
207 | builtin_options["vala_args"] = strv_to_meson([
208 | "--vapidir=" + str(sdk_prefix / "share" / "vala" / "vapi")
209 | ])
210 |
211 | pkg_config_path += [str(sdk_prefix / machine.libdatadir / "pkgconfig")]
212 |
213 | sdk_bindir = sdk_prefix / "bin" / build_machine.os_dash_arch
214 | if sdk_bindir.exists():
215 | for f in sdk_bindir.iterdir():
216 | binaries[f.stem] = strv_to_meson([str(f)])
217 |
218 | if vala_compiler is not None:
219 | valac, vapidir = vala_compiler
220 | vala = [
221 | str(valac),
222 | f"--vapidir={vapidir}",
223 | ]
224 | if pkg_config is not None:
225 | wrapper = outdir / "frida-pkg-config.py"
226 | wrapper.write_text(make_pkg_config_wrapper(pkg_config, pkg_config_path), encoding="utf-8")
227 | vala += [f"--pkg-config={quote(sys.executable)} {quote(str(wrapper))}"]
228 | binaries["vala"] = strv_to_meson(vala)
229 |
230 | qmake6 = shutil.which("qmake6")
231 | if qmake6 is not None:
232 | binaries["qmake6"] = strv_to_meson([qmake6])
233 |
234 | builtin_options["pkg_config_path"] = strv_to_meson(pkg_config_path)
235 |
236 | needs_wrapper = needs_exe_wrapper(build_machine, machine, environ)
237 | properties["needs_exe_wrapper"] = bool_to_meson(needs_wrapper)
238 | if needs_wrapper:
239 | wrapper = find_exe_wrapper(machine, environ)
240 | if wrapper is not None:
241 | binaries["exe_wrapper"] = strv_to_meson(wrapper)
242 |
243 | machine_file = outdir / f"frida-{machine.identifier}.txt"
244 | with machine_file.open("w", encoding="utf-8") as f:
245 | config.write(f)
246 |
247 | return MachineConfig(machine_file, outpath, outenv)
248 |
249 |
250 | def needs_exe_wrapper(build_machine: MachineSpec,
251 | host_machine: MachineSpec,
252 | environ: Dict[str, str]) -> bool:
253 | return not can_run_host_binaries(build_machine, host_machine, environ)
254 |
255 |
256 | def can_run_host_binaries(build_machine: MachineSpec,
257 | host_machine: MachineSpec,
258 | environ: Dict[str, str]) -> bool:
259 | if host_machine == build_machine:
260 | return True
261 |
262 | build_os = build_machine.os
263 | build_arch = build_machine.arch
264 |
265 | host_os = host_machine.os
266 | host_arch = host_machine.arch
267 |
268 | if host_os == build_os:
269 | if build_os == "windows":
270 | return build_arch == "arm64" or host_arch != "arm64"
271 |
272 | if build_os == "macos":
273 | if build_arch == "arm64" and host_arch == "x86_64":
274 | return True
275 |
276 | if build_os == "linux" and host_machine.config == build_machine.config:
277 | if build_arch == "x86_64" and host_arch == "x86":
278 | return True
279 |
280 | return environ.get("FRIDA_CAN_RUN_HOST_BINARIES", "no") == "yes"
281 |
282 |
283 | def find_exe_wrapper(machine: MachineSpec,
284 | environ: Dict[str, str]) -> Optional[List[str]]:
285 | if machine.arch == "arm64beilp32":
286 | return None
287 |
288 | qemu_sysroot = environ.get("FRIDA_QEMU_SYSROOT")
289 | if qemu_sysroot is None:
290 | return None
291 |
292 | qemu_flavor = "qemu-" + QEMU_ARCHS.get(machine.arch, machine.arch)
293 | qemu_binary = shutil.which(qemu_flavor)
294 | if qemu_binary is None:
295 | raise QEMUNotFoundError(f"unable to find {qemu_flavor}, needed due to FRIDA_QEMU_SYSROOT being set")
296 |
297 | return [qemu_binary, "-L", qemu_sysroot]
298 |
299 |
300 | def make_pkg_config_wrapper(pkg_config: List[str], pkg_config_path: List[str]) -> str:
301 | return "\n".join([
302 | "import os",
303 | "import subprocess",
304 | "import sys",
305 | "",
306 | "args = [",
307 | f" {pprint.pformat(pkg_config, indent=4)[1:-1]},",
308 | " *sys.argv[1:],",
309 | "]",
310 | "env = {",
311 | " **os.environ,",
312 | f" 'PKG_CONFIG_PATH': {repr(os.pathsep.join(pkg_config_path))},",
313 | "}",
314 | f"p = subprocess.run(args, env=env)",
315 | "sys.exit(p.returncode)"
316 | ])
317 |
318 |
319 | def detect_toolchain_vala_compiler(toolchain_prefix: Path,
320 | build_machine: MachineSpec) -> Optional[Tuple[Path, Path]]:
321 | datadir = next((toolchain_prefix / "share").glob("vala-*"), None)
322 | if datadir is None:
323 | return None
324 |
325 | api_version = datadir.name.split("-", maxsplit=1)[1]
326 |
327 | valac = toolchain_prefix / "bin" / f"valac-{api_version}{build_machine.executable_suffix}"
328 | vapidir = datadir / "vapi"
329 | return (valac, vapidir)
330 |
331 |
332 | def build_envvar_to_host(name: str) -> str:
333 | if name.endswith("_FOR_BUILD"):
334 | return name[:-10]
335 | return name
336 |
337 |
338 | def quote(s: str) -> str:
339 | if " " not in s:
340 | return s
341 | return "\"" + s.replace("\"", "\\\"") + "\""
342 |
343 |
344 | class QEMUNotFoundError(Exception):
345 | pass
346 |
347 |
348 | INTERNAL_MESON_ENTRYPOINT = Path(__file__).resolve().parent / "meson" / "meson.py"
349 |
350 | # Based on mesonbuild/envconfig.py and mesonbuild/compilers/compilers.py
351 | TOOLCHAIN_ENVVARS = {
352 | # Compilers
353 | "CC",
354 | "CXX",
355 | "CSC",
356 | "CYTHON",
357 | "DC",
358 | "FC",
359 | "OBJC",
360 | "OBJCXX",
361 | "RUSTC",
362 | "VALAC",
363 | "NASM",
364 |
365 | # Linkers
366 | "CC_LD",
367 | "CXX_LD",
368 | "DC_LD",
369 | "FC_LD",
370 | "OBJC_LD",
371 | "OBJCXX_LD",
372 | "RUSTC_LD",
373 |
374 | # Binutils
375 | "AR",
376 | "AS",
377 | "LD",
378 | "NM",
379 | "OBJCOPY",
380 | "OBJDUMP",
381 | "RANLIB",
382 | "READELF",
383 | "SIZE",
384 | "STRINGS",
385 | "STRIP",
386 | "WINDRES",
387 |
388 | # Other tools
389 | "CMAKE",
390 | "QMAKE",
391 | "PKG_CONFIG",
392 | "PKG_CONFIG_PATH",
393 | "MAKE",
394 | "VAPIGEN",
395 | "LLVM_CONFIG",
396 |
397 | # Deprecated
398 | "D_LD",
399 | "F_LD",
400 | "RUST_LD",
401 | "OBJCPP_LD",
402 |
403 | # Flags
404 | "CFLAGS",
405 | "CXXFLAGS",
406 | "CUFLAGS",
407 | "OBJCFLAGS",
408 | "OBJCXXFLAGS",
409 | "FFLAGS",
410 | "DFLAGS",
411 | "VALAFLAGS",
412 | "RUSTFLAGS",
413 | "CYTHONFLAGS",
414 | "CSFLAGS",
415 | "LDFLAGS",
416 | }
417 |
418 | QEMU_ARCHS = {
419 | "armeabi": "arm",
420 | "armhf": "arm",
421 | "armbe8": "armeb",
422 | "arm64": "aarch64",
423 | "arm64be": "aarch64_be",
424 | }
425 |
--------------------------------------------------------------------------------
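A sketch of how a caller consumes env.py's generated configs, mirroring what meson_configure.py below does; the native build assumed here (host == build) and the hard-coded "static"/"build" values are placeholders:

build_machine = MachineSpec.make_from_local_system()
host_machine = build_machine
call_selected_meson = lambda argv, **kw: call_meson(argv, use_submodule=True, **kw)

build_config, host_config = generate_machine_configs(build_machine, host_machine,
                                                     dict(os.environ), None, None, None,
                                                     call_selected_meson, "static",
                                                     Path("build"))
meson_env = host_config.make_merged_environment(dict(os.environ))
meson_args = [f"--native-file={build_config.machine_file}"]
if host_config is not build_config:
    meson_args.append(f"--cross-file={host_config.machine_file}")
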
/meson_configure.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import os
3 | from pathlib import Path
4 | import pickle
5 | import platform
6 | import re
7 | import shlex
8 | import shutil
9 | import subprocess
10 | import sys
11 | from typing import Any, Callable, Dict, List, Optional, Set
12 |
13 | RELENG_DIR = Path(__file__).resolve().parent
14 | SCRIPTS_DIR = RELENG_DIR / "meson-scripts"
15 |
16 | sys.path.insert(0, str(RELENG_DIR / "meson"))
17 | import mesonbuild.interpreter
18 | from mesonbuild.coredata import UserArrayOption, UserBooleanOption, \
19 | UserComboOption, UserFeatureOption, UserOption, UserStringOption
20 |
21 | from . import deps, env
22 | from .machine_spec import MachineSpec
23 | from .progress import ProgressCallback, print_progress
24 |
25 |
26 | def main():
27 | default_sourcedir = Path(sys.argv.pop(1))
28 | sourcedir = Path(os.environ.get("MESON_SOURCE_ROOT", default_sourcedir)).resolve()
29 |
30 | workdir = Path(os.getcwd())
31 | if workdir == sourcedir:
32 | default_builddir = sourcedir / "build"
33 | else:
34 | default_builddir = workdir
35 | builddir = Path(os.environ.get("MESON_BUILD_ROOT", default_builddir)).resolve()
36 |
37 | parser = argparse.ArgumentParser(prog="configure",
38 | add_help=False,
39 | formatter_class=argparse.RawTextHelpFormatter)
40 | opts = parser.add_argument_group(title="generic options")
41 | opts.add_argument("-h", "--help",
42 | help="show this help message and exit",
43 | action="help")
44 | opts.add_argument("--prefix",
45 | help="install files in PREFIX",
46 | metavar="PREFIX",
47 | type=parse_prefix)
48 | opts.add_argument("--build",
49 | help="configure for building on BUILD",
50 | metavar="BUILD",
51 | type=MachineSpec.parse)
52 | opts.add_argument("--host",
53 | help="cross-compile to build binaries to run on HOST",
54 | metavar="HOST",
55 | type=MachineSpec.parse)
56 | opts.add_argument("--enable-symbols",
57 | help="build binaries with debug symbols included (default: disabled)",
58 | action="store_true")
59 | opts.add_argument("--enable-shared",
60 | help="enable building shared libraries (default: disabled)",
61 | action="store_true")
62 | opts.add_argument("--with-meson",
63 | help="which Meson implementation to use (default: internal)",
64 | choices=["internal", "system"],
65 | dest="meson",
66 | default="internal")
67 | opts.add_argument(f"--without-prebuilds",
68 | help="do not make use of prebuilt bundles",
69 | metavar="{" + ",".join(query_supported_bundle_types(include_wildcards=True)) + "}",
70 | type=parse_bundle_type_set,
71 | default=set())
72 | opts.add_argument("extra_meson_options",
73 | nargs="*",
74 | help=argparse.SUPPRESS)
75 |
76 | meson_options_file = sourcedir / "meson.options"
77 | if not meson_options_file.exists():
78 | meson_options_file = sourcedir / "meson_options.txt"
79 | if meson_options_file.exists():
80 | meson_group = parser.add_argument_group(title="project-specific options")
81 | meson_opts = register_meson_options(meson_options_file, meson_group)
82 |
83 | options = parser.parse_args()
84 |
85 | if builddir.exists():
86 | if (builddir / "build.ninja").exists():
87 | print(f"Already configured. Wipe .{os.sep}{builddir.relative_to(workdir)} to reconfigure.",
88 | file=sys.stderr)
89 | sys.exit(1)
90 |
91 | default_library = "shared" if options.enable_shared else "static"
92 |
93 | allowed_prebuilds = set(query_supported_bundle_types(include_wildcards=False)) - options.without_prebuilds
94 |
95 | try:
96 | configure(sourcedir,
97 | builddir,
98 | options.prefix,
99 | options.build,
100 | options.host,
101 | os.environ,
102 | "included" if options.enable_symbols else "stripped",
103 | default_library,
104 | allowed_prebuilds,
105 | options.meson,
106 | collect_meson_options(options))
107 | except Exception as e:
108 | print(e, file=sys.stderr)
109 | if isinstance(e, subprocess.CalledProcessError):
110 | for label, data in [("Output", e.output),
111 | ("Stderr", e.stderr)]:
112 | if data:
113 | print(f"{label}:\n\t| " + "\n\t| ".join(data.strip().split("\n")), file=sys.stderr)
114 | sys.exit(1)
115 |
116 |
117 | def configure(sourcedir: Path,
118 | builddir: Path,
119 | prefix: Optional[str] = None,
120 | build_machine: Optional[MachineSpec] = None,
121 | host_machine: Optional[MachineSpec] = None,
122 | environ: Dict[str, str] = os.environ,
123 | debug_symbols: str = "stripped",
124 | default_library: str = "static",
125 | allowed_prebuilds: Set[str] = None,
126 | meson: str = "internal",
127 | extra_meson_options: List[str] = [],
128 | call_meson: Callable = env.call_meson,
129 | on_progress: ProgressCallback = print_progress):
130 | if prefix is None:
131 | prefix = env.detect_default_prefix()
132 |
133 | project_vscrt = detect_project_vscrt(sourcedir)
134 |
135 | if build_machine is None:
136 | build_machine = MachineSpec.make_from_local_system()
137 | build_machine = build_machine.default_missing(recommended_vscrt=project_vscrt)
138 |
139 | if host_machine is None:
140 | host_machine = build_machine
141 | else:
142 | host_machine = host_machine.default_missing(recommended_vscrt=project_vscrt)
143 |
144 | if host_machine.os == "windows":
145 | vs_arch = environ.get("VSCMD_ARG_TGT_ARCH")
146 | if vs_arch == "x86":
147 | host_machine = host_machine.evolve(arch=vs_arch)
148 |
149 | build_machine = build_machine.maybe_adapt_to_host(host_machine)
150 |
151 | if allowed_prebuilds is None:
152 | allowed_prebuilds = set(query_supported_bundle_types(include_wildcards=False))
153 |
154 | call_selected_meson = lambda argv, *args, **kwargs: call_meson(argv,
155 | use_submodule=meson == "internal",
156 | *args,
157 | **kwargs)
158 |
159 | meson_options = [
160 | f"-Dprefix={prefix}",
161 | f"-Ddefault_library={default_library}",
162 | *host_machine.meson_optimization_options,
163 | ]
164 | if debug_symbols == "stripped" and host_machine.toolchain_can_strip:
165 | meson_options += ["-Dstrip=true"]
166 |
167 | deps_dir = deps.detect_cache_dir(sourcedir)
168 |
169 | allow_prebuilt_toolchain = "toolchain" in allowed_prebuilds
170 | if allow_prebuilt_toolchain:
171 | try:
172 | toolchain_prefix, _ = deps.ensure_toolchain(build_machine, deps_dir, on_progress=on_progress)
173 | except deps.BundleNotFoundError as e:
174 | raise_toolchain_not_found(e)
175 | else:
176 | if project_depends_on_vala_compiler(sourcedir):
177 | toolchain_prefix = deps.query_toolchain_prefix(build_machine, deps_dir)
178 | vala_compiler = env.detect_toolchain_vala_compiler(toolchain_prefix, build_machine)
179 | if vala_compiler is None:
180 | build_vala_compiler(toolchain_prefix, deps_dir, call_selected_meson)
181 | else:
182 | toolchain_prefix = None
183 |
184 | is_cross_build = host_machine != build_machine
185 |
186 | build_sdk_prefix = None
187 | required = {"sdk:build"}
188 | if not is_cross_build:
189 | required.add("sdk:host")
190 | if allowed_prebuilds.issuperset(required):
191 | try:
192 | build_sdk_prefix, _ = deps.ensure_sdk(build_machine, deps_dir, on_progress=on_progress)
193 | except deps.BundleNotFoundError as e:
194 | raise_sdk_not_found(e, "build", build_machine)
195 |
196 | host_sdk_prefix = None
197 | if is_cross_build and "sdk:host" in allowed_prebuilds:
198 | try:
199 | host_sdk_prefix, _ = deps.ensure_sdk(host_machine, deps_dir, on_progress=on_progress)
200 | except deps.BundleNotFoundError as e:
201 | raise_sdk_not_found(e, "host", host_machine)
202 |
203 | build_config, host_config = \
204 | env.generate_machine_configs(build_machine,
205 | host_machine,
206 | environ,
207 | toolchain_prefix,
208 | build_sdk_prefix,
209 | host_sdk_prefix,
210 | call_selected_meson,
211 | default_library,
212 | builddir)
213 |
214 | meson_options += [f"--native-file={build_config.machine_file}"]
215 | if host_config is not build_config:
216 | meson_options += [f"--cross-file={host_config.machine_file}"]
217 |
218 | setup_env = host_config.make_merged_environment(environ)
219 | setup_env["FRIDA_ALLOWED_PREBUILDS"] = ",".join(allowed_prebuilds)
220 |
221 | call_selected_meson(["setup"] + meson_options + extra_meson_options + [builddir],
222 | cwd=sourcedir,
223 | env=setup_env,
224 | check=True)
225 |
226 | shutil.copy(SCRIPTS_DIR / "BSDmakefile", builddir)
227 | (builddir / "Makefile").write_text(generate_out_of_tree_makefile(sourcedir), encoding="utf-8")
228 | if platform.system() == "Windows":
229 | (builddir / "make.bat").write_text(generate_out_of_tree_make_bat(sourcedir), encoding="utf-8")
230 |
231 | (builddir / "frida-env.dat").write_bytes(pickle.dumps({
232 | "meson": meson,
233 | "build": build_config,
234 | "host": host_config if host_config is not build_config else None,
235 | "allowed_prebuilds": allowed_prebuilds,
236 | "deps": deps_dir,
237 | }))
238 |
239 |
240 | def parse_prefix(raw_prefix: str) -> Path:
241 | prefix = Path(raw_prefix)
242 | if not prefix.is_absolute():
243 | prefix = Path(os.getcwd()) / prefix
244 | return prefix
245 |
246 |
247 | def query_supported_bundle_types(include_wildcards: bool) -> List[str]:
248 | for e in deps.Bundle:
249 | identifier = e.name.lower()
250 | if e == deps.Bundle.SDK:
251 | if include_wildcards:
252 | yield identifier
253 | yield identifier + ":build"
254 | yield identifier + ":host"
255 | else:
256 | yield identifier
257 |
258 |
259 | def query_supported_bundle_type_values() -> List[deps.Bundle]:
260 | return [e for e in deps.Bundle]
261 |
262 |
263 | def parse_bundle_type_set(raw_array: str) -> Set[str]:
264 | supported_types = list(query_supported_bundle_types(include_wildcards=True))
265 | result = set()
266 | for element in raw_array.split(","):
267 | bundle_type = element.strip()
268 | if bundle_type not in supported_types:
269 | pretty_choices = "', '".join(supported_types)
270 | raise argparse.ArgumentTypeError(f"invalid bundle type: '{bundle_type}' (choose from '{pretty_choices}')")
271 | if bundle_type == "sdk":
272 | result.add("sdk:build")
273 | result.add("sdk:host")
274 | else:
275 | result.add(bundle_type)
276 | return result
277 |
278 |
279 | def raise_toolchain_not_found(e: Exception):
280 | raise ToolchainNotFoundError("\n".join([
281 | f"Unable to download toolchain: {e}",
282 | "",
283 | "Specify --without-prebuilds=toolchain to only use tools on your PATH.",
284 | "",
285 | "Another option is to do what Frida's CI does:",
286 | "",
287 | " ./releng/deps.py build --bundle=toolchain",
288 | "",
289 | "This produces a tarball in ./deps which gets picked up if you retry `./configure`.",
290 | "You may also want to make a backup of it for future reuse.",
291 | ]))
292 |
293 |
294 | def raise_sdk_not_found(e: Exception, kind: str, machine: MachineSpec):
295 | raise SDKNotFoundError("\n".join([
296 | f"Unable to download SDK: {e}",
297 | "",
298 | f"Specify --without-prebuilds=sdk:{kind} to build dependencies from source code.",
299 | "",
300 | "Another option is to do what Frida's CI does:",
301 | "",
302 | f" ./releng/deps.py build --bundle=sdk --host={machine.identifier}",
303 | "",
304 | "This produces a tarball in ./deps which gets picked up if you retry `./configure`.",
305 | "You may also want to make a backup of it for future reuse.",
306 | ]))
307 |
308 |
309 | def generate_out_of_tree_makefile(sourcedir: Path) -> str:
310 | m = ((SCRIPTS_DIR / "Makefile").read_text(encoding="utf-8")
311 | .replace("sys.argv[1]", "r'" + str(RELENG_DIR.parent) + "'")
312 | .replace('"$(shell pwd)"', shlex.quote(str(sourcedir)))
313 | .replace("./build", "."))
314 | return re.sub(r"git-submodules:.+?(?=\.PHONY:)", "", m, flags=re.MULTILINE | re.DOTALL)
315 |
316 |
317 | def generate_out_of_tree_make_bat(sourcedir: Path) -> str:
318 | m = ((SCRIPTS_DIR / "make.bat").read_text(encoding="utf-8")
319 | .replace("sys.argv[1]", "r'" + str(RELENG_DIR.parent) + "'")
320 | .replace('"%dp0%"', '"' + str(sourcedir) + '"')
321 | .replace('.\\build', "\"%dp0%\""))
322 | return re.sub(r"if not exist .+?(?=endlocal)", "", m, flags=re.MULTILINE | re.DOTALL)
323 |
324 |
325 | def register_meson_options(meson_option_file: Path, group: argparse._ArgumentGroup):
326 | interpreter = mesonbuild.optinterpreter.OptionInterpreter(subproject="")
327 | interpreter.process(meson_option_file)
328 |
329 | for key, opt in interpreter.options.items():
330 | name = key.name
331 | pretty_name = name.replace("_", "-")
332 |
333 | if isinstance(opt, UserFeatureOption):
334 | if opt.value != "enabled":
335 | action = "enable"
336 | value_to_set = "enabled"
337 | else:
338 | action = "disable"
339 | value_to_set = "disabled"
340 | group.add_argument(f"--{action}-{pretty_name}",
341 | action="append_const",
342 | const=f"-D{name}={value_to_set}",
343 | dest="main_meson_options",
344 | **parse_option_meta(name, action, opt))
345 | if opt.value == "auto":
346 | group.add_argument(f"--disable-{pretty_name}",
347 | action="append_const",
348 | const=f"-D{name}=disabled",
349 | dest="main_meson_options",
350 | **parse_option_meta(name, "disable", opt))
351 | elif isinstance(opt, UserBooleanOption):
352 | if not opt.value:
353 | action = "enable"
354 | value_to_set = "true"
355 | else:
356 | action = "disable"
357 | value_to_set = "false"
358 | group.add_argument(f"--{action}-{pretty_name}",
359 | action="append_const",
360 | const=f"-D{name}={value_to_set}",
361 | dest="main_meson_options",
362 | **parse_option_meta(name, action, opt))
363 | elif isinstance(opt, UserComboOption):
364 | group.add_argument(f"--with-{pretty_name}",
365 | choices=opt.choices,
366 | dest="meson_option:" + name,
367 | **parse_option_meta(name, "with", opt))
368 | elif isinstance(opt, UserArrayOption):
369 | group.add_argument(f"--with-{pretty_name}",
370 | dest="meson_option:" + name,
371 | type=make_array_option_value_parser(opt),
372 | **parse_option_meta(name, "with", opt))
373 | else:
374 | group.add_argument(f"--with-{pretty_name}",
375 | dest="meson_option:" + name,
376 | **parse_option_meta(name, "with", opt))
377 |
378 |
379 | def parse_option_meta(name: str,
380 | action: str,
381 | opt: UserOption[Any]):
382 | params = {}
383 |
384 | if isinstance(opt, UserStringOption):
385 | default_value = repr(opt.value)
386 | metavar = name.upper()
387 | elif isinstance(opt, UserArrayOption):
388 | default_value = ",".join(opt.value)
389 | metavar = "{" + ",".join(opt.choices) + "}"
390 | elif isinstance(opt, UserComboOption):
391 | default_value = opt.value
392 | metavar = "{" + "|".join(opt.choices) + "}"
393 | else:
394 | default_value = str(opt.value).lower()
395 | metavar = name.upper()
396 |
397 | if not (isinstance(opt, UserFeatureOption) \
398 | and opt.value == "auto" \
399 | and action == "disable"):
400 | text = f"{help_text_from_meson(opt.description)} (default: {default_value})"
401 | if action == "disable":
402 | text = "do not " + text
403 | params["help"] = text
404 | params["metavar"] = metavar
405 |
406 | return params
407 |
408 |
409 | def help_text_from_meson(description: str) -> str:
410 | if description:
411 | return description[0].lower() + description[1:]
412 | return description
413 |
414 |
415 | def collect_meson_options(options: argparse.Namespace) -> List[str]:
416 | result = []
417 |
418 | for raw_name, raw_val in vars(options).items():
419 | if raw_val is None:
420 | continue
421 | if raw_name == "main_meson_options":
422 | result += raw_val
423 | if raw_name.startswith("meson_option:"):
424 | name = raw_name[13:]
425 | val = raw_val if isinstance(raw_val, str) else ",".join(raw_val)
426 | result += [f"-D{name}={val}"]
427 |
428 | result += options.extra_meson_options
429 |
430 | return result
431 |
432 |
433 | def make_array_option_value_parser(opt: UserOption[Any]) -> Callable[[str], List[str]]:
434 | return lambda v: parse_array_option_value(v, opt)
435 |
436 |
437 | def parse_array_option_value(v: str, opt: UserArrayOption) -> List[str]:
438 | vals = [v.strip() for v in v.split(",")]
439 |
440 | choices = opt.choices
441 | for v in vals:
442 | if v not in choices:
443 | pretty_choices = "', '".join(choices)
444 | raise argparse.ArgumentTypeError(f"invalid array value: '{v}' (choose from '{pretty_choices}')")
445 |
446 | return vals
447 |
448 |
449 | def detect_project_vscrt(sourcedir: Path) -> Optional[str]:
450 | m = next(re.finditer(r"project\(([^)]+\))", read_meson_build(sourcedir)), None)
451 | if m is not None:
452 | project_args = m.group(1)
453 | m = next(re.finditer("'b_vscrt=([^']+)'", project_args), None)
454 | if m is not None:
455 | return m.group(1)
456 | return None
457 |
458 |
459 | def project_depends_on_vala_compiler(sourcedir: Path) -> bool:
460 | return "'vala'" in read_meson_build(sourcedir)
461 |
462 |
463 | def read_meson_build(sourcedir: Path) -> str:
464 | return (sourcedir / "meson.build").read_text(encoding="utf-8")
465 |
466 |
467 | def build_vala_compiler(toolchain_prefix: Path, deps_dir: Path, call_selected_meson: Callable):
468 | print("Building Vala compiler...", flush=True)
469 |
470 | workdir = deps_dir / "src"
471 | workdir.mkdir(parents=True, exist_ok=True)
472 |
473 | git = lambda *args, **kwargs: subprocess.run(["git", *args],
474 | **kwargs,
475 | capture_output=True,
476 | encoding="utf-8")
477 | vala_checkout = workdir / "vala"
478 | if vala_checkout.exists():
479 | shutil.rmtree(vala_checkout)
480 | vala_pkg = deps.load_dependency_parameters().packages["vala"]
481 | deps.clone_shallow(vala_pkg, vala_checkout, git)
482 |
483 | run_kwargs = {
484 | "stdout": subprocess.PIPE,
485 | "stderr": subprocess.STDOUT,
486 | "encoding": "utf-8",
487 | "check": True,
488 | }
489 | call_selected_meson([
490 | "setup",
491 | f"--prefix={toolchain_prefix}",
492 | "-Doptimization=2",
493 | "build",
494 | ],
495 | cwd=vala_checkout,
496 | **run_kwargs)
497 | call_selected_meson(["install"],
498 | cwd=vala_checkout / "build",
499 | **run_kwargs)
500 |
501 |
502 | class ToolchainNotFoundError(Exception):
503 | pass
504 |
505 |
506 | class SDKNotFoundError(Exception):
507 | pass
508 |
--------------------------------------------------------------------------------
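Besides being driven by the ./configure wrapper, configure() in meson_configure.py can be invoked programmatically. A hedged sketch; the paths, host identifier, and prebuild set are example values:

from pathlib import Path

configure(sourcedir=Path("/path/to/project"),
          builddir=Path("/path/to/project/build"),
          host_machine=MachineSpec.parse("android-arm64"),
          allowed_prebuilds={"toolchain", "sdk:build", "sdk:host"})
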
/devkit.py:
--------------------------------------------------------------------------------
1 | from collections import OrderedDict
2 | from enum import Enum
3 | import itertools
4 | import locale
5 | import os
6 | from pathlib import Path
7 | import re
8 | import shlex
9 | import shutil
10 | import subprocess
11 | import tempfile
12 | from typing import Mapping, Sequence, Union
13 |
14 | from . import env
15 | from .machine_spec import MachineSpec
16 |
17 |
18 | REPO_ROOT = Path(__file__).resolve().parent.parent
19 |
20 | DEVKITS = {
21 | "frida-gum": ("frida-gum-1.0", Path("gum") / "gum.h"),
22 | "frida-gumjs": ("frida-gumjs-1.0", Path("gumjs") / "gumscriptbackend.h"),
23 | "frida-core": ("frida-core-1.0", Path("frida-core.h")),
24 | }
25 |
26 | ASSETS_PATH = Path(__file__).parent / "devkit-assets"
27 |
28 | INCLUDE_PATTERN = re.compile(r"#include\s+[<\"](.*?)[>\"]")
29 |
30 |
31 | class DepSymbolScope(str, Enum):
32 | PREFIXED = "prefixed"
33 | ORIGINAL = "original"
34 |
35 |
36 | class CompilerApplication:
37 | def __init__(self,
38 | kit: str,
39 | machine: MachineSpec,
40 | meson_config: Mapping[str, Union[str, Sequence[str]]],
41 | output_dir: Path,
42 | dep_symbol_scope: DepSymbolScope = DepSymbolScope.PREFIXED):
43 | self.kit = kit
44 | package, umbrella_header = DEVKITS[kit]
45 | self.package = package
46 | self.umbrella_header = umbrella_header
47 |
48 | self.machine = machine
49 | self.meson_config = meson_config
50 | self.compiler_argument_syntax = None
51 | self.output_dir = output_dir
52 | self.dep_symbol_scope = dep_symbol_scope
53 | self.library_filename = None
54 |
55 | def run(self):
56 | output_dir = self.output_dir
57 | kit = self.kit
58 |
59 | self.compiler_argument_syntax = detect_compiler_argument_syntax(self.meson_config)
60 | self.library_filename = compute_library_filename(self.kit, self.compiler_argument_syntax)
61 |
62 | output_dir.mkdir(parents=True, exist_ok=True)
63 |
64 | (extra_ldflags, thirdparty_symbol_mappings) = self._generate_library()
65 |
66 | umbrella_header_path = compute_umbrella_header_path(self.machine,
67 | self.package,
68 | self.umbrella_header,
69 | self.meson_config)
70 |
71 | header_file = output_dir / f"{kit}.h"
72 | if not umbrella_header_path.exists():
73 | raise Exception(f"Header not found: {umbrella_header_path}")
74 | header_source = self._generate_header(umbrella_header_path, thirdparty_symbol_mappings)
75 | header_file.write_text(header_source, encoding="utf-8")
76 |
77 | example_file = output_dir / f"{kit}-example.c"
78 | example_source = self._generate_example(example_file, extra_ldflags)
79 | example_file.write_text(example_source, encoding="utf-8")
80 |
81 | extra_files = []
82 |
83 | extra_files += self._generate_gir()
84 |
85 | if self.compiler_argument_syntax == "msvc":
86 | for msvs_asset in itertools.chain(ASSETS_PATH.glob(f"{kit}-*.sln"), ASSETS_PATH.glob(f"{kit}-*.vcxproj*")):
87 | shutil.copy(msvs_asset, output_dir)
88 | extra_files.append(msvs_asset.name)
89 |
90 | return [header_file.name, self.library_filename, example_file.name] + extra_files
91 |
92 | def _generate_gir(self):
93 | if self.kit != "frida-core":
94 | return []
95 |
96 | gir_path = Path(query_pkgconfig_variable("frida_girdir", self.package, self.meson_config)) / "Frida-1.0.gir"
97 | gir_name = "frida-core.gir"
98 |
99 | shutil.copy(gir_path, self.output_dir / gir_name)
100 |
101 | return [gir_name]
102 |
103 | def _generate_header(self, umbrella_header_path, thirdparty_symbol_mappings):
104 | kit = self.kit
105 | package = self.package
106 | machine = self.machine
107 | meson_config = self.meson_config
108 |
109 | c_args = meson_config.get("c_args", [])
110 |
111 | include_cflags = query_pkgconfig_cflags(package, meson_config)
112 |
113 | if self.compiler_argument_syntax == "msvc":
114 | preprocessor = subprocess.run(meson_config["c"] + c_args + ["/nologo", "/E", umbrella_header_path] + include_cflags,
115 | stdout=subprocess.PIPE,
116 | stderr=subprocess.PIPE,
117 | encoding="utf-8")
118 | if preprocessor.returncode != 0:
119 | raise Exception(f"Failed to spawn preprocessor: {preprocessor.stderr}")
120 | lines = preprocessor.stdout.split("\n")
121 |
122 | mapping_prefix = "#line "
123 | header_refs = [line[line.index("\"") + 1:line.rindex("\"")].replace("\\\\", "/") for line in lines if line.startswith(mapping_prefix)]
124 |
125 | header_files = deduplicate(header_refs)
126 | frida_root_slashed = REPO_ROOT.as_posix()
127 | header_files = [Path(h) for h in header_files if bool(re.match("^" + frida_root_slashed, h, re.I))]
128 | else:
129 | header_dependencies = subprocess.run(
130 | meson_config["c"] + c_args + include_cflags + ["-E", "-M", umbrella_header_path],
131 | capture_output=True,
132 | encoding="utf-8",
133 | check=True).stdout
134 | _, raw_header_files = header_dependencies.split(": ", maxsplit=1)
135 | header_files = [Path(item) for item in shlex.split(raw_header_files) if item != "\n"]
136 | header_files = [h for h in header_files if h.is_relative_to(REPO_ROOT)]
137 |
138 | devkit_header_lines = []
139 | umbrella_header = header_files[0]
140 | processed_header_files = {umbrella_header}
141 | ingest_header(umbrella_header, header_files, processed_header_files, devkit_header_lines)
142 | if kit in {"frida-gum", "frida-gumjs"} and machine.os == "none":
143 | gum_dir = umbrella_header_path.parent
144 | if kit == "frida-gumjs":
145 | gum_dir = gum_dir.parent.parent / "gum"
146 | barebone_header = gum_dir / "backend-barebone" / "include" / "gum" / "gumbarebone.h"
147 | ingest_header(barebone_header, header_files, processed_header_files, devkit_header_lines)
148 | if kit == "frida-gumjs" and machine.os != "none":
149 | inspector_server_header = umbrella_header_path.parent / "guminspectorserver.h"
150 | ingest_header(inspector_server_header, header_files, processed_header_files, devkit_header_lines)
151 | if kit == "frida-core" and machine.os == "android":
152 | selinux_header = umbrella_header_path.parent / "frida-selinux.h"
153 | ingest_header(selinux_header, header_files, processed_header_files, devkit_header_lines)
154 | devkit_header = u"".join(devkit_header_lines)
155 |
156 | if package.startswith("frida-gum"):
157 | config = """#ifndef GUM_STATIC
158 | # define GUM_STATIC
159 | #endif
160 |
161 | """
162 | else:
163 | config = ""
164 |
165 | if machine.os == "windows":
166 | deps = ["dnsapi", "iphlpapi", "psapi", "shlwapi", "winmm", "ws2_32"]
167 | if package == "frida-core-1.0":
168 | deps.extend(["advapi32", "crypt32", "gdi32", "kernel32", "ole32", "secur32", "shell32", "user32"])
169 | deps.sort()
170 |
171 | frida_pragmas = f"#pragma comment(lib, \"{compute_library_filename(kit, self.compiler_argument_syntax)}\")"
172 | dep_pragmas = "\n".join([f"#pragma comment(lib, \"{dep}.lib\")" for dep in deps])
173 |
174 | config += f"#ifdef _MSC_VER\n\n{frida_pragmas}\n\n{dep_pragmas}\n\n#endif\n\n"
175 |
176 | if len(thirdparty_symbol_mappings) > 0:
177 | public_mappings = []
178 | for original, renamed in extract_public_thirdparty_symbol_mappings(thirdparty_symbol_mappings):
179 | public_mappings.append((original, renamed))
180 | if f"define {original}" not in devkit_header and f"define {original}" not in devkit_header:
181 | continue
182 | def fixup_macro(match):
183 | prefix = match.group(1)
184 | suffix = re.sub(f"\\b{original}\\b", renamed, match.group(2))
185 | return f"#undef {original}\n{prefix}{original}{suffix}"
186 | devkit_header = re.sub(r"^([ \t]*#[ \t]*define[ \t]*){0}\b((.*\\\n)*.*)$".format(original), fixup_macro, devkit_header, flags=re.MULTILINE)
187 |
188 | config += "#ifndef __FRIDA_SYMBOL_MAPPINGS__\n"
189 | config += "#define __FRIDA_SYMBOL_MAPPINGS__\n\n"
190 | config += "\n".join([f"#define {original} {renamed}" for original, renamed in public_mappings]) + "\n\n"
191 | config += "#endif\n\n"
192 |
193 | return (config + devkit_header).replace("\r\n", "\n")
194 |
195 | def _generate_library(self):
196 | library_flags = call_pkgconfig(["--static", "--libs", self.package], self.meson_config).split(" ")
197 |
198 | library_dirs = infer_library_dirs(library_flags)
199 | library_names = infer_library_names(library_flags)
200 | library_paths, extra_flags = resolve_library_paths(library_names, library_dirs, self.machine)
201 | extra_flags += infer_linker_flags(library_flags)
202 |
203 | if self.compiler_argument_syntax == "msvc":
204 | thirdparty_symbol_mappings = self._do_generate_library_msvc(library_paths)
205 | else:
206 | thirdparty_symbol_mappings = self._do_generate_library_unix(library_paths)
207 |
208 | return (extra_flags, thirdparty_symbol_mappings)
209 |
210 | def _do_generate_library_msvc(self, library_paths):
211 | subprocess.run(self.meson_config["lib"] + ["/nologo", "/out:" + str(self.output_dir / self.library_filename)] + library_paths,
212 | capture_output=True,
213 | encoding="utf-8",
214 | check=True)
215 |
216 | thirdparty_symbol_mappings = []
217 |
218 | return thirdparty_symbol_mappings
219 |
220 | def _do_generate_library_unix(self, library_paths):
221 | output_path = self.output_dir / self.library_filename
222 | output_path.unlink(missing_ok=True)
223 |
224 | v8_libs = [path for path in library_paths if path.name.startswith("libv8")]
225 | if len(v8_libs) > 0:
226 | v8_libdir = v8_libs[0].parent
227 | libcxx_libs = list((v8_libdir / "c++").glob("*.a"))
228 | library_paths.extend(libcxx_libs)
229 |
230 | meson_config = self.meson_config
231 |
232 | ar = meson_config.get("ar", ["ar"])
233 | ar_help = subprocess.run(ar + ["--help"],
234 | stdout=subprocess.PIPE,
235 | stderr=subprocess.STDOUT,
236 | encoding="utf-8").stdout
237 | mri_supported = "-M [<mri-script]" in ar_help
483 | while len(pending_cflags) > 0:
484 | flag = pending_cflags.pop(0)
485 | if flag == "-include":
486 | pending_cflags.pop(0)
487 | else:
488 | tweaked_cflags.append(flag)
489 |
490 | tweaked_cflags = deduplicate(tweaked_cflags)
491 | existing_cflags = set(tweaked_cflags)
492 |
493 | pending_ldflags = ldflags[:]
494 | seen_libs = set()
495 | seen_flags = set()
496 | while len(pending_ldflags) > 0:
497 | flag = pending_ldflags.pop(0)
498 | if flag in ("-arch", "-isysroot") and flag in existing_cflags:
499 | pending_ldflags.pop(0)
500 | else:
501 | if flag == "-isysroot":
502 | sysroot = pending_ldflags.pop(0)
503 | if "MacOSX" in sysroot:
504 | tweaked_ldflags.append("-isysroot \"$(xcrun --sdk macosx --show-sdk-path)\"")
505 | elif "iPhoneOS" in sysroot:
506 | tweaked_ldflags.append("-isysroot \"$(xcrun --sdk iphoneos --show-sdk-path)\"")
507 | continue
508 | elif flag == "-L":
509 | pending_ldflags.pop(0)
510 | continue
511 | elif flag.startswith("-L"):
512 | continue
513 | elif flag.startswith("-l"):
514 | if flag in seen_libs:
515 | continue
516 | seen_libs.add(flag)
517 | elif flag == "-pthread":
518 | if flag in seen_flags:
519 | continue
520 | seen_flags.add(flag)
521 | tweaked_ldflags.append(flag)
522 |
523 | pending_ldflags = tweaked_ldflags
524 | tweaked_ldflags = []
525 | while len(pending_ldflags) > 0:
526 | flag = pending_ldflags.pop(0)
527 |
528 | raw_flags = []
529 | while flag.startswith("-Wl,"):
530 | raw_flags.append(flag[4:])
531 | if len(pending_ldflags) > 0:
532 | flag = pending_ldflags.pop(0)
533 | else:
534 | flag = None
535 | break
536 | if len(raw_flags) > 0:
537 | merged_flags = "-Wl," + ",".join(raw_flags)
538 | if "--icf=" in merged_flags:
539 | tweaked_ldflags.append("-fuse-ld=gold")
540 | tweaked_ldflags.append(merged_flags)
541 |
542 | if flag is not None and flag not in existing_cflags:
543 | tweaked_ldflags.append(flag)
544 |
545 | return (tweaked_cflags, tweaked_ldflags)
546 |
547 |
548 | def deduplicate(items):
549 | return list(OrderedDict.fromkeys(items))
550 |
--------------------------------------------------------------------------------
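A short sketch of driving devkit.py's CompilerApplication against an already-configured build, roughly what mkdevkit.py does; the package import path (releng), the empty flavor, and the directory names are assumptions:

from pathlib import Path
from releng import devkit, env
from releng.machine_spec import MachineSpec

machine = MachineSpec.make_from_local_system()
meson_config = env.load_meson_config(machine, flavor="", build_dir=Path("build"))
app = devkit.CompilerApplication("frida-gum", machine, meson_config, Path("devkits") / "gum")
app.run()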