├── .reuse
│   └── dep5
├── LICENSES
│   ├── Apache-2.0.txt
│   ├── BSD-3-Clause.txt
│   └── CC0-1.0.txt
├── aidl-gen
│   ├── .gitignore
│   ├── README.md
│   └── aidl_gen
│       ├── __init__.py
│       ├── __main__.py
│       ├── aidl
│       │   ├── interface.py
│       │   ├── method.py
│       │   └── service.py
│       └── main.py
├── aosp-merger
│   ├── README.md
│   ├── _merge_helper.sh
│   ├── _subtree_merge_helper.sh
│   ├── aosp-merger.sh
│   ├── branches_rebase.sh
│   ├── branches_restore.sh
│   ├── branches_save.sh
│   ├── merge-aosp-forks.sh
│   ├── merge-aosp.sh
│   ├── prepare-commit-msg
│   ├── push-merge.sh
│   ├── squash.sh
│   ├── upload-merge.sh
│   └── upload-squash.sh
├── best-caf-kernel
│   └── best-caf-kernel.py
├── build-webview
│   └── build-webview.sh
├── carriersettings-extractor
│   ├── .gitignore
│   ├── LICENSE
│   ├── Makefile
│   ├── README.md
│   ├── carrierId.proto
│   ├── carrier_list.pb
│   ├── carrier_list.proto
│   ├── carrier_settings.proto
│   └── carriersettings_extractor.py
├── config-fs-gen
│   ├── README.md
│   ├── config-fs-gen.py
│   └── requirements.txt
├── default-wallpaper
│   └── update_default_wallpaper.sh
├── device-deps-regenerator
│   ├── .gitignore
│   ├── README.md
│   ├── app.py
│   ├── device2kernel.py
│   ├── devices.py
│   └── requirements.txt
├── emoji-updater
│   └── emoji-updater.py
├── fbpacktool
│   ├── .gitignore
│   ├── fbpack.py
│   ├── fbpacktool.py
│   └── packedstruct.py
├── git-push-merge-review
│   ├── README.md
│   └── git-push-merge-review
├── key-migration
│   ├── export-keys.sh
│   └── migration.sh
├── lineage-priv-template
│   ├── BUILD.bazel
│   ├── README.md
│   ├── check_keys.py
│   ├── keys.mk
│   ├── keys.sh
│   ├── make_key.sh
│   ├── releasekey.pk8
│   ├── releasekey.x509.pem
│   └── requirements.txt
├── lineage-push
│   ├── README.md
│   └── lineage-push.py
├── motorola
│   ├── info.sh
│   └── star.sh
├── pixel
│   ├── README.md
│   ├── all.sh
│   ├── build-desc-fingerprint.sh
│   ├── device.sh
│   ├── download.sh
│   ├── extract-factory-image.sh
│   ├── firmware.sh
│   ├── get-new-device-vars.py
│   ├── prepare-firmware.sh
│   ├── update-any-var.sh
│   └── update-device-vars.sh
├── reuse_helper
│   ├── .gitignore
│   ├── README.md
│   ├── reuse_helper.py
│   └── utils.py
├── set-default-branch
│   └── set-default-branch
└── shipper
    ├── requirements.txt
    └── shipper.py

/.reuse/dep5:
--------------------------------------------------------------------------------
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: scripts
Upstream-Contact: The LineageOS Project
Source: https://github.com/LineageOS/scripts

Files: .idea/*
Copyright: 2023 The LineageOS Project
License: Apache-2.0

Files: aidl-gen/*
Copyright: 2021 The LineageOS Project
License: Apache-2.0

Files: aosp-merger/README.md*
Copyright: 2021-2023 The LineageOS Project
License: Apache-2.0

Files: carriersettings-extractor/*.py
Copyright: 2020 Google LLC
License: BSD-3-Clause

Files: carriersettings-extractor/*
Copyright: 2023 The CalyxOS Institute
License: BSD-3-Clause

Files: config-fs-gen/README.md
Copyright: 2019 The LineageOS Project
License: Apache-2.0

Files: config-fs-gen/requirements.txt
Copyright: 2020 The LineageOS Project
License: Apache-2.0

Files: device-deps-regenerator/.gitignore device-deps-regenerator/README.md device-deps-regenerator/requirements.txt
Copyright: 2017-2023 The LineageOS Project
License: Apache-2.0

Files: fbpacktool/.gitignore
Copyright: 2021 The Android Open Source Project
License: Apache-2.0

Files: lineage-push/README.md
Copyright: 2017-2018 The LineageOS Project
License: Apache-2.0

Files: pixel/README.md pixel/get-new-device-vars.py
47 | Copyright: 2022-2023 The CalyxOS Institute 48 | License: Apache-2.0 49 | 50 | Files: reuse_helper/__pycache__ reuse_helper/.idea reuse_helper/.gitignore reuse_helper/README.md 51 | Copyright: 2023 The LineageOS Project 52 | License: Apache-2.0 53 | 54 | Files: shipper/requirements.txt 55 | Copyright: 2024 The LineageOS Project 56 | License: Apache-2.0 57 | -------------------------------------------------------------------------------- /LICENSES/Apache-2.0.txt: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. 10 | 11 | "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 12 | 13 | "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. 14 | 15 | "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 16 | 17 | "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. 18 | 19 | "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. 20 | 21 | "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). 22 | 23 | "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 24 | 25 | "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 26 | 27 | "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 28 | 29 | 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 30 | 31 | 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 32 | 33 | 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: 34 | 35 | (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and 36 | 37 | (b) You must cause any modified files to carry prominent notices stating that You changed the files; and 38 | 39 | (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and 40 | 41 | (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. 42 | 43 | You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 44 | 45 | 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 46 | 47 | 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 48 | 49 | 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 50 | 51 | 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 52 | 53 | 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 54 | 55 | END OF TERMS AND CONDITIONS 56 | 57 | APPENDIX: How to apply the Apache License to your work. 
58 | 59 | To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 60 | 61 | Copyright [yyyy] [name of copyright owner] 62 | 63 | Licensed under the Apache License, Version 2.0 (the "License"); 64 | you may not use this file except in compliance with the License. 65 | You may obtain a copy of the License at 66 | 67 | http://www.apache.org/licenses/LICENSE-2.0 68 | 69 | Unless required by applicable law or agreed to in writing, software 70 | distributed under the License is distributed on an "AS IS" BASIS, 71 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 72 | See the License for the specific language governing permissions and 73 | limitations under the License. 74 | -------------------------------------------------------------------------------- /LICENSES/BSD-3-Clause.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) . 2 | 3 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 4 | 5 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 6 | 7 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 8 | 9 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 10 | 11 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 12 | -------------------------------------------------------------------------------- /LICENSES/CC0-1.0.txt: -------------------------------------------------------------------------------- 1 | Creative Commons Legal Code 2 | 3 | CC0 1.0 Universal 4 | 5 | CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE 6 | LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN 7 | ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS 8 | INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES 9 | REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS 10 | PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM 11 | THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED 12 | HEREUNDER. 
13 | 14 | Statement of Purpose 15 | 16 | The laws of most jurisdictions throughout the world automatically confer 17 | exclusive Copyright and Related Rights (defined below) upon the creator 18 | and subsequent owner(s) (each and all, an "owner") of an original work of 19 | authorship and/or a database (each, a "Work"). 20 | 21 | Certain owners wish to permanently relinquish those rights to a Work for 22 | the purpose of contributing to a commons of creative, cultural and 23 | scientific works ("Commons") that the public can reliably and without fear 24 | of later claims of infringement build upon, modify, incorporate in other 25 | works, reuse and redistribute as freely as possible in any form whatsoever 26 | and for any purposes, including without limitation commercial purposes. 27 | These owners may contribute to the Commons to promote the ideal of a free 28 | culture and the further production of creative, cultural and scientific 29 | works, or to gain reputation or greater distribution for their Work in 30 | part through the use and efforts of others. 31 | 32 | For these and/or other purposes and motivations, and without any 33 | expectation of additional consideration or compensation, the person 34 | associating CC0 with a Work (the "Affirmer"), to the extent that he or she 35 | is an owner of Copyright and Related Rights in the Work, voluntarily 36 | elects to apply CC0 to the Work and publicly distribute the Work under its 37 | terms, with knowledge of his or her Copyright and Related Rights in the 38 | Work and the meaning and intended legal effect of CC0 on those rights. 39 | 40 | 1. Copyright and Related Rights. A Work made available under CC0 may be 41 | protected by copyright and related or neighboring rights ("Copyright and 42 | Related Rights"). Copyright and Related Rights include, but are not 43 | limited to, the following: 44 | 45 | i. the right to reproduce, adapt, distribute, perform, display, 46 | communicate, and translate a Work; 47 | ii. moral rights retained by the original author(s) and/or performer(s); 48 | iii. publicity and privacy rights pertaining to a person's image or 49 | likeness depicted in a Work; 50 | iv. rights protecting against unfair competition in regards to a Work, 51 | subject to the limitations in paragraph 4(a), below; 52 | v. rights protecting the extraction, dissemination, use and reuse of data 53 | in a Work; 54 | vi. database rights (such as those arising under Directive 96/9/EC of the 55 | European Parliament and of the Council of 11 March 1996 on the legal 56 | protection of databases, and under any national implementation 57 | thereof, including any amended or successor version of such 58 | directive); and 59 | vii. other similar, equivalent or corresponding rights throughout the 60 | world based on applicable law or treaty, and any national 61 | implementations thereof. 62 | 63 | 2. Waiver. 
To the greatest extent permitted by, but not in contravention 64 | of, applicable law, Affirmer hereby overtly, fully, permanently, 65 | irrevocably and unconditionally waives, abandons, and surrenders all of 66 | Affirmer's Copyright and Related Rights and associated claims and causes 67 | of action, whether now known or unknown (including existing as well as 68 | future claims and causes of action), in the Work (i) in all territories 69 | worldwide, (ii) for the maximum duration provided by applicable law or 70 | treaty (including future time extensions), (iii) in any current or future 71 | medium and for any number of copies, and (iv) for any purpose whatsoever, 72 | including without limitation commercial, advertising or promotional 73 | purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each 74 | member of the public at large and to the detriment of Affirmer's heirs and 75 | successors, fully intending that such Waiver shall not be subject to 76 | revocation, rescission, cancellation, termination, or any other legal or 77 | equitable action to disrupt the quiet enjoyment of the Work by the public 78 | as contemplated by Affirmer's express Statement of Purpose. 79 | 80 | 3. Public License Fallback. Should any part of the Waiver for any reason 81 | be judged legally invalid or ineffective under applicable law, then the 82 | Waiver shall be preserved to the maximum extent permitted taking into 83 | account Affirmer's express Statement of Purpose. In addition, to the 84 | extent the Waiver is so judged Affirmer hereby grants to each affected 85 | person a royalty-free, non transferable, non sublicensable, non exclusive, 86 | irrevocable and unconditional license to exercise Affirmer's Copyright and 87 | Related Rights in the Work (i) in all territories worldwide, (ii) for the 88 | maximum duration provided by applicable law or treaty (including future 89 | time extensions), (iii) in any current or future medium and for any number 90 | of copies, and (iv) for any purpose whatsoever, including without 91 | limitation commercial, advertising or promotional purposes (the 92 | "License"). The License shall be deemed effective as of the date CC0 was 93 | applied by Affirmer to the Work. Should any part of the License for any 94 | reason be judged legally invalid or ineffective under applicable law, such 95 | partial invalidity or ineffectiveness shall not invalidate the remainder 96 | of the License, and in such case Affirmer hereby affirms that he or she 97 | will not (i) exercise any of his or her remaining Copyright and Related 98 | Rights in the Work or (ii) assert any associated claims and causes of 99 | action with respect to the Work, in either case contrary to Affirmer's 100 | express Statement of Purpose. 101 | 102 | 4. Limitations and Disclaimers. 103 | 104 | a. No trademark or patent rights held by Affirmer are waived, abandoned, 105 | surrendered, licensed or otherwise affected by this document. 106 | b. Affirmer offers the Work as-is and makes no representations or 107 | warranties of any kind concerning the Work, express, implied, 108 | statutory or otherwise, including without limitation warranties of 109 | title, merchantability, fitness for a particular purpose, non 110 | infringement, or the absence of latent or other defects, accuracy, or 111 | the present or absence of errors, whether or not discoverable, all to 112 | the greatest extent permissible under applicable law. 113 | c. 
Affirmer disclaims responsibility for clearing rights of other persons 114 | that may apply to the Work or any use thereof, including without 115 | limitation any person's Copyright and Related Rights in the Work. 116 | Further, Affirmer disclaims responsibility for obtaining any necessary 117 | consents, permissions or other rights required for any use of the 118 | Work. 119 | d. Affirmer understands and acknowledges that Creative Commons is not a 120 | party to this document and has no duty or obligation with respect to 121 | this CC0 or use of the Work. 122 | -------------------------------------------------------------------------------- /aidl-gen/.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # Distribution / packaging 7 | .Python 8 | build/ 9 | develop-eggs/ 10 | dist/ 11 | downloads/ 12 | eggs/ 13 | .eggs/ 14 | lib/ 15 | lib64/ 16 | parts/ 17 | sdist/ 18 | var/ 19 | wheels/ 20 | pip-wheel-metadata/ 21 | share/python-wheels/ 22 | *.egg-info/ 23 | .installed.cfg 24 | *.egg 25 | MANIFEST 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .nox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | *.py,cover 48 | .hypothesis/ 49 | .pytest_cache/ 50 | 51 | # Sphinx documentation 52 | docs/_build/ 53 | 54 | # PyBuilder 55 | target/ 56 | 57 | # pyenv 58 | .python-version 59 | 60 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 61 | __pypackages__/ 62 | 63 | # Environments 64 | .env 65 | .venv 66 | env/ 67 | venv/ 68 | ENV/ 69 | env.bak/ 70 | venv.bak/ 71 | roject 72 | 73 | # mkdocs documentation 74 | /site 75 | 76 | # mypy 77 | .mypy_cache/ 78 | .dmypy.json 79 | dmypy.json 80 | 81 | # Pyre type checker 82 | .pyre/ 83 | 84 | # editors 85 | .idea/ 86 | .vscode/ 87 | -------------------------------------------------------------------------------- /aidl-gen/README.md: -------------------------------------------------------------------------------- 1 | # AIDL (service) generator 2 | 3 | ``` 4 | $ python3 -m aidl_gen -h 5 | usage: aidl_gen [-h] -I INCLUDE fqname out_dir 6 | 7 | positional arguments: 8 | fqname Full qualifier of an AIDL interface (e.g. 
9 | android.hardware.light.ILights) 10 | out_dir Folders where the service will be written on 11 | 12 | optional arguments: 13 | -h, --help show this help message and exit 14 | -I INCLUDE, --include INCLUDE 15 | Folders to include that contains the AIDL interface 16 | (note: use the folder where Android.bp resides, aka 17 | the top AIDL folder), you can use multiple -I flags to 18 | include multiple locations, but at least one is 19 | required 20 | ``` 21 | -------------------------------------------------------------------------------- /aidl-gen/aidl_gen/__init__.py: -------------------------------------------------------------------------------- 1 | """AIDL generator module.""" 2 | 3 | __version__ = "1.0" 4 | 5 | from pathlib import Path 6 | 7 | module_path = Path(__file__).parent 8 | -------------------------------------------------------------------------------- /aidl-gen/aidl_gen/__main__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from aidl_gen.main import main 4 | 5 | if __name__ == '__main__': 6 | main() 7 | -------------------------------------------------------------------------------- /aidl-gen/aidl_gen/aidl/interface.py: -------------------------------------------------------------------------------- 1 | from aidl_gen.aidl.method import AIDLMethod 2 | from pathlib import Path 3 | 4 | class AIDLInterface: 5 | def __init__(self, fqname: str, includes: list[Path]): 6 | self.fqname = fqname 7 | self.includes = includes 8 | 9 | self.interface_file = self.get_aidl_file(self.fqname) 10 | 11 | self.methods = [] 12 | self.imports = {} 13 | self.is_interface = False 14 | self.is_parcelable = False 15 | 16 | open_comment = False 17 | inside_structure = False 18 | self.method = "" 19 | 20 | self.content = self.interface_file.read_text() 21 | for line in self.content.splitlines(): 22 | line = line.strip() 23 | 24 | # Skip empty lines 25 | if not line: 26 | continue 27 | 28 | # Deal with comments, we relay on the .aidl 29 | # not having comments in the middle of the code 30 | if open_comment: 31 | if "*/" in line: 32 | open_comment = False 33 | continue 34 | 35 | if line.startswith("/*"): 36 | open_comment = True 37 | continue 38 | 39 | if line.startswith("import"): 40 | # Save the imports, they will be used in the code 41 | # to know from where data types comes from 42 | # and what data type it is 43 | import_name = line.split()[1].removesuffix(';') 44 | self.imports[import_name.rsplit('.', 1)[1]] = AIDLInterface(import_name, includes) 45 | continue 46 | 47 | if line.startswith("interface") or line.startswith("parcelable"): 48 | if inside_structure: 49 | raise AssertionError("Found nested declarations") 50 | inside_structure = True 51 | if line.startswith("interface"): 52 | self.is_interface = True 53 | elif line.startswith("parcelable"): 54 | self.is_parcelable = True 55 | continue 56 | 57 | if inside_structure: 58 | # If we reached end of interface declaration exit 59 | if line[0] == '}': 60 | inside_structure = False 61 | continue 62 | 63 | if self.is_interface: 64 | # Skip non functions 65 | if not '(' in line and not line.startswith("in"): 66 | continue 67 | 68 | # This should be a method (can span multiple lines) 69 | if line.endswith(","): 70 | self.method += line 71 | else: 72 | self.methods.append(AIDLMethod(self.method + line, self.imports)) 73 | self.method = "" 74 | continue 75 | 76 | def get_aidl_file(self, fqname: str): 77 | for dir in self.includes: 78 | file = dir / Path(fqname.replace('.', '/') + 
'.aidl') 79 | if not file.is_file(): 80 | continue 81 | return file 82 | 83 | raise FileNotFoundError(f"Interface {fqname} not found") 84 | -------------------------------------------------------------------------------- /aidl-gen/aidl_gen/aidl/method.py: -------------------------------------------------------------------------------- 1 | # Source: https://source.android.com/devices/architecture/aidl/aidl-backends#types 2 | AIDL_TO_CPP_TYPE = { 3 | "boolean": "bool", 4 | "byte": "int8_t", 5 | "char": "char16_t", 6 | "int": "int32_t", 7 | "long": "int64_t", 8 | # "float": "float", # No intervention required 9 | # "double": "double", # No intervention required 10 | "String": "::android::String16", 11 | "android.os.Parcelable": "::android::Parcelable", 12 | "IBinder": "::android::IBinder", 13 | # "T[]": "std::vector", # Dealt with in AIDLMethodArgument 14 | # "byte[]": "std::vector", # "byte" match will handle this 15 | # "List": "std::vector", # Dealt with in AIDLMethodArgument 16 | "FileDescriptor": "::android::base::unique_fd", 17 | "ParcelFileDescriptor": "::android::os::ParcelFileDescriptor", 18 | # "interface type (T)": "::android::sp", # Dealt with in AIDLMethodArgument 19 | # "parcelable type (T)": "T", # No intervention required 20 | # "union type (T)": "T", # No intervention required 21 | } 22 | 23 | class AIDLMethodArgument: 24 | def __init__(self, argument: str, imports: dict, aidl_return: bool = False): 25 | self.argument = argument 26 | self.imports = imports 27 | self.aidl_return = aidl_return 28 | self.nullable = False 29 | 30 | args = self.argument.split() 31 | if len(args) > 2: 32 | self.nullable = True 33 | self.arg_type = args[1] 34 | self.name = args[2] 35 | else: 36 | self.arg_type = args[0] 37 | self.name = args[1] 38 | 39 | self.data_type = self.get_type() 40 | self.is_array = self.get_is_array() 41 | 42 | if self.data_type in AIDL_TO_CPP_TYPE: 43 | self.data_type = AIDL_TO_CPP_TYPE[self.data_type] 44 | 45 | if self.is_array: 46 | self.arg_type = f"std::vector<{self.data_type}>" 47 | else: 48 | self.arg_type = self.data_type 49 | 50 | if self.data_type in imports and imports[self.data_type].is_interface: 51 | self.arg_type = f"std::shared_ptr<{self.arg_type}>" 52 | 53 | if self.data_type in imports and not aidl_return: 54 | if imports[self.data_type].is_interface or imports[self.data_type].is_parcelable: 55 | if self.nullable: 56 | self.arg_type = f"std::optional<{self.arg_type}>" 57 | self.arg_type = f"const {self.arg_type}&" 58 | 59 | if self.aidl_return: 60 | self.arg_type += "*" 61 | 62 | def get_type(self): 63 | if self.arg_type.endswith("[]"): 64 | return self.arg_type.removesuffix("[]") 65 | if self.arg_type.startswith("List<"): 66 | return self.arg_type.removeprefix('List<').removesuffix('>') 67 | if self.arg_type.startswith("std::vector<"): 68 | return self.arg_type.removeprefix('std::vector<').removesuffix('>') 69 | return self.arg_type 70 | 71 | def get_is_array(self): 72 | return (self.arg_type.endswith("[]") 73 | or self.arg_type.startswith("List<") 74 | or self.arg_type.startswith("std::vector<")) 75 | 76 | class AIDLMethod: 77 | def __init__(self, method_str: str, imports: dict): 78 | self.method_str = method_str 79 | 80 | self.args = [] 81 | 82 | # We don't care about the method being oneway 83 | self.method_str = self.method_str.removeprefix("oneway ") 84 | 85 | self.return_type, temp = self.method_str.split(maxsplit=1) 86 | temp = temp.removesuffix(';') 87 | self.name, self.args_str = temp.split('(', 1) 88 | self.args_str = '(' + self.args_str 89 | 90 
| self.args_str = self.args_str.removeprefix('(').removesuffix(')') 91 | 92 | if self.args_str != "": 93 | for arg in self.args_str.split(','): 94 | arg = arg.strip().removeprefix("in").strip() 95 | self.args.append(AIDLMethodArgument(arg, imports)) 96 | 97 | if self.return_type != "void": 98 | self.args.append(AIDLMethodArgument(f"{self.return_type} _aidl_return", 99 | imports, aidl_return=True)) 100 | -------------------------------------------------------------------------------- /aidl-gen/aidl_gen/aidl/service.py: -------------------------------------------------------------------------------- 1 | from aidl_gen.aidl.interface import AIDLInterface 2 | from datetime import datetime 3 | from pathlib import Path 4 | 5 | ANDROID_BP_TEMPLATE = \ 6 | """\ 7 | // 8 | // Copyright (C) {year} The LineageOS Project 9 | // 10 | // SPDX-License-Identifier: Apache-2.0 11 | // 12 | 13 | cc_binary {{ 14 | name: "{aidl_name}-service", 15 | relative_install_path: "hw", 16 | init_rc: ["{aidl_name}-service.rc"], 17 | vintf_fragments: ["{aidl_name}-service.xml"], 18 | srcs: [ 19 | "{class_name}.cpp", 20 | "service.cpp", 21 | ], 22 | shared_libs: [ 23 | "libbase", 24 | "libbinder_ndk", 25 | "{aidl_name}-ndk_platform", 26 | ], 27 | vendor: true, 28 | }} 29 | """ 30 | 31 | INIT_RC_TEMPLATE = \ 32 | """\ 33 | service vendor.{hal_name}-default /vendor/bin/hw/{aidl_name}-service 34 | class hal 35 | user nobody 36 | group nobody 37 | shutdown critical 38 | """ 39 | 40 | VINTF_FRAGMENT_TEMPLATE = \ 41 | """\ 42 | 43 | 44 | {aidl_name} 45 | {interface_name}/default 46 | 47 | 48 | """ 49 | 50 | MAIN_CPP_TEMPLATE = \ 51 | """\ 52 | /* 53 | * Copyright (C) {year} The LineageOS Project 54 | * 55 | * SPDX-License-Identifier: Apache-2.0 56 | */ 57 | 58 | #include "{class_name}.h" 59 | 60 | namespace aidl {{ 61 | {aidl_namespace_open} 62 | 63 | {methods_definitions} 64 | 65 | {aidl_namespace_close} 66 | }} // namespace aidl 67 | """ 68 | 69 | MAIN_H_TEMPLATE = \ 70 | """\ 71 | /* 72 | * Copyright (C) {year} The LineageOS Project 73 | * 74 | * SPDX-License-Identifier: Apache-2.0 75 | */ 76 | 77 | #pragma once 78 | 79 | #include 80 | 81 | {using_namespaces} 82 | 83 | namespace aidl {{ 84 | {aidl_namespace_open} 85 | 86 | class {class_name} : public Bn{class_name} {{ 87 | public: 88 | {methods_declarations} 89 | }}; 90 | 91 | {aidl_namespace_close} 92 | }} // namespace aidl 93 | """ 94 | 95 | SERVICE_CPP_TEMPLATE = \ 96 | """\ 97 | /* 98 | * Copyright (C) {year} The LineageOS Project 99 | * 100 | * SPDX-License-Identifier: Apache-2.0 101 | */ 102 | 103 | #include "{class_name}.h" 104 | 105 | #include 106 | #include 107 | #include 108 | 109 | using ::aidl::{aidl_namespace}::{class_name}; 110 | 111 | int main() {{ 112 | ABinderProcess_setThreadPoolMaxThreadCount(0); 113 | std::shared_ptr<{class_name}> {class_name_lower} = ndk::SharedRefBase::make<{class_name}>(); 114 | 115 | const std::string instance = std::string() + {class_name}::descriptor + "/default"; 116 | binder_status_t status = AServiceManager_addService({class_name_lower}->asBinder().get(), instance.c_str()); 117 | CHECK(status == STATUS_OK); 118 | 119 | ABinderProcess_joinThreadPool(); 120 | return EXIT_FAILURE; // should not reach 121 | }} 122 | """ 123 | 124 | class AIDLService: 125 | def __init__(self, fqname: str, includes: list[Path]): 126 | self.fqname = fqname 127 | self.includes = includes 128 | 129 | self.aidl_name, self.interface_name = self.fqname.rsplit('.', 1) 130 | self.hal_name = self.aidl_name.rsplit('.', 1)[1] 131 | self.class_name = 
self.interface_name.removeprefix("I") 132 | self.class_name_lower = self.class_name.lower() 133 | self.aidl_namespace = self.aidl_name.replace('.', "::") 134 | self.aidl_path = self.aidl_name.replace('.', "/") 135 | self.aidl_namespace_open = "\n".join([f"namespace {namespace} {{" 136 | for namespace in self.aidl_name.split('.')]) 137 | self.aidl_namespace_close = "\n".join([f"}} // namespace {namespace}" 138 | for namespace in self.aidl_name.split('.')[::-1]]) 139 | self.year = datetime.now().year 140 | 141 | self.interface = AIDLInterface(self.fqname, self.includes) 142 | 143 | def write_to_folder(self, dir: Path): 144 | dir.mkdir(exist_ok=True) 145 | open(dir / "Android.bp", 'w').write(self.get_android_bp()) 146 | open(dir / f"{self.aidl_name}-service.rc", 'w').write(self.get_init_rc()) 147 | open(dir / f"{self.aidl_name}-service.xml", 'w').write(self.get_vintf_fragment()) 148 | open(dir / f"{self.class_name}.cpp", 'w').write(self.get_main_cpp()) 149 | open(dir / f"{self.class_name}.h", 'w').write(self.get_main_h()) 150 | open(dir / "service.cpp", 'w').write(self.get_service_cpp()) 151 | 152 | def get_android_bp(self): 153 | return ANDROID_BP_TEMPLATE.format(year=self.year, 154 | aidl_name=self.aidl_name, 155 | class_name=self.class_name) 156 | 157 | def get_init_rc(self): 158 | return INIT_RC_TEMPLATE.format(hal_name=self.hal_name, 159 | aidl_name=self.aidl_name) 160 | 161 | def get_vintf_fragment(self): 162 | return VINTF_FRAGMENT_TEMPLATE.format(aidl_name=self.aidl_name, 163 | interface_name=self.interface_name) 164 | 165 | def get_main_cpp(self): 166 | return MAIN_CPP_TEMPLATE.format(year=self.year, 167 | class_name=self.class_name, 168 | aidl_namespace_open=self.aidl_namespace_open, 169 | methods_definitions=self._format_methods_definitions(), 170 | aidl_namespace_close=self.aidl_namespace_close) 171 | 172 | def get_main_h(self): 173 | return MAIN_H_TEMPLATE.format(year=self.year, 174 | aidl_path=self.aidl_path, 175 | class_name=self.class_name, 176 | aidl_namespace_open=self.aidl_namespace_open, 177 | using_namespaces=self._format_using_namespaces(), 178 | methods_declarations=self._format_methods_declarations(), 179 | aidl_namespace_close=self.aidl_namespace_close) 180 | 181 | def get_service_cpp(self): 182 | return SERVICE_CPP_TEMPLATE.format(year=self.year, 183 | class_name=self.class_name, 184 | aidl_namespace=self.aidl_namespace, 185 | class_name_lower=self.class_name_lower) 186 | 187 | def _format_using_namespaces(self): 188 | result = [] 189 | for import_types in self.interface.imports.values(): 190 | result.append(f"using ::aidl::{import_types.fqname.replace('.', '::')};") 191 | 192 | return "\n".join(result) 193 | 194 | def _format_methods_definitions(self): 195 | result = [] 196 | for method in self.interface.methods: 197 | args_formatted = ", ".join([f"{arg.arg_type} /*{arg.name}*/" for arg in method.args]) 198 | result.append(f"ndk::ScopedAStatus {self.class_name}::{method.name}({args_formatted}) {{\n" 199 | f" return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);\n" 200 | f"}}") 201 | 202 | return "\n\n".join(result) 203 | 204 | def _format_methods_declarations(self): 205 | result = [] 206 | for method in self.interface.methods: 207 | args_formatted = ", ".join([f"{arg.arg_type} {arg.name}" for arg in method.args]) 208 | result.append(f" ndk::ScopedAStatus {method.name}({args_formatted}) override;") 209 | 210 | return "\n".join(result) 211 | -------------------------------------------------------------------------------- /aidl-gen/aidl_gen/main.py: 
--------------------------------------------------------------------------------
from aidl_gen.aidl.interface import AIDLInterface
from aidl_gen.aidl.service import AIDLService
from argparse import ArgumentParser
from pathlib import Path

def main():
    parser = ArgumentParser(prog="aidl_gen")

    parser.add_argument("fqname", type=str,
                        help="Full qualifier of an AIDL interface (e.g. android.hardware.light.ILights)")
    parser.add_argument("-I", "--include", type=Path, action='append', required=True,
                        help="Folders to include that contains the AIDL interface "
                             "(note: use the folder where Android.bp resides, aka the top AIDL "
                             "folder), you can use multiple -I flags to include multiple "
                             "locations, but at least one is required")
    parser.add_argument("out_dir", type=Path,
                        help="Folders where the service will be written on")

    args = parser.parse_args()

    service = AIDLService(args.fqname, args.include)
    service.write_to_folder(args.out_dir)
--------------------------------------------------------------------------------
/aosp-merger/README.md:
--------------------------------------------------------------------------------
## LineageOS Merger Scripts

### Variables

`${TOP}/vendor/lineage/vars/` - This directory stores all variables for repositories that have upstreams that are regularly merged.

Standard Variables:

`common` - Stores the following data:

* `os_branch` - Previous/current LineageOS version
* `device_branch` - Previous/current branch name
* `{prev_,}common_aosp_tag` - Previous/current tracked AOSP tag
* `{prev_,}common_aosp_build_id` - Previous/current build ID string
* `topic` - The name of the topic to be used when pushing merges of newer tags to [Gerrit](https://review.lineageos.org) for review before merging

Special Variables:

`$platformName` - e.g. `qcom` - These files store tags specific to relevant non-AOSP repositories whose upstream repos are regularly merged, such as CAF/CLO repositories.

* e.g. `qcom` - Stores the names of the relevant SoC platforms mapped to the tag we currently track for that platform's repositories

`devices` - This file stores the matrix of devices we currently merge upstream repositories monthly for

`pixels` - This file stores the matrix of Pixel devices we currently merge upstream AOSP repositories monthly for, which correlates directly to the current list of Google-supported Pixel devices

`kernel_repos` - This file stores a matrix of kernel paths relevant to AOSP devices we currently merge upstream AOSP repositories monthly for

See `../pixel/README.md` for more
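As a point of reference, `common` is a plain shell fragment that the merger scripts `source`. A minimal sketch is shown below; the variable names are the ones listed above (plus `merge_method`, which `aosp-merger.sh` reads and is assumed here to live in `common`), while every value is purely illustrative:

```sh
# vendor/lineage/vars/common - illustrative values only
os_branch="lineage-21.0"                     # current LineageOS branch
device_branch="lineage-21.0"                 # current device branch name
prev_common_aosp_tag="android-14.0.0_r1"     # previously tracked AOSP tag
common_aosp_tag="android-14.0.0_r2"          # currently tracked AOSP tag
prev_common_aosp_build_id="XX1A.000000.000"  # previous build ID
common_aosp_build_id="XX1A.000000.001"       # current build ID
topic="2024-09-aosp-merge"                   # Gerrit topic for this month's merges
merge_method="merge"                         # aosp-merger.sh squashes when this is not "merge"
```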
### Scripts and usage

See `../pixel/README.md` for scripts to update Pixels

### Workflows

To merge a new AOSP tag platform-wide:

1. Wait for AOSP tags to show in https://android.googlesource.com/platform/manifest/

2. Wait for Pixel kernel tags to show in a post in https://groups.google.com/g/android-building

3. Edit `${TOP}/.repo/manifests/default.xml` with the new main tag

4. Edit `${TOP}/.repo/manifests/snippets/lineage.xml` and replace any existing repo that has a non-pinned custom tag set, using the new kernel tag from the latest-generation Pixel - e.g. `android-13.0.0_r0.55`

5. Edit `${TOP}/.repo/manifests/snippets/pixel.xml` and replace all entries with the relevant Pixel kernel tags, then commit the above changes

6. Upload the generated `LineageOS/android` change to [Gerrit](https://review.lineageos.org)

7. Execute `repo sync` on the working tree

8. Edit `${TOP}/vendor/lineage/vars/common`: move the currently tracked tag from `common_aosp_tag` to `prev_common_aosp_tag`, update `common_aosp_tag` to the newly tracked tag, do the same for `prev_common_aosp_build_id` and `common_aosp_build_id`, and lastly update the `topic` variable to reflect the current month

9. Run `aosp-merger/aosp-merger.sh`. This will take some time; it reads all the variables you set up above and merges the new tag into all relevant tracked repos. This will likely create conflicts in some forked repositories and ask you to resolve them. It then asks for a final confirmation and, after approval, uploads the merge to Gerrit for review.

10. Once testing of the merge is completed, a global committer or higher can run `aosp-merger/aosp-merger.sh submit-platform` to push the merge of the new tag to the HEAD of all relevant forked repositories

11. Directly after `submit-platform` is run, a Project Director must merge the `LineageOS/android` change on Gerrit uploaded as part of step 6 above
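Condensed into commands, the flow above looks roughly like the sketch below. This is only an illustration - it assumes the manifest and `vendor/lineage/vars/common` edits from steps 3-8 have already been made and committed, and it uses the same relative script path as the steps above:

```sh
# Step 7: sync the working tree after the manifest has been updated
repo sync

# Step 9: merge the new AOSP tag into all forked repos; the script prompts for
# conflict resolution and then asks before uploading the result to Gerrit
aosp-merger/aosp-merger.sh

# Step 10 (global committer or higher, once testing has passed): push the
# merged tag to the HEAD of all relevant forked repositories
aosp-merger/aosp-merger.sh submit-platform
```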
To merge a new AOSP tag to all currently Google-supported Pixel devices and their relevant dependency repositories:

1. Wait for factory images to show up in https://developers.google.com/android/images

2. Update the build ID in the relevant `$deviceName` vars files via `pixel/update-any-var.sh`:

e.g. `pixel/update-any-var.sh build_id TQ2A.230305.008.C1 sunfish bramble redfin barbet cheetah`

3. Git commit the `vendor/lineage/vars` variable updates and upload to Gerrit

4. Run `aosp-merger/aosp-merger.sh devices` to merge the newly entered AOSP tags into each supported device's device-tree and dependencies, and upload the merges to Gerrit

5. Run `aosp-merger/aosp-merger.sh kernels` to merge the newly entered AOSP kernel tags into each supported device's kernel-tree and dependencies, and upload the merges to Gerrit

6. Download the Pixel factory images and extract files and firmware - e.g. `source ${TOP}/vendor/lineage/vars/devices && for device in ${devices[@]}; do pixel/device.sh $device; done`

7. `cd` to each relevant device's `vendor/$oem/$deviceName` repository, as well as `vendor/firmware`, and `git add`/`git commit` the updated files

TODO: Automate this in the future

8. Update the build description/fingerprint for all supported Pixels by running `pixel/build-desc-fingerprint.sh` - after this you need to manually `cd` to each of the supported Pixel trees and upload the build description/fingerprint commits to Gerrit for review

TODO: Automate this in the future

9. When testing is done, push the device-specific tag merges to the relevant repository HEADs by running `aosp-merger/aosp-merger.sh submit-devices` - please note this can only be done by Pixel device maintainers OR Global Committers and above

NOTE: If you have your vendor repositories tracked somewhere you sync, you will also need to `cd` to those and `git push` them at this time by hand

10. Following the above, submit the kernel tag updates as well: `aosp-merger/aosp-merger.sh submit-kernels` - please note this can only be done by Pixel device maintainers OR Global Committers and above

To merge a new CAF/CLO tag to all forked repositories:

1. Fetch the latest tags for the supported SoCs and the current version of QSSI from https://wiki.codelinaro.org/en/clo/la/release

2. Edit `vendor/lineage/vars/qcom`, `git commit` and upload the change to Gerrit

3. Run the merger script for whatever platforms you have updated tags for, to create the merges and upload them to Gerrit - e.g. to merge on all supported platforms you'd run `for platform in qssi msm8953 sdm660 sdm845 msmnile kona lahaina waipio-vendor waipio-video; do aosp-merger/aosp-merger.sh clo $platform; done`

4. When testing is done, a global committer or higher can run the merger script to push the merges to the HEADs - e.g. to push the aforementioned merges on all supported platforms you'd run `for platform in qssi msm8953 sdm660 sdm845 msmnile kona lahaina waipio-vendor waipio-video; do aosp-merger/aosp-merger.sh submit-clo $platform; done`

--------------------------------------------------------------------------------
/aosp-merger/_merge_helper.sh:
--------------------------------------------------------------------------------
#!/bin/bash
#
# SPDX-FileCopyrightText: 2017, 2020-2022 The LineageOS Project
# SPDX-FileCopyrightText: 2021-2023 The Calyx Institute
#
# SPDX-License-Identifier: Apache-2.0
#

usage() {
    echo "Usage ${0} -p <project-path> -o <operation> -c <old-tag> -n <new-tag> -b <branch-suffix>"
}

# Verify argument count
if [ "${#}" -eq 0 ]; then
    usage
    exit 1
fi

while [ "${#}" -gt 0 ]; do
    case "${1}" in
        -p | --project-path )
            PROJECTPATH="${2}"; shift
            ;;
        -o | --operation )
            OPERATION="${2}"; shift
            ;;
        -c | --old-tag )
            OLDTAG="${2}"; shift
            ;;
        -n | --new-tag )
            NEWTAG="${2}"; shift
            ;;
        -b | --branch-suffix )
            BRANCHSUFFIX="${2}"; shift
            ;;
        * )
            usage
            exit 1
            ;;
    esac
    shift
done

if [ -z "${OPERATION}" ]; then
    OPERATION="merge"
elif [ "${OPERATION}" != "merge" -a "${OPERATION}" != "rebase" ]; then
    usage
    exit 1
fi

### CONSTANTS ###
readonly script_path="$(cd "$(dirname "$0")";pwd -P)"
readonly vars_path="${script_path}/../../../vendor/lineage/vars"

source "${vars_path}/common"

readonly hook="${script_path}/prepare-commit-msg"

TOP="${script_path}/../../.."

# Source build environment (needed for aospremote)
source "${TOP}/build/envsetup.sh"

BRANCH="${os_branch}"
STAGINGBRANCH="staging/${BRANCHSUFFIX}"

cd "${TOP}/${PROJECTPATH}"
# Ditch any existing staging branches
if git show-ref --verify --quiet refs/heads/"${STAGINGBRANCH}"; then
    repo abandon "${STAGINGBRANCH}" .
fi
repo start "${STAGINGBRANCH}" .
if [ -f ".gitupstream" ]; then
    git fetch -q --force --tags "$(cat .gitupstream)" "${NEWTAG}"
else
    aospremote | grep -v "Remote 'aosp' created"
    git fetch -q --force --tags aosp "${NEWTAG}"
fi

[[ ! -e .git/hooks/prepare-commit-msg ]] && cp "${hook}" .git/hooks/
chmod +x .git/hooks/prepare-commit-msg

if [ ! -z "${OLDTAG}" ]; then
    # Was there any change upstream? Skip if not.
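    # (If the ${OLDTAG}..${NEWTAG} diff is empty, the project is recorded as
    # "nochange" in ${MERGEDREPOS} and the staging branch is abandoned; the
    # merge-base check that follows only warns when upstream history was rewritten.)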
85 | if [[ -z "$(git diff --no-ext-diff ${OLDTAG} ${NEWTAG})" ]]; then 86 | echo -e "nochange\t\t${PROJECTPATH}" | tee -a "${MERGEDREPOS}" 87 | repo abandon "${STAGINGBRANCH}" . 88 | exit 0 89 | fi 90 | 91 | # Determine whether OLDTAG is an ancestor of NEWTAG 92 | # ie is history consistent. 93 | git merge-base --is-ancestor "${OLDTAG}" "${NEWTAG}" 94 | # If no, print a warning message. 95 | if [[ "$?" -eq 1 ]]; then 96 | echo -n "#### Warning: project ${PROJECTPATH} old tag ${OLDTAG} is not an ancestor " 97 | echo "of new tag ${NEWTAG} ####" 98 | fi 99 | fi 100 | 101 | if [[ "${OPERATION}" == "merge" ]]; then 102 | echo -e "\n#### Merging ${NEWTAG} into ${PROJECTPATH} ####" 103 | git merge --no-commit --log "${NEWTAG}" && git commit --no-edit 104 | 105 | # Check if we've actually changed anything after the merge 106 | # If we haven't, just abandon the branch 107 | if [[ -z "$(git diff --no-ext-diff HEAD m/${os_branch})" && -z "$(git status --porcelain)" ]]; then 108 | echo -e "nochange\t\t${PROJECTPATH}" | tee -a "${MERGEDREPOS}" 109 | repo abandon "${STAGINGBRANCH}" . 110 | exit 0 111 | fi 112 | elif [[ "${OPERATION}" == "rebase" ]]; then 113 | echo -e "\n#### Rebasing ${PROJECTPATH} onto ${NEWTAG} ####" 114 | git rebase --onto "${NEWTAG}" "${OLDTAG}" 115 | fi 116 | 117 | CONFLICT="" 118 | if [[ -n "$(git status --porcelain)" ]]; then 119 | CONFLICT="conflict-" 120 | fi 121 | echo -e "${CONFLICT}${OPERATION}\t\t${PROJECTPATH}" | tee -a "${MERGEDREPOS}" 122 | -------------------------------------------------------------------------------- /aosp-merger/_subtree_merge_helper.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # SPDX-FileCopyrightText: 2017, 2020-2022 The LineageOS Project 4 | # SPDX-FileCopyrightText: 2021-2023 The Calyx Institute 5 | # 6 | # SPDX-License-Identifier: Apache-2.0 7 | # 8 | 9 | usage() { 10 | echo "Usage ${0} -p -o -c -n -b " 11 | } 12 | 13 | # Verify argument count 14 | if [ "${#}" -eq 0 ]; then 15 | usage 16 | exit 1 17 | fi 18 | 19 | while [ "${#}" -gt 0 ]; do 20 | case "${1}" in 21 | -p | --project-path ) 22 | PROJECTPATH="${2}"; shift 23 | ;; 24 | -o | --operation ) 25 | OPERATION="${2}"; shift 26 | ;; 27 | -c | --old-tag ) 28 | OLDTAG="${2}"; shift 29 | ;; 30 | -n | --new-tag ) 31 | NEWTAG="${2}"; shift 32 | ;; 33 | -b | --branch-suffix ) 34 | BRANCHSUFFIX="${2}"; shift 35 | ;; 36 | * ) 37 | usage 38 | exit 1 39 | ;; 40 | esac 41 | shift 42 | done 43 | 44 | if [ -z "${OPERATION}" ]; then 45 | OPERATION="merge" 46 | elif [ "${OPERATION}" != "merge" -a "${OPERATION}" != "rebase" ]; then 47 | usage 48 | exit 1 49 | fi 50 | 51 | ### CONSTANTS ### 52 | readonly script_path="$(cd "$(dirname "$0")";pwd -P)" 53 | readonly vars_path="${script_path}/../../../vendor/lineage/vars" 54 | 55 | source "${vars_path}/common" 56 | 57 | readonly hook="${script_path}/prepare-commit-msg" 58 | 59 | TOP="${script_path}/../../.." 60 | 61 | # Source build environment (needed for aospremote) 62 | source "${TOP}/build/envsetup.sh" 63 | 64 | BRANCH="${os_branch}" 65 | STAGINGBRANCH="staging/${BRANCHSUFFIX}" 66 | 67 | cd "${TOP}/${PROJECTPATH}" 68 | # Ditch any existing staging branches 69 | if git show-ref --verify --quiet refs/heads/"${STAGINGBRANCH}"; then 70 | repo abandon "${STAGINGBRANCH}" . 71 | fi 72 | repo start "${STAGINGBRANCH}" . 
73 | if [ -f ".gitupstream" ]; then 74 | git fetch -q --force --tags "$(cat .gitupstream)" "${NEWTAG}" 75 | else 76 | aospremote | grep -v "Remote 'aosp' created" 77 | git fetch -q --force --tags aosp "${NEWTAG}" 78 | fi 79 | 80 | [[ ! -e .git/hooks/prepare-commit-msg ]] && cp "${hook}" .git/hooks/ 81 | chmod +x .git/hooks/prepare-commit-msg 82 | 83 | if [ ! -z "${OLDTAG}" ]; then 84 | # Was there any change upstream? Skip if not. 85 | if [[ -z "$(git diff --no-ext-diff ${OLDTAG} ${NEWTAG})" ]]; then 86 | echo -e "nochange\t\t${PROJECTPATH}" | tee -a "${MERGEDREPOS}" 87 | repo abandon "${STAGINGBRANCH}" . 88 | exit 0 89 | fi 90 | 91 | # Determine whether OLDTAG is an ancestor of NEWTAG 92 | # ie is history consistent. 93 | git merge-base --is-ancestor "${OLDTAG}" "${NEWTAG}" 94 | # If no, print a warning message. 95 | if [[ "$?" -eq 1 ]]; then 96 | echo -n "#### Warning: project ${PROJECTPATH} old tag ${OLDTAG} is not an ancestor " 97 | echo "of new tag ${NEWTAG} ####" 98 | fi 99 | fi 100 | 101 | CONFLICT="" 102 | 103 | echo -e "\n#### Merging ${NEWTAG} into ${PROJECTPATH} ####" 104 | git merge --no-commit --log "${NEWTAG}" 105 | 106 | if [[ -z "$(git diff --no-ext-diff HEAD)" ]]; then 107 | echo "#### Skipping empty merge ####" 108 | git reset --hard 109 | else 110 | git commit --no-edit 111 | if [[ -n "$(git status --porcelain)" ]]; then 112 | CONFLICT="conflict-" 113 | fi 114 | read -p "Waiting for conflict resolution before continuing. Press enter when done." 115 | 116 | echo $(git log -1 --pretty=%b | tail -2) > .git/CHANGE_ID 117 | fi 118 | 119 | for subtree in `find -mindepth 2 -type f -name .gitupstream | cut -d / -f 2- | sed s#/.gitupstream##`; do 120 | gitupstream="${subtree}/.gitupstream" 121 | git fetch -q --force --tags "$(cat ${gitupstream})" "${NEWTAG}" 122 | git merge -X subtree="$subtree" --no-commit --log "${NEWTAG}" 123 | 124 | if [[ -z "$(git diff --no-ext-diff HEAD)" ]]; then 125 | echo "#### Skipping empty merge on ${subtree} ####" 126 | git reset --hard 127 | continue 128 | fi 129 | 130 | git commit --no-edit 131 | if [[ -n "$(git status --porcelain)" && -z "${CONFLICT}" ]]; then 132 | CONFLICT="conflict-" 133 | fi 134 | read -p "Waiting for conflict resolution before continuing. Press enter when done." 135 | 136 | if [[ ! -f ".git/CHANGE_ID" ]]; then 137 | echo $(git log -1 --pretty=%b | tail -2) > .git/CHANGE_ID 138 | fi 139 | done 140 | 141 | # Check if we've actually changed anything after the merge 142 | # If we haven't, just abandon the branch 143 | if [[ -z "$(git diff --no-ext-diff HEAD m/${os_branch})" && -z "$(git status --porcelain)" ]]; then 144 | echo -e "nochange\t\t${PROJECTPATH}" | tee -a "${MERGEDREPOS}" 145 | repo abandon "${STAGINGBRANCH}" . 
146 | exit 0 147 | fi 148 | 149 | echo -e "${CONFLICT}${OPERATION}\t\t${PROJECTPATH}" | tee -a "${MERGEDREPOS}" 150 | -------------------------------------------------------------------------------- /aosp-merger/aosp-merger.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # SPDX-FileCopyrightText: 2022 The Calyx Institute 4 | # SPDX-FileCopyrightText: 2022 The LineageOS Project 5 | # 6 | # SPDX-License-Identifier: Apache-2.0 7 | # 8 | # merge-aosp: 9 | # 10 | # Merge the latest AOSP release based on variables 11 | # 12 | # 13 | ############################################################################## 14 | 15 | 16 | ### SET ### 17 | 18 | # use bash strict mode 19 | set -euo pipefail 20 | 21 | 22 | ### TRAPS ### 23 | 24 | # trap signals for clean exit 25 | trap 'exit $?' EXIT 26 | trap 'error_m interrupted!' SIGINT 27 | 28 | ### CONSTANTS ### 29 | readonly script_path="$(cd "$(dirname "$0")";pwd -P)" 30 | readonly vars_path="${script_path}/../../../vendor/lineage/vars" 31 | 32 | source "${vars_path}/common" 33 | source "${vars_path}/pixels" 34 | source "${vars_path}/kernel_repos" 35 | source "${vars_path}/qcom" 36 | 37 | TOP="${script_path}/../../.." 38 | 39 | # make sure we have consistent and readable commit messages 40 | export LC_MESSAGES=C 41 | export LC_TIME=C 42 | 43 | ## HELP MESSAGE (USAGE INFO) 44 | # TODO 45 | 46 | ### FUNCTIONS ### 47 | 48 | # Reverse merge AOSP to AOSP (for testing only) 49 | merge_aosp() { 50 | "${script_path}"/merge-aosp.sh --old-tag "${common_aosp_tag}" --new-tag "${prev_common_aosp_tag}" --branch-suffix "${common_aosp_tag}_merge-${prev_common_aosp_tag}" 51 | } 52 | 53 | # Merge AOSP to forks 54 | merge_aosp_forks() { 55 | "${script_path}"/merge-aosp-forks.sh --old-tag "${prev_common_aosp_tag}" --new-tag "${common_aosp_tag}" --branch-suffix "${os_branch}_merge-${common_aosp_tag}" 56 | } 57 | 58 | post_aosp_merge() { 59 | if [ "${merge_method}" = "merge" ]; then 60 | return 61 | else 62 | "${script_path}"/squash.sh --branch-suffix "${os_branch}_merge-${common_aosp_tag}" 63 | fi 64 | } 65 | 66 | upload_aosp_merge_to_review() { 67 | if [ "${merge_method}" = "merge" ]; then 68 | "${script_path}"/upload-merge.sh --branch-suffix "${os_branch}_merge-${common_aosp_tag}" 69 | else 70 | "${script_path}"/upload-squash.sh --branch-suffix "${os_branch}_merge-${common_aosp_tag}" 71 | fi 72 | } 73 | 74 | push_aosp_merge() { 75 | "${script_path}"/push-merge.sh --branch-suffix "${os_branch}_merge-${common_aosp_tag}" 76 | } 77 | 78 | # Merge AOSP to pixel device forks 79 | merge_pixel_device() { 80 | for repo in ${device_repos[@]}; do 81 | "${script_path}"/_subtree_merge_helper.sh --project-path "${repo}" --old-tag "${prev_aosp_tag}" --new-tag "${aosp_tag}" --branch-suffix "${device_branch}_merge-${aosp_tag}" 82 | done 83 | } 84 | 85 | post_pixel_device_merge() { 86 | if [ "${merge_method}" = "merge" ]; then 87 | return 88 | else 89 | "${script_path}"/squash.sh --new-tag "${aosp_tag}" --branch-suffix "${device_branch}_merge-${aosp_tag}" --pixel 90 | fi 91 | } 92 | 93 | upload_pixel_device_to_review() { 94 | if [ "${merge_method}" = "merge" ]; then 95 | "${script_path}"/upload-merge.sh --branch-suffix "${device_branch}_merge-${aosp_tag}" --pixel 96 | else 97 | "${script_path}"/upload-squash.sh --branch-suffix "${device_branch}_merge-${aosp_tag}" --pixel 98 | fi 99 | } 100 | 101 | push_device_merge() { 102 | "${script_path}"/push-merge.sh --branch-suffix "${device_branch}_merge-${aosp_tag}" --pixel 103 | } 104 | 
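# Note: the kernel-merge helpers below use ${device_kernel_repo} (set in main()'s
# "kernels" handling) together with ${prev_kernel_tag}/${kernel_tag}, which are
# expected to be provided by the per-kernel vars file sourced there.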
105 | # Merge AOSP to pixel kernel forks 106 | merge_pixel_kernel() { 107 | "${script_path}"/_subtree_merge_helper.sh --project-path "${device_kernel_repo}" --old-tag "${prev_kernel_tag}" --new-tag "${kernel_tag}" --branch-suffix "${device_branch}_merge-${kernel_tag}" 108 | } 109 | 110 | post_pixel_kernel_merge() { 111 | if [ "${merge_method}" = "merge" ]; then 112 | return 113 | else 114 | "${script_path}"/squash.sh --new-tag "${kernel_tag}" --branch-suffix "${device_branch}_merge-${kernel_tag}" --pixel 115 | fi 116 | } 117 | 118 | upload_pixel_kernel_to_review() { 119 | if [ "${merge_method}" = "merge" ]; then 120 | "${script_path}"/upload-merge.sh --branch-suffix "${device_branch}_merge-${kernel_tag}" --pixel 121 | else 122 | "${script_path}"/upload-squash.sh --branch-suffix "${device_branch}_merge-${kernel_tag}" --pixel 123 | fi 124 | } 125 | 126 | push_kernel_merge() { 127 | "${script_path}"/push-merge.sh --branch-suffix "${device_branch}_merge-${kernel_tag}" --pixel 128 | } 129 | 130 | # Merge CLO to forks 131 | merge_clo() { 132 | "${script_path}"/_merge_helper.sh --project-path "${repo}" --new-tag "${1}" --branch-suffix "${os_branch}_merge-${1}" 133 | } 134 | 135 | squash_clo_merge() { 136 | "${script_path}"/squash.sh --new-tag "${1}" --branch-suffix "${os_branch}_merge-${1}" 137 | } 138 | 139 | upload_squash_clo_to_review() { 140 | if [ "${merge_method}" = "merge" ]; then 141 | "${script_path}"/upload-merge.sh --new-tag "${1}" --branch-suffix "${os_branch}_merge-${1}" 142 | else 143 | "${script_path}"/upload-squash.sh --new-tag "${1}" --branch-suffix "${os_branch}_merge-${1}" 144 | fi 145 | } 146 | 147 | push_clo_merge() { 148 | "${script_path}"/push-merge.sh --branch-suffix "${os_branch}_merge-${1}" 149 | } 150 | 151 | # error message 152 | # ARG1: error message for STDERR 153 | # ARG2: error status 154 | error_m() { 155 | echo "ERROR: ${1:-'failed.'}" 1>&2 156 | return "${2:-1}" 157 | } 158 | 159 | # print help message. 160 | help_message() { 161 | echo "${help_message:-'No help available.'}" 162 | } 163 | 164 | main() { 165 | if [ "$#" -eq 0 ]; then 166 | export MERGEDREPOS="${TOP}/merged_repos.txt" 167 | # Remove any existing list of merged repos file 168 | rm -f "${MERGEDREPOS}" 169 | 170 | merge_aosp_forks 171 | # Run this to print list of conflicting repos 172 | cat "${MERGEDREPOS}" | grep -w conflict-merge || true 173 | read -p "Waiting for conflict resolution. Press enter when done." 174 | post_aosp_merge 175 | upload_aosp_merge_to_review 176 | echo "Don't forget to update the manifest!" 177 | 178 | unset MERGEDREPOS 179 | elif [ "${1}" = "aosp" ]; then 180 | export MERGEDREPOS="${TOP}/merged_repos_aosp.txt" 181 | # Remove any existing list of merged repos file 182 | rm -f "${MERGEDREPOS}" 183 | 184 | merge_aosp 185 | 186 | unset MERGEDREPOS 187 | elif [ "${1}" = "devices" ]; then 188 | for device in ${devices[@]}; do 189 | ( 190 | source "${vars_path}/${device}" 191 | export MERGEDREPOS="${TOP}/merged_repos_${device}.txt" 192 | # Remove any existing list of merged repos file 193 | rm -f "${MERGEDREPOS}" 194 | 195 | merge_pixel_device 196 | # Run this to print list of conflicting repos 197 | cat "${MERGEDREPOS}" | grep -w conflict-merge || true 198 | read -p "Waiting for conflict resolution. Press enter when done." 
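            # With conflicts resolved by hand, squash the staging branch
            # (skipped when merge_method is "merge") and upload the result to review.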
199 | post_pixel_device_merge 200 | upload_pixel_device_to_review 201 | 202 | unset MERGEDREPOS 203 | ) 204 | done 205 | elif [ "${1}" = "kernels" ]; then 206 | for kernel in ${kernel_repos[@]}; do 207 | ( 208 | readonly kernel_short="$(echo ${kernel} | cut -d / -f 3)" 209 | source "${vars_path}/${kernel_short}" 210 | 211 | readonly device_kernel_repo="${kernel}" 212 | 213 | export MERGEDREPOS="${TOP}/merged_repos_${kernel_short}_kernel.txt" 214 | # Remove any existing list of merged repos file 215 | rm -f "${MERGEDREPOS}" 216 | 217 | merge_pixel_kernel 218 | # Run this to print list of conflicting repos 219 | cat "${MERGEDREPOS}" | grep -w conflict-merge || true 220 | read -p "Waiting for conflict resolution. Press enter when done." 221 | post_pixel_kernel_merge 222 | upload_pixel_kernel_to_review 223 | 224 | unset MERGEDREPOS 225 | ) 226 | done 227 | elif [ "${1}" = "clo" ]; then 228 | qcom_tag="${qcom_group_revision[${2}]}" 229 | 230 | export MERGEDREPOS="${TOP}/merged_repos_clo_${2}.txt" 231 | # Remove any existing list of merged repos file 232 | rm -f "${MERGEDREPOS}" 233 | 234 | for repo in $(repo list -p -g ${2}); do 235 | ( 236 | merge_clo "${qcom_tag}" 237 | ) 238 | done 239 | 240 | # Run this to print list of conflicting repos 241 | cat "${MERGEDREPOS}" | grep -w conflict-merge || true 242 | read -p "Waiting for conflict resolution. Press enter when done." 243 | squash_clo_merge "${qcom_tag}" 244 | upload_squash_clo_to_review "${qcom_tag}" 245 | 246 | unset MERGEDREPOS 247 | elif [ "${1}" = "submit-platform" ]; then 248 | export MERGEDREPOS="${TOP}/merged_repos.txt" 249 | 250 | push_aosp_merge 251 | 252 | unset MERGEDREPOS 253 | elif [ "${1}" = "submit-devices" ]; then 254 | for device in ${devices[@]}; do 255 | ( 256 | source "${vars_path}/${device}" 257 | export MERGEDREPOS="${TOP}/merged_repos_${device}.txt" 258 | 259 | push_device_merge 260 | 261 | unset MERGEDREPOS 262 | ) 263 | done 264 | elif [ "${1}" = "submit-kernels" ]; then 265 | for kernel in ${kernel_repos[@]}; do 266 | ( 267 | readonly kernel_short="$(echo ${kernel} | cut -d / -f 3)" 268 | source "${vars_path}/${kernel_short}" 269 | export MERGEDREPOS="${TOP}/merged_repos_${kernel_short}_kernel.txt" 270 | 271 | push_kernel_merge 272 | 273 | unset MERGEDREPOS 274 | ) 275 | done 276 | elif [ "${1}" = "submit-clo" ]; then 277 | qcom_tag="${qcom_group_revision[${2}]}" 278 | 279 | export MERGEDREPOS="${TOP}/merged_repos_clo_${2}.txt" 280 | 281 | push_clo_merge "${qcom_tag}" 282 | 283 | unset MERGEDREPOS 284 | fi 285 | } 286 | 287 | ### RUN PROGRAM ### 288 | 289 | main "${@}" 290 | 291 | 292 | ## 293 | -------------------------------------------------------------------------------- /aosp-merger/branches_rebase.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # SPDX-FileCopyrightText: 2017 The LineageOS Project 4 | # 5 | # SPDX-License-Identifier: Apache-2.0 6 | # 7 | 8 | ##### 9 | # Rebase your local working branches onto a new "upstream" branch. 10 | # Local branch list is defined in branches.list 11 | # (and can be created with branches_save.sh) 12 | # If the upstream branch doesn't exist (eg perhaps in lineage-sdk), 13 | # simply switch the working branch instead. 14 | 15 | if [ ! 
-e "build/envsetup.sh" ]; then 16 | echo "Must run from root of repo" 17 | exit 1 18 | fi 19 | 20 | if [ "$#" -ne 1 ]; then 21 | echo "Usage ${0} " 22 | exit 1 23 | fi 24 | REBASEONTO="${1}" 25 | 26 | TOP="${PWD}" 27 | BRANCHLIST="${TOP}/branches.list" 28 | 29 | cat "${BRANCHLIST}" | while read l; do 30 | set ${l} 31 | PROJECTPATH="${1}" 32 | BRANCH="${2}" 33 | NEWBRANCH="${2}-rebase" 34 | cd "${TOP}/${PROJECTPATH}" 35 | 36 | # Sanity check 37 | [[ -n "$(git status --porcelain)" ]]; then 38 | echo -n "!!!! Project ${PROJECTPATH} has uncommitted files, " 39 | echo "not switching to branch ${BRANCH} (skipping) !!!!" 40 | continue 41 | fi 42 | 43 | # Check the $REBASEONTO branch actually exists 44 | git show-ref "refs/heads/${REBASEONTO}" >/dev/null 45 | if [ "$?" -ne 0 ]; then 46 | # Nope 47 | echo -n "#### Project ${PROJECTPATH} branch ${REBASEONTO} does not exist, " 48 | echo "switching to ${BRANCH} instead ####" 49 | git checkout "${BRANCH}" 50 | else 51 | echo "#### Creating ${PROJECTPATH} branch ${NEWBRANCH} from ${BRANCH} ####" 52 | repo abandon "${NEWBRANCH}" . 53 | repo start "${NEWBRANCH}" . 54 | git reset --hard "${BRANCH}" 55 | echo -n "#### Project ${PROJECTPATH} Rebasing branch ${NEWBRANCH} " 56 | echo "on top of ${REBASEONTO} ####" 57 | git rebase --onto "${REBASEONTO}" 58 | fi 59 | done 60 | -------------------------------------------------------------------------------- /aosp-merger/branches_restore.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # SPDX-FileCopyrightText: 2017 The LineageOS Project 4 | # 5 | # SPDX-License-Identifier: Apache-2.0 6 | # 7 | 8 | if [ ! -e "build/envsetup.sh" ]; then 9 | echo "Must run from root of repo" 10 | exit 1 11 | fi 12 | 13 | TOP="${PWD}" 14 | BRANCHLIST="${TOP}/branches.list" 15 | 16 | cat "${BRANCHLIST}" | while read l; do 17 | set ${l} 18 | PROJECTPATH="${1}" 19 | BRANCH="${2}" 20 | cd "${TOP}/${PROJECTPATH}" 21 | 22 | # Check if we're on this branch already 23 | CURBRANCH=$(git status -b --porcelain | head -1 | awk '{print $2}' | sed 's/\.\.\..*//') 24 | if [ "${CURBRANCH}" == "${BRANCH}" ]; then 25 | echo "#### Project ${PROJECTPATH} is already on branch ${BRANCH} ####" 26 | continue 27 | fi 28 | 29 | # Sanity check 30 | if [[ -n "$(git status --porcelain)" ]]; then 31 | echo -n "#!#! Project ${PROJECTPATH} has uncommitted files, " 32 | echo "not switching to branch ${BRANCH} #!#!" 33 | exit 1 34 | fi 35 | 36 | echo "#### Project ${PROJECTPATH} Switching to branch ${BRANCH} ####" 37 | git checkout "${BRANCH}" 38 | done 39 | -------------------------------------------------------------------------------- /aosp-merger/branches_save.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # SPDX-FileCopyrightText: 2017 The LineageOS Project 4 | # 5 | # SPDX-License-Identifier: Apache-2.0 6 | # 7 | 8 | if [ ! 
-e "build/envsetup.sh" ]; then 9 | echo "Must run from root of repo" 10 | exit 1 11 | fi 12 | 13 | TOP="${PWD}" 14 | BRANCHLIST="${TOP}/branches.list" 15 | 16 | # Example repo status output: 17 | #project build/make/ branch x 18 | #project device/huawei/angler/ branch x 19 | 20 | repo status | grep '^project ' | while read l; do 21 | set ${l} 22 | PROJECTPATH=$(echo ${2} | sed 's|/$||') 23 | BRANCH="${4}" 24 | echo "${PROJECTPATH} ${BRANCH}" 25 | done | sort > "${BRANCHLIST}" 26 | -------------------------------------------------------------------------------- /aosp-merger/merge-aosp-forks.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # SPDX-FileCopyrightText: 2017, 2020-2022 The LineageOS Project 4 | # SPDX-FileCopyrightText: 2021-2023 The Calyx Institute 5 | # 6 | # SPDX-License-Identifier: Apache-2.0 7 | # 8 | 9 | usage() { 10 | echo "Usage ${0} -o -c -n -b " 11 | } 12 | 13 | # Verify argument count 14 | if [ "${#}" -eq 0 ]; then 15 | usage 16 | exit 1 17 | fi 18 | 19 | while [ "${#}" -gt 0 ]; do 20 | case "${1}" in 21 | -o | --operation ) 22 | OPERATION="${2}"; shift 23 | ;; 24 | -c | --old-tag ) 25 | OLDTAG="${2}"; shift 26 | ;; 27 | -n | --new-tag ) 28 | NEWTAG="${2}"; shift 29 | ;; 30 | -b | --branch-suffix ) 31 | BRANCHSUFFIX="${2}"; shift 32 | ;; 33 | * ) 34 | usage 35 | exit 1 36 | ;; 37 | esac 38 | shift 39 | done 40 | 41 | if [ -z "${OPERATION}" ]; then 42 | OPERATION="merge" 43 | elif [ "${OPERATION}" != "merge" -a "${OPERATION}" != "rebase" ]; then 44 | usage 45 | exit 1 46 | fi 47 | 48 | ### CONSTANTS ### 49 | readonly script_path="$(cd "$(dirname "$0")";pwd -P)" 50 | readonly vars_path="${script_path}/../../../vendor/lineage/vars" 51 | 52 | source "${vars_path}/common" 53 | 54 | TOP="${script_path}/../../.." 55 | MANIFEST="${TOP}/.repo/manifests/default.xml" 56 | BRANCH="${os_branch}" 57 | STAGINGBRANCH="staging/${BRANCHSUFFIX}" 58 | 59 | # Build list of forked repos 60 | PROJECTPATHS=$(grep "name=\"LineageOS/" "${MANIFEST}" | sed -n 's/.*path="\([^"]\+\)".*/\1/p') 61 | 62 | echo -e "\n#### Old tag = ${OLDTAG} Branch = ${BRANCH} Staging branch = ${STAGINGBRANCH} ####" 63 | 64 | # Make sure manifest and forked repos are in a consistent state 65 | echo -e "\n#### Verifying there are no uncommitted changes on forked AOSP projects ####" 66 | for PROJECTPATH in ${PROJECTPATHS} .repo/manifests; do 67 | cd "${TOP}/${PROJECTPATH}" 68 | if [[ -n "$(git status --porcelain)" ]]; then 69 | echo "Path ${PROJECTPATH} has uncommitted changes. Please fix." 
70 | exit 1 71 | fi 72 | done 73 | echo "#### Verification complete - no uncommitted changes found ####" 74 | 75 | # Iterate over each forked project 76 | for PROJECTPATH in ${PROJECTPATHS}; do 77 | "${script_path}"/_merge_helper.sh --project-path "${PROJECTPATH}" --operation "${OPERATION}" --old-tag "${OLDTAG}" --new-tag "${NEWTAG}" --branch-suffix "${BRANCHSUFFIX}" 78 | done 79 | -------------------------------------------------------------------------------- /aosp-merger/merge-aosp.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # SPDX-FileCopyrightText: 2017, 2020-2022 The LineageOS Project 4 | # SPDX-FileCopyrightText: 2021-2023 The Calyx Institute 5 | # 6 | # SPDX-License-Identifier: Apache-2.0 7 | # 8 | 9 | usage() { 10 | echo "Usage ${0} -o -c -n -b " 11 | echo "Example ${0} merge android-12.0.0_r26 android-12.0.0_r18" 12 | } 13 | 14 | # Verify argument count 15 | if [ "${#}" -eq 0 ]; then 16 | usage 17 | exit 1 18 | fi 19 | 20 | while [ "${#}" -gt 0 ]; do 21 | case "${1}" in 22 | -o | --operation ) 23 | OPERATION="${2}"; shift 24 | ;; 25 | -c | --old-tag ) 26 | OLDTAG="${2}"; shift 27 | ;; 28 | -n | --new-tag ) 29 | NEWTAG="${2}"; shift 30 | ;; 31 | -b | --branch-suffix ) 32 | BRANCHSUFFIX="${2}"; shift 33 | ;; 34 | * ) 35 | usage 36 | exit 1 37 | ;; 38 | esac 39 | shift 40 | done 41 | 42 | if [ -z "${OPERATION}" ]; then 43 | OPERATION="merge" 44 | elif [ "${OPERATION}" != "merge" -a "${OPERATION}" != "rebase" ]; then 45 | usage 46 | exit 1 47 | fi 48 | 49 | ### CONSTANTS ### 50 | readonly script_path="$(cd "$(dirname "$0")";pwd -P)" 51 | readonly vars_path="${script_path}/../../../vendor/lineage/vars" 52 | 53 | source "${vars_path}/common" 54 | 55 | TOP="${script_path}/../../.." 56 | MANIFEST="${TOP}/.repo/manifests/default.xml" 57 | STAGINGBRANCH="staging/${BRANCHSUFFIX}" 58 | 59 | # Build list of AOSP repos 60 | PROJECTPATHS=$(grep -v "remote=\"gitlab" "${MANIFEST}" | grep -v "clone-depth=\"1" | sed -n 's/.*path="\([^"]\+\)".*/\1/p') 61 | 62 | echo -e "\n#### Old tag = ${OLDTAG} New tag = ${NEWTAG} Staging branch = ${STAGINGBRANCH} ####" 63 | 64 | # Make sure manifest and forked repos are in a consistent state 65 | echo -e "\n#### Verifying there are no uncommitted changes on AOSP projects ####" 66 | for PROJECTPATH in ${PROJECTPATHS} .repo/manifests; do 67 | cd "${TOP}/${PROJECTPATH}" 68 | if [[ -n "$(git status --porcelain)" ]]; then 69 | echo "Path ${PROJECTPATH} has uncommitted changes. Please fix." 70 | exit 1 71 | fi 72 | done 73 | echo "#### Verification complete - no uncommitted changes found ####" 74 | 75 | # Iterate over each forked project 76 | for PROJECTPATH in ${PROJECTPATHS}; do 77 | "${script_path}"/_merge_helper.sh --project-path "${PROJECTPATH}" --operation "${OPERATION}" --old-tag "${OLDTAG}" --new-tag "${NEWTAG}" --branch-suffix "${BRANCHSUFFIX}" 78 | done 79 | -------------------------------------------------------------------------------- /aosp-merger/prepare-commit-msg: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # SPDX-FileCopyrightText: 2023 The LineageOS Project 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | case "$2,$3" in 7 | merge,) 8 | if grep -q "# It looks like you may be committing a merge." "$1"; then 9 | sed -i -e '/# ------------------------ >8 ------------------------/,+3d' "$1" 10 | /usr/bin/perl -i.bak -ne 's/^#// if /^# Conflicts/ .. 
/^#\R/; print' "$1" 11 | sed -i '/ It looks like you may be committing a merge./i # ------------------------ >8 ------------------------' "$1" 12 | fi 13 | ;; 14 | 15 | *) 16 | ;; 17 | esac 18 | -------------------------------------------------------------------------------- /aosp-merger/push-merge.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # SPDX-FileCopyrightText: 2017, 2020-2022 The LineageOS Project 4 | # SPDX-FileCopyrightText: 2021-2023 The Calyx Institute 5 | # 6 | # SPDX-License-Identifier: Apache-2.0 7 | # 8 | 9 | usage() { 10 | echo "Usage ${0} -b --pixel" 11 | } 12 | 13 | # Verify argument count 14 | if [ "${#}" -eq 0 ]; then 15 | usage 16 | exit 1 17 | fi 18 | 19 | PIXEL=false 20 | 21 | while [ "${#}" -gt 0 ]; do 22 | case "${1}" in 23 | -b | --branch-suffix ) 24 | BRANCHSUFFIX="${2}"; shift 25 | ;; 26 | -p | --pixel ) 27 | PIXEL=true; shift 28 | ;; 29 | * ) 30 | usage 31 | exit 1 32 | ;; 33 | esac 34 | shift 35 | done 36 | 37 | ### CONSTANTS ### 38 | readonly script_path="$(cd "$(dirname "$0")";pwd -P)" 39 | readonly vars_path="${script_path}/../../../vendor/lineage/vars" 40 | 41 | source "${vars_path}/common" 42 | 43 | TOP="${script_path}/../../.." 44 | STAGINGBRANCH="staging/${BRANCHSUFFIX}" 45 | 46 | # Source build environment (needed for lineageremote) 47 | source "${TOP}/build/envsetup.sh" 48 | 49 | # List of merged repos 50 | PROJECTPATHS=$(cat ${MERGEDREPOS} | grep -w merge | awk '{printf "%s\n", $2}') 51 | 52 | echo -e "\n#### Staging branch = ${STAGINGBRANCH} ####" 53 | 54 | # Make sure manifest and forked repos are in a consistent state 55 | echo -e "\n#### Verifying there are no uncommitted changes on forked AOSP projects ####" 56 | for PROJECTPATH in ${PROJECTPATHS} .repo/manifests; do 57 | cd "${TOP}/${PROJECTPATH}" 58 | if [[ -n "$(git status --porcelain)" ]]; then 59 | echo "Path ${PROJECTPATH} has uncommitted changes. Please fix." 60 | exit 1 61 | fi 62 | done 63 | echo "#### Verification complete - no uncommitted changes found ####" 64 | 65 | echo -e "\n#### $(basename ${MERGEDREPOS}) ####" 66 | read -p "Pushing ${STAGINGBRANCH}. Press enter to confirm." 
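# For each merged project: resolve the target branch (device_branch when
# --pixel is set, otherwise the staging branch's configured upstream, falling
# back to os_branch), check out the staging branch, make sure the "lineage"
# remote exists, and push it directly to refs/heads/<branch>.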
67 | 68 | # Iterate over each forked project 69 | for PROJECTPATH in ${PROJECTPATHS}; do 70 | cd "${TOP}/${PROJECTPATH}" 71 | 72 | if [ "${PIXEL}" = true ]; then 73 | BRANCH="${device_branch}" 74 | else 75 | BRANCH=$(git config --get branch.${STAGINGBRANCH}.merge | sed 's|refs/heads/||') 76 | if [ -z "${BRANCH}" ]; then 77 | BRANCH="${os_branch}" 78 | fi 79 | fi 80 | 81 | echo -e "\n#### Submitting ${PROJECTPATH} merge ####" 82 | git checkout "${STAGINGBRANCH}" 83 | lineageremote | grep -v "Remote 'lineage' created" 84 | git push lineage HEAD:refs/heads/"${BRANCH}" 85 | done 86 | -------------------------------------------------------------------------------- /aosp-merger/squash.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # SPDX-FileCopyrightText: 2017, 2020-2022 The LineageOS Project 4 | # SPDX-FileCopyrightText: 2021-2023 The Calyx Institute 5 | # 6 | # SPDX-License-Identifier: Apache-2.0 7 | # 8 | 9 | usage() { 10 | echo "Usage ${0} -n -b --pixel" 11 | } 12 | 13 | # Verify argument count 14 | if [ "${#}" -eq 0 ]; then 15 | usage 16 | exit 1 17 | fi 18 | 19 | PIXEL=false 20 | 21 | while [ "${#}" -gt 0 ]; do 22 | case "${1}" in 23 | -n | --new-tag ) 24 | NEWTAG="${2}"; shift 25 | ;; 26 | -b | --branch-suffix ) 27 | BRANCHSUFFIX="${2}"; shift 28 | ;; 29 | -p | --pixel ) 30 | PIXEL=true; shift 31 | ;; 32 | * ) 33 | usage 34 | exit 1 35 | ;; 36 | esac 37 | shift 38 | done 39 | 40 | ### CONSTANTS ### 41 | readonly script_path="$(cd "$(dirname "$0")";pwd -P)" 42 | readonly vars_path="${script_path}/../../../vendor/lineage/vars" 43 | 44 | source "${vars_path}/common" 45 | 46 | TOP="${script_path}/../../.." 47 | STAGINGBRANCH="staging/${BRANCHSUFFIX}" 48 | SQUASHBRANCH="squash/${BRANCHSUFFIX}" 49 | BRANCH=$(git config --get branch.${STAGINGBRANCH}.merge | sed 's|refs/heads/||') 50 | if [ -z "${BRANCH}" ]; then 51 | BRANCH="${os_branch}" 52 | fi 53 | 54 | # List of merged repos 55 | PROJECTPATHS=$(cat ${MERGEDREPOS} | grep -w merge | awk '{printf "%s\n", $2}') 56 | 57 | echo -e "\n#### Branch = ${BRANCH} Squash branch = ${SQUASHBRANCH} ####" 58 | 59 | # Make sure manifest and forked repos are in a consistent state 60 | echo -e "\n#### Verifying there are no uncommitted changes on forked AOSP projects ####" 61 | for PROJECTPATH in ${PROJECTPATHS} .repo/manifests; do 62 | cd "${TOP}/${PROJECTPATH}" 63 | if [[ -n "$(git status --porcelain)" ]]; then 64 | echo "Path ${PROJECTPATH} has uncommitted changes. Please fix." 65 | exit 1 66 | fi 67 | done 68 | echo "#### Verification complete - no uncommitted changes found ####" 69 | 70 | # Iterate over each forked project 71 | for PROJECTPATH in ${PROJECTPATHS}; do 72 | cd "${TOP}/${PROJECTPATH}" 73 | echo -e "\n#### Squashing ${PROJECTPATH} ####" 74 | repo abandon "${SQUASHBRANCH}" . 75 | git checkout -b "${SQUASHBRANCH}" "${STAGINGBRANCH}" 76 | git branch --set-upstream-to=m/"${BRANCH}" 77 | git reset --soft m/"${BRANCH}" 78 | git add . 
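    # Everything from the staging branch is now staged on top of m/${BRANCH};
    # record it as a single squash commit. For pixel subtree merges the commit
    # body reuses the Change-Id stored in .git/CHANGE_ID (removed afterwards);
    # otherwise the subject and body of the staging branch's HEAD commit are
    # carried over.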
79 | if [ "${PIXEL}" = true ]; then 80 | git commit -m "[SQUASH] Merge tag '${NEWTAG}' into ${STAGINGBRANCH}" -m "$(cat .git/CHANGE_ID)" 81 | rm .git/CHANGE_ID 82 | else 83 | git commit -m "[SQUASH] $(git log ${STAGINGBRANCH} -1 --pretty=%s)" -m "$(git log ${STAGINGBRANCH} -1 --pretty=%b)" 84 | fi 85 | done 86 | -------------------------------------------------------------------------------- /aosp-merger/upload-merge.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # SPDX-FileCopyrightText: 2017, 2020-2022 The LineageOS Project 4 | # SPDX-FileCopyrightText: 2021-2023 The Calyx Institute 5 | # 6 | # SPDX-License-Identifier: Apache-2.0 7 | # 8 | 9 | usage() { 10 | echo "Usage ${0} -n -b --pixel" 11 | } 12 | 13 | # Verify argument count 14 | if [ "${#}" -eq 0 ]; then 15 | usage 16 | exit 1 17 | fi 18 | 19 | PIXEL=false 20 | 21 | while [ "${#}" -gt 0 ]; do 22 | case "${1}" in 23 | -n | --new-tag ) 24 | NEWTAG="${2}"; shift 25 | ;; 26 | -b | --branch-suffix ) 27 | BRANCHSUFFIX="${2}"; shift 28 | ;; 29 | -p | --pixel ) 30 | PIXEL=true; shift 31 | ;; 32 | * ) 33 | usage 34 | exit 1 35 | ;; 36 | esac 37 | shift 38 | done 39 | 40 | ### CONSTANTS ### 41 | readonly script_path="$(cd "$(dirname "$0")";pwd -P)" 42 | readonly vars_path="${script_path}/../../../vendor/lineage/vars" 43 | 44 | source "${vars_path}/common" 45 | 46 | TOP="${script_path}/../../.." 47 | STAGINGBRANCH="staging/${BRANCHSUFFIX}" 48 | if [ ! -z "${NEWTAG}" ]; then 49 | TOPIC="${NEWTAG}" 50 | elif [ "${PIXEL}" = true ]; then 51 | TOPIC="${topic}_pixel" 52 | else 53 | TOPIC="${topic}" 54 | fi 55 | 56 | # Source build environment (needed for lineageremote) 57 | source "${TOP}/build/envsetup.sh" 58 | 59 | # List of merged repos 60 | PROJECTPATHS=$(cat ${MERGEDREPOS} | grep -w merge | awk '{printf "%s\n", $2}') 61 | 62 | echo -e "\n#### Staging branch = ${STAGINGBRANCH} ####" 63 | 64 | # Make sure manifest and forked repos are in a consistent state 65 | echo -e "\n#### Verifying there are no uncommitted changes on forked AOSP projects ####" 66 | for PROJECTPATH in ${PROJECTPATHS} .repo/manifests; do 67 | cd "${TOP}/${PROJECTPATH}" 68 | if [[ -n "$(git status --porcelain)" ]]; then 69 | echo "Path ${PROJECTPATH} has uncommitted changes. Please fix." 
70 | exit 1 71 | fi 72 | done 73 | echo "#### Verification complete - no uncommitted changes found ####" 74 | 75 | # Iterate over each forked project 76 | for PROJECTPATH in ${PROJECTPATHS}; do 77 | cd "${TOP}/${PROJECTPATH}" 78 | 79 | if [ "${PIXEL}" = true ]; then 80 | BRANCH="${device_branch}" 81 | else 82 | BRANCH=$(git config --get branch.${STAGINGBRANCH}.merge | sed 's|refs/heads/||') 83 | if [ -z "${BRANCH}" ]; then 84 | BRANCH="${os_branch}" 85 | fi 86 | fi 87 | 88 | echo -e "\n#### Pushing ${PROJECTPATH} merge to review ####" 89 | git checkout "${STAGINGBRANCH}" 90 | lineageremote | grep -v "Remote 'lineage' created" 91 | FIRST_SHA="$(git show -s --pretty=%P HEAD | cut -d ' ' -f 1)" 92 | SECOND_SHA="$(git show -s --pretty=%P HEAD | cut -d ' ' -f 2)" 93 | git push lineage HEAD:refs/for/"${BRANCH}"%base="${FIRST_SHA}",base="${SECOND_SHA}",topic="${TOPIC}" 94 | done 95 | -------------------------------------------------------------------------------- /aosp-merger/upload-squash.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # SPDX-FileCopyrightText: 2017, 2020-2022 The LineageOS Project 4 | # SPDX-FileCopyrightText: 2021-2023 The Calyx Institute 5 | # 6 | # SPDX-License-Identifier: Apache-2.0 7 | # 8 | 9 | usage() { 10 | echo "Usage ${0} -n -b --pixel" 11 | } 12 | 13 | # Verify argument count 14 | if [ "${#}" -eq 0 ]; then 15 | usage 16 | exit 1 17 | fi 18 | 19 | PIXEL=false 20 | 21 | while [ "${#}" -gt 0 ]; do 22 | case "${1}" in 23 | -n | --new-tag ) 24 | NEWTAG="${2}"; shift 25 | ;; 26 | -b | --branch-suffix ) 27 | BRANCHSUFFIX="${2}"; shift 28 | ;; 29 | -p | --pixel ) 30 | PIXEL=true; shift 31 | ;; 32 | * ) 33 | usage 34 | exit 1 35 | ;; 36 | esac 37 | shift 38 | done 39 | 40 | ### CONSTANTS ### 41 | readonly script_path="$(cd "$(dirname "$0")";pwd -P)" 42 | readonly vars_path="${script_path}/../../../vendor/lineage/vars" 43 | 44 | source "${vars_path}/common" 45 | 46 | TOP="${script_path}/../../.." 47 | SQUASHBRANCH="squash/${BRANCHSUFFIX}" 48 | if [ ! -z "${NEWTAG}" ]; then 49 | TOPIC="${NEWTAG}" 50 | elif [ "${PIXEL}" = true ]; then 51 | TOPIC="${topic}_pixel" 52 | else 53 | TOPIC="${topic}" 54 | fi 55 | 56 | # List of merged repos 57 | PROJECTPATHS=$(cat ${MERGEDREPOS} | grep -w merge | awk '{printf "%s\n", $2}') 58 | 59 | echo -e "\n#### Squash branch = ${SQUASHBRANCH} ####" 60 | 61 | # Make sure manifest and forked repos are in a consistent state 62 | echo -e "\n#### Verifying there are no uncommitted changes on forked AOSP projects ####" 63 | for PROJECTPATH in ${PROJECTPATHS} .repo/manifests; do 64 | cd "${TOP}/${PROJECTPATH}" 65 | if [[ -n "$(git status --porcelain)" ]]; then 66 | echo "Path ${PROJECTPATH} has uncommitted changes. Please fix." 67 | exit 1 68 | fi 69 | done 70 | echo "#### Verification complete - no uncommitted changes found ####" 71 | 72 | # Iterate over each forked project 73 | for PROJECTPATH in ${PROJECTPATHS}; do 74 | cd "${TOP}/${PROJECTPATH}" 75 | echo -e "\n#### Pushing ${PROJECTPATH} squash to review ####" 76 | git checkout "${SQUASHBRANCH}" 77 | repo upload -c -y --no-verify -o topic="${TOPIC}" . 
78 | done 79 | -------------------------------------------------------------------------------- /best-caf-kernel/best-caf-kernel.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # SPDX-FileCopyrightText: 2018-2022 The LineageOS Project 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | from __future__ import print_function 7 | 8 | import sys 9 | import time 10 | from multiprocessing import Event, Pool, Process, Queue 11 | from subprocess import PIPE, Popen 12 | 13 | try: 14 | from Queue import Empty as Queue_Empty 15 | except ImportError: 16 | from queue import Empty as Queue_Empty 17 | 18 | 19 | def run_subprocess(cmd): 20 | sp = Popen(cmd, stdout=PIPE, stderr=PIPE, 21 | shell=True, universal_newlines=True) 22 | comm = sp.communicate() 23 | exit_code = sp.returncode 24 | if exit_code != 0: 25 | print("There was an error running the subprocess.\n" 26 | "cmd: %s\n" 27 | "exit code: %d\n" 28 | "stdout: %s\n" 29 | "stderr: %s" % (cmd, exit_code, comm[0], comm[1])) 30 | return comm 31 | 32 | 33 | def get_tags(tag_name): 34 | cmd = "git tag -l %s" % tag_name 35 | comm = run_subprocess(cmd) 36 | if not str(comm[0]): 37 | return 0 38 | 39 | return comm[0].strip("\n").split("\n") 40 | 41 | 42 | def get_total_changes(tag_name): 43 | cmd = "git diff %s --shortstat" % tag_name 44 | comm = run_subprocess(cmd) 45 | try: 46 | a, d = comm[0].split(",")[1:] 47 | a = int(a.strip().split()[0]) 48 | d = int(d.strip().split()[0]) 49 | except ValueError: 50 | total = None 51 | else: 52 | total = a + d 53 | return total 54 | 55 | 56 | def worker(tag_name): 57 | tc = get_total_changes(tag_name) 58 | worker.q.put((tag_name, tc)) 59 | 60 | 61 | def worker_init(q): 62 | worker.q = q 63 | 64 | 65 | def background(q, e, s): 66 | best = 9999999999999 67 | tag = "" 68 | while True: 69 | try: 70 | tn, tc = q.get(False) 71 | except Queue_Empty: 72 | if e.is_set(): 73 | break 74 | else: 75 | if tc is None: 76 | tc = 0; 77 | if not s: 78 | print("%s has %d lines changed" % (tn, tc)) 79 | if best > tc: 80 | best = tc 81 | tag = tn 82 | if not s: 83 | print("%s is the new best match with %d lines changed" % (tn, tc)) 84 | print("Best match") 85 | print("TAG: %s" % tag) 86 | print("Lines changed: %d" % best) 87 | 88 | 89 | def main(): 90 | import argparse # Only needed for main() 91 | parser = argparse.ArgumentParser() 92 | parser.add_argument("-j", action="store", dest="jobs", default=1, type=int, 93 | metavar="N", help="number of jobs to run at once") 94 | parser.add_argument("-s", action="store_true", dest="silent", default=False, 95 | help="reduce the verbosity of the output") 96 | parser.add_argument("tag_name", metavar="", 97 | help="tag name to search for (can contain wildcards)") 98 | args = parser.parse_args() 99 | 100 | tags = get_tags(args.tag_name) 101 | if not tags: 102 | print("No tags to check. 
bailing.") 103 | sys.exit(1) 104 | if not args.silent: 105 | print("number of tags to check: %d" % len(tags)) 106 | 107 | queue = Queue() 108 | event = Event() 109 | 110 | b = Process(target=background, args=(queue, event, args.silent)) 111 | b.start() 112 | 113 | pool = Pool(args.jobs, worker_init, [queue]) 114 | pool.map(worker, tags) 115 | 116 | pool.close() 117 | pool.join() 118 | event.set() 119 | b.join() 120 | 121 | 122 | if __name__ == '__main__': 123 | main() 124 | -------------------------------------------------------------------------------- /build-webview/build-webview.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # SPDX-FileCopyrightText: 2019-2023 The LineageOS Project 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | set -e 7 | 8 | chromium_version="135.0.7049.100" 9 | chromium_code="7049100" 10 | clean=0 11 | gsync=0 12 | supported_archs=(arm arm64 x86 x64) 13 | 14 | lineage_github=https://github.com/LineageOS 15 | lineage_gitlab=https://gitlab.com/LineageOS/android 16 | webview_proj_base=android_external_chromium-webview 17 | 18 | usage() { 19 | echo "Usage:" 20 | echo " build_webview [ options ]" 21 | echo 22 | echo " Options:" 23 | echo " -a Build specified arch" 24 | echo " -c Clean" 25 | echo " -h Show this message" 26 | echo " -r Specify chromium release" 27 | echo " -s Sync" 28 | echo 29 | echo " Example:" 30 | echo " build_webview -c -r $chromium_version:$chromium_code" 31 | echo 32 | exit 1 33 | } 34 | 35 | clone_proj() { 36 | depth="" 37 | if [ "$#" -eq 3 ]; then 38 | depth="--depth $3" 39 | fi 40 | 41 | if [ ! -d "$2" ]; then 42 | git clone $1 $2 $depth 43 | fi 44 | } 45 | 46 | build() { 47 | build_args=$args' target_cpu="'$1'"' 48 | 49 | code=$chromium_code 50 | if [ $1 '==' "arm" ]; then 51 | code+=00 52 | elif [ $1 '==' "arm64" ]; then 53 | code+=50 54 | elif [ $1 '==' "x86" ]; then 55 | code+=10 56 | elif [ $1 '==' "x64" ]; then 57 | code+=60 58 | fi 59 | build_args+=' android_default_version_code="'$code'"' 60 | 61 | gn gen "out/$1" --args="$build_args" 62 | ninja -C out/$1 system_webview_apk 63 | if [ "$?" -eq 0 ]; then 64 | case $1 in 65 | x64) 66 | android_arch="x86_64" 67 | lineage_git=$lineage_gitlab 68 | ;; 69 | *) 70 | android_arch=$1 71 | lineage_git=$lineage_github 72 | ;; 73 | esac 74 | 75 | clone_proj ${lineage_git}/${webview_proj_base}_prebuilt_${android_arch}.git \ 76 | ../${webview_proj_base}/prebuilt/${android_arch} 1 77 | 78 | cp out/$1/apks/SystemWebView.apk ../$webview_proj_base/prebuilt/$android_arch/webview.apk 79 | fi 80 | } 81 | 82 | while getopts ":a:chr:s" opt; do 83 | case $opt in 84 | a) for arch in ${supported_archs[@]}; do 85 | [ "$OPTARG" '==' "$arch" ] && build_arch="$OPTARG" 86 | done 87 | if [ -z "$build_arch" ]; then 88 | echo "Unsupported ARCH: $OPTARG" 89 | echo "Supported ARCHs: ${supported_archs[@]}" 90 | exit 1 91 | fi 92 | ;; 93 | c) clean=1 ;; 94 | h) usage ;; 95 | r) version=(${OPTARG//:/ }) 96 | chromium_version=${version[0]} 97 | chromium_code=${version[1]} 98 | ;; 99 | s) gsync=1 ;; 100 | :) 101 | echo "Option -$OPTARG requires an argument" 102 | echo 103 | usage 104 | ;; 105 | \?) 106 | echo "Invalid option:-$OPTARG" 107 | echo 108 | usage 109 | ;; 110 | esac 111 | done 112 | shift $((OPTIND-1)) 113 | 114 | # Download webview patches 115 | clone_proj ${lineage_github}/${webview_proj_base}_patches.git \ 116 | ${webview_proj_base}/patches 117 | 118 | # Add depot_tools to PATH 119 | if [ ! 
-d depot_tools ]; then 120 | git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git 121 | fi 122 | export PATH="$(pwd -P)/depot_tools:$PATH" 123 | 124 | if [ ! -d src ]; then 125 | fetch android 126 | yes | gclient sync -D -R -r $chromium_version 127 | fi 128 | 129 | # Apply our patches 130 | if [ $gsync -eq 1 ]; then 131 | ( cd src 132 | git am ../android_external_chromium-webview/patches/*.patch 133 | ) 134 | fi 135 | 136 | if [ $gsync -eq 1 ]; then 137 | find src -name index.lock -delete 138 | yes | gclient sync -R -r $chromium_version 139 | fi 140 | cd src 141 | 142 | # Replace webview icon 143 | mkdir -p android_webview/nonembedded/java/res_icon/drawable-xxxhdpi 144 | cp chrome/android/java/res_chromium_base/mipmap-mdpi/app_icon.png android_webview/nonembedded/java/res_icon/drawable-mdpi/icon_webview.png 145 | cp chrome/android/java/res_chromium_base/mipmap-hdpi/app_icon.png android_webview/nonembedded/java/res_icon/drawable-hdpi/icon_webview.png 146 | cp chrome/android/java/res_chromium_base/mipmap-xhdpi/app_icon.png android_webview/nonembedded/java/res_icon/drawable-xhdpi/icon_webview.png 147 | cp chrome/android/java/res_chromium_base/mipmap-xxhdpi/app_icon.png android_webview/nonembedded/java/res_icon/drawable-xxhdpi/icon_webview.png 148 | cp chrome/android/java/res_chromium_base/mipmap-xxxhdpi/app_icon.png android_webview/nonembedded/java/res_icon/drawable-xxxhdpi/icon_webview.png 149 | 150 | # Build args 151 | args='target_os="android"' 152 | args+=' is_debug=false' 153 | args+=' is_official_build=true' 154 | args+=' is_chrome_branded=false' 155 | args+=' use_official_google_api_keys=false' 156 | args+=' ffmpeg_branding="Chrome"' 157 | args+=' proprietary_codecs=true' 158 | args+=' enable_resource_allowlist_generation=false' 159 | args+=' enable_remoting=false' 160 | args+=' is_component_build=false' 161 | args+=' symbol_level=0' 162 | args+=' enable_nacl=false' 163 | args+=' blink_symbol_level=0' 164 | args+=' webview_devui_show_icon=false' 165 | args+=' dfmify_dev_ui=false' 166 | args+=' disable_fieldtrial_testing_config=true' 167 | args+=' android_default_version_name="'$chromium_version'"' 168 | 169 | # Setup environment 170 | [ $clean -eq 1 ] && rm -rf out 171 | . build/android/envsetup.sh 172 | 173 | # Check target and build 174 | if [ -n "$build_arch" ]; then 175 | build $build_arch 176 | else 177 | build arm 178 | build arm64 179 | build x86 180 | build x64 181 | fi 182 | -------------------------------------------------------------------------------- /carriersettings-extractor/.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/ 2 | *.py[cod] 3 | *$py.class 4 | *_pb2.py 5 | -------------------------------------------------------------------------------- /carriersettings-extractor/LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2020, Dara Adib 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. 
Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /carriersettings-extractor/Makefile: -------------------------------------------------------------------------------- 1 | TARGETS := carrierId_pb2.py carrier_list_pb2.py carrier_settings_pb2.py 2 | 3 | .PHONY: all clean 4 | all: $(TARGETS) 5 | clean: 6 | rm -f $(TARGETS) 7 | 8 | %_pb2.py: %.proto 9 | protoc --python_out=. $< 10 | -------------------------------------------------------------------------------- /carriersettings-extractor/README.md: -------------------------------------------------------------------------------- 1 | # carriersettings-extractor 2 | 3 | Android Open Source Project (AOSP) [includes](https://source.android.com/devices/tech/config/update) APN settings ([`apns-full-conf.xml`](https://android.googlesource.com/device/sample/+/main/etc/apns-full-conf.xml)) and [carrier settings](https://source.android.com/devices/tech/config/carrier) ([`carrier_config_*.xml`](https://android.googlesource.com/platform/packages/apps/CarrierConfig/+/main/assets) + [`vendor.xml`](https://android.googlesource.com/platform/packages/apps/CarrierConfig/+/refs/heads/main/res/xml/vendor.xml)) in human-readable XML format. However, Google Pixel device images instead include APN and carrier settings as binary protobuf files for use by the CarrierSettings system app. 4 | 5 | This script converts the CarrierSettings protobuf files (e.g., `carrier_list.pb`, `others.pb`) to XML format compatible with AOSP. This may be helpful for Android-based systems that do not bundle CarrierSettings, but wish to support carriers that are not included in AOSP. 6 | 7 | For a description of each APN and carrier setting, refer to the doc comments in [`Telephony.java`](https://android.googlesource.com/platform/frameworks/base/+/refs/heads/main/core/java/android/provider/Telephony.java) and [`CarrierConfigManager.java`](https://android.googlesource.com/platform/frameworks/base/+/refs/heads/main/telephony/java/android/telephony/CarrierConfigManager.java), respectively. 8 | 9 | ## Dependencies 10 | 11 | * protobuf-compiler (protoc) - optional, see below 12 | * python3-protobuf - required 13 | 14 | ## Usage 15 | 16 | Download a [Pixel factory image](https://developers.google.com/android/images) and extract the CarrierSettings protobuf files. 17 | Convert `CarrierSettings/*.pb` to `apns-full-conf.xml` and `vendor.xml`. 
18 | 19 | ./carriersettings_extractor.py -i CarrierSettings -a apns-conf.xml -v vendor.xml 20 | 21 | ## Protobuf definitions 22 | 23 | The definitions in [`carrier_list.proto`](carrier_list.proto) and [`carrier_settings.proto`](carrier_settings.proto) are useful for inspecting the CarrierSettings protobuf files. 24 | 25 | protoc --decode=com.google.carrier.CarrierList carrier_list.proto < CarrierSettings/carrier_list.pb 26 | protoc --decode=com.google.carrier.CarrierSettings carrier_settings.proto < CarrierSettings/verizon_us.pb 27 | protoc --decode=com.google.carrier.MultiCarrierSettings carrier_settings.proto < CarrierSettings/others.pb 28 | 29 | To check schema or otherwise inspect the protobuf files without applying definitions, use the `--decode_raw` argument. 30 | 31 | protoc --decode_raw < CarrierSettings/carrier_list.pb 32 | protoc --decode_raw < CarrierSettings/verizon_us.pb 33 | protoc --decode_raw < CarrierSettings/others.pb 34 | -------------------------------------------------------------------------------- /carriersettings-extractor/carrierId.proto: -------------------------------------------------------------------------------- 1 | /* 2 | * SPDX-FileCopyrightText: 2019 The Android Open Source Project 3 | * SPDX-License-Identifier: Apache-2.0 4 | */ 5 | syntax = "proto2"; 6 | 7 | package carrierIdentification; 8 | 9 | // A complete list of carriers 10 | message CarrierList { 11 | // A collection of carriers. one entry for one carrier. 12 | repeated CarrierId carrier_id = 1; 13 | // Version number of current carrier list 14 | optional int32 version = 2; 15 | }; 16 | 17 | // CarrierId is the unique representation of a carrier in CID table. 18 | message CarrierId { 19 | // [Optional] A unique canonical number designated to a carrier. 20 | optional int32 canonical_id = 1; 21 | 22 | // [Optional] A user-friendly carrier name (not localized). 23 | optional string carrier_name = 2; 24 | 25 | // [Optional] Carrier attributes to match a carrier. At least one value is required. 26 | repeated CarrierAttribute carrier_attribute = 3; 27 | 28 | // [Optional] A unique canonical number to represent its parent carrier. The parent-child 29 | // relationship can be used to differentiate a single carrier by different networks, 30 | // by prepaid v.s. postpaid or even by 4G v.s. 3G plan. 31 | optional int32 parent_canonical_id = 4; 32 | }; 33 | 34 | // Attributes used to match a carrier. 35 | // For each field within this message: 36 | // - if not set, the attribute is ignored; 37 | // - if set, the device must have one of the specified values to match. 38 | // Match is based on AND between any field that is set and OR for values within a repeated field. 39 | message CarrierAttribute { 40 | // [Optional] The MCC and MNC that map to this carrier. At least one value is required. 41 | repeated string mccmnc_tuple = 1; 42 | 43 | // [Optional] Prefix of IMSI (International Mobile Subscriber Identity) in 44 | // decimal format. Some digits can be replaced with "x" symbols matching any digit. 45 | // Sample values: 20404794, 21670xx2xxx. 46 | repeated string imsi_prefix_xpattern = 2; 47 | 48 | // [Optional] The Service Provider Name. Read from subscription EF_SPN. 49 | // Sample values: C Spire, LeclercMobile 50 | repeated string spn = 3; 51 | 52 | // [Optional] PLMN network name. Read from subscription EF_PNN. 53 | // Sample values: 54 | repeated string plmn = 4; 55 | 56 | // [Optional] Group Identifier Level1 for a GSM phone. Read from subscription EF_GID1. 
57 | // Sample values: 6D, BAE0000000000000 58 | repeated string gid1 = 5; 59 | 60 | // [Optional] Group Identifier Level2 for a GSM phone. Read from subscription EF_GID2. 61 | // Sample values: 6D, BAE0000000000000 62 | repeated string gid2 = 6; 63 | 64 | // [Optional] The Access Point Name, corresponding to "apn" field returned by 65 | // "content://telephony/carriers/preferapn" on device. 66 | // Sample values: fast.t-mobile.com, internet 67 | repeated string preferred_apn = 7; 68 | 69 | // [Optional] Prefix of Integrated Circuit Card Identifier. Read from subscription EF_ICCID. 70 | // Sample values: 894430, 894410 71 | repeated string iccid_prefix = 8; 72 | 73 | // [Optional] Carrier Privilege Access Rule in hex string. 74 | // Sample values: 61ed377e85d386a8dfee6b864bd85b0bfaa5af88 75 | repeated string privilege_access_rule = 9; 76 | }; 77 | 78 | -------------------------------------------------------------------------------- /carriersettings-extractor/carrier_list.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LineageOS/scripts/d823d8e50c46672daacdf0ac5489a10fbea682d7/carriersettings-extractor/carrier_list.pb -------------------------------------------------------------------------------- /carriersettings-extractor/carrier_list.proto: -------------------------------------------------------------------------------- 1 | /* 2 | * SPDX-FileCopyrightText: 2020 Google LLC 3 | * SPDX-License-Identifier: Apache-2.0 4 | */ 5 | syntax = "proto2"; 6 | 7 | package com.google.carrier; 8 | 9 | // The carrier ID is matched against SIM data to determine carrier 10 | message CarrierId { 11 | // Mobile Country Code (MCC) & Mobile Network Code (MNC) 12 | optional string mcc_mnc = 1; 13 | 14 | // Additional data to identify MVNO 15 | oneof mvno_data { 16 | // SPN (Service Provider Name) 17 | string spn = 2; 18 | 19 | // IMSI prefix pattern 20 | string imsi = 3; 21 | 22 | // Group identifier (level 1) prefix 23 | string gid1 = 4; 24 | } 25 | 26 | reserved 5; 27 | } 28 | 29 | // Maps CarrierIds to an internal unique carrier name 30 | message CarrierMap { 31 | // A unique canonical carrier name 32 | // This name is the primary key to identify a carrier 33 | // Typically a canonical_name looks like _ 34 | optional string canonical_name = 1; 35 | 36 | // A collection of network IDs owned by this carrier 37 | repeated CarrierId carrier_id = 2; 38 | 39 | reserved 3; 40 | } 41 | 42 | // Maps CarrierId to internal unique carrier name 43 | message CarrierList { 44 | // A collection of carrier maps; one entry for one carrier 45 | repeated CarrierMap entry = 1; 46 | 47 | // The version number of this CarrierList file 48 | optional int64 version = 2; 49 | } 50 | -------------------------------------------------------------------------------- /carriersettings-extractor/carrier_settings.proto: -------------------------------------------------------------------------------- 1 | /* 2 | * SPDX-FileCopyrightText: 2020 Google LLC 3 | * SPDX-License-Identifier: Apache-2.0 4 | */ 5 | syntax = "proto2"; 6 | 7 | package com.google.carrier; 8 | 9 | // Settings of one carrier, including apns and configs 10 | // This is the payload to be delivered from server 11 | message CarrierSettings { 12 | // A unique canonical carrier name 13 | optional string canonical_name = 1; 14 | 15 | // Version number of current carrier’s settings 16 | optional int64 version = 2; 17 | 18 | // Carrier APNs 19 | optional CarrierApns apns = 3; 20 | 21 | // Carrier configs 22 | 
optional CarrierConfig configs = 4; 23 | 24 | reserved 5; 25 | 26 | // Vendor carrier configs 27 | optional VendorConfigs vendor_configs = 6; 28 | } 29 | 30 | // A collection of multiple carriers’ settings 31 | message MultiCarrierSettings { 32 | // Version number 33 | optional int64 version = 1; 34 | 35 | // List of CarrierSettings 36 | repeated CarrierSettings setting = 2; 37 | } 38 | 39 | // An access point name (aka. APN) entry 40 | message ApnItem { 41 | // The name of APN, map to xml apn "carrier" attribute 42 | // eg. Verizon Internet, may visible to user in Settings 43 | optional string name = 1; 44 | // The value of APN, eg. send to modem for data call. map to xml 45 | // "apn" attribute, eg. vzwinternet 46 | optional string value = 2; 47 | 48 | // Next two fields type and bearer_bitmask affect how APN is selected by 49 | // platform. eg. type means APN capability and bearer_bitmask specifies 50 | // which RATs apply. 51 | // Note mcc/mnc and mvno data doesn't belong to this proto because they 52 | // define a carrier. 53 | // APN types as defined in Android code PhoneConstants.java 54 | enum ApnType { 55 | ALL = 0; // this APN can serve all kinds of data connections 56 | DEFAULT = 1; // internet data 57 | MMS = 2; 58 | SUPL = 3; 59 | DUN = 4; 60 | HIPRI = 5; 61 | FOTA = 6; 62 | IMS = 7; 63 | CBS = 8; 64 | IA = 9; // Initial attach 65 | EMERGENCY = 10; 66 | XCAP = 11; 67 | UT = 12; 68 | RCS = 13; 69 | } 70 | repeated ApnType type = 3; 71 | 72 | // Network types that this APN applies to, separated by "|". A network type 73 | // is represented as an integer defined in TelephonyManager.NETWORK_TYPE_*. 74 | // Default value "0" means all network types. 75 | optional string bearer_bitmask = 4 [default = "0"]; 76 | 77 | // Below are all parameters for the APN 78 | // APN server / auth parameters. 79 | optional string server = 5; 80 | optional string proxy = 6; 81 | optional string port = 7; 82 | optional string user = 8; 83 | optional string password = 9; 84 | optional int32 authtype = 10 [default = -1]; 85 | 86 | // MMS configuration. 87 | optional string mmsc = 11; 88 | optional string mmsc_proxy = 12; 89 | optional string mmsc_proxy_port = 13; 90 | 91 | // Protocols allowed to connect to the APN. 92 | enum Protocol { 93 | IP = 0; 94 | IPV6 = 1; 95 | IPV4V6 = 2; 96 | PPP = 3; 97 | } 98 | optional Protocol protocol = 14 [default = IP]; 99 | optional Protocol roaming_protocol = 15 [default = IP]; 100 | 101 | // MTU for the connections. 102 | optional int32 mtu = 16 [default = 0]; 103 | // An ID used to sync the APN in modem. 104 | optional int32 profile_id = 17; 105 | // Max connections. 106 | optional int32 max_conns = 18 [default = 0]; 107 | // The wait time required between disconnecting and connecting, in seconds. 108 | optional int32 wait_time = 19 [default = 0]; 109 | // The time to limit max connection, in seconds. 110 | optional int32 max_conns_time = 20 [default = 0]; 111 | // VENDOR ADDED 112 | optional bool carrier_enabled = 21; 113 | // Whether to be persisted to modem. 114 | optional bool modem_cognitive = 22 [default = false]; 115 | // Whether visible in APN settings. 116 | optional bool user_visible = 23 [default = true]; 117 | // Whether editable in APN settings. 118 | optional bool user_editable = 24 [default = true]; 119 | 120 | // If > 0: when an APN becomes a preferred APN on user/framework 121 | // selection, other APNs with the same apn_set_id will also be preferred 122 | // by framework when selecting APNs. 
123 | optional int32 apn_set_id = 25 [default = 0]; 124 | 125 | // The skip 464xlat flag. Flag works as follows. 126 | // SKIP_464XLAT_DEFAULT: the APN will skip 464xlat only if the APN has type 127 | // IMS and does not support INTERNET which has type 128 | // DEFAULT or HIPRI. 129 | // SKIP_464XLAT_DISABLE: the APN will NOT skip 464xlat 130 | // SKIP_464XLAT_ENABLE: the APN will skip 464xlat 131 | enum Xlat { 132 | SKIP_464XLAT_DEFAULT = 0; 133 | SKIP_464XLAT_DISABLE = 1; 134 | SKIP_464XLAT_ENABLE = 2; 135 | } 136 | optional Xlat skip_464xlat = 26 [default = SKIP_464XLAT_DEFAULT]; 137 | } 138 | 139 | // A collection of all APNs for a carrier 140 | message CarrierApns { 141 | reserved 1; 142 | 143 | // APNs belong to this carrier 144 | repeated ApnItem apn = 2; 145 | } 146 | 147 | // An array of text 148 | message TextArray { 149 | repeated string item = 1; 150 | } 151 | 152 | // An array of int 153 | message IntArray { 154 | repeated int32 item = 1; 155 | } 156 | 157 | // Carrier configs 158 | message CarrierConfig { 159 | reserved 1, 3; 160 | 161 | // Key-Value pair as a config entry 162 | message Config { 163 | optional string key = 1; 164 | 165 | oneof value { 166 | string text_value = 2; 167 | int32 int_value = 3; 168 | int64 long_value = 4; 169 | bool bool_value = 5; 170 | TextArray text_array = 6; 171 | IntArray int_array = 7; 172 | CarrierConfig bundle = 8; 173 | double double_value = 9; 174 | } 175 | } 176 | 177 | // Key-value pairs, holding all config entries 178 | repeated Config config = 2; 179 | } 180 | 181 | // The configs of one vendor client. 182 | message VendorConfigClient { 183 | // Name of the client for which the configuration items need to 184 | // be stored 185 | required string name = 1; 186 | 187 | // Binary blob containing the configuration. The format 188 | // of the configuration depends on the specific client. 189 | // For some clients, the proto representation of {@link VendorConfigData} 190 | // defined in vendorconfigdata.proto is used. 191 | optional bytes value = 2; 192 | 193 | // Range of extensions. The extensions from 100 to 1000 are reserved for 194 | // Google's internal usage. 195 | extensions 100 to 5000; 196 | } 197 | 198 | // A collection of configs from vendor clients. 199 | message VendorConfigs { 200 | reserved 1; 201 | 202 | // Configuration 203 | repeated VendorConfigClient client = 2; 204 | } 205 | -------------------------------------------------------------------------------- /config-fs-gen/README.md: -------------------------------------------------------------------------------- 1 | # config-fs-gen 2 | 3 | ``` 4 | usage: config-fs-gen.py [-h] 5 | capability_header_path 6 | android_filesystem_config_header_path 7 | vendor_group_path fs_config_paths 8 | [fs_config_paths ...] 
9 | 10 | Convert /vendor/etc/group × 11 | /(system|vendor)/etc/(fs_config_dirs|fs_config_files) to config.fs 12 | 13 | positional arguments: 14 | capability_header_path 15 | path to 16 | {android}/bionic/libc/kernel/uapi/linux/capability.h 17 | android_filesystem_config_header_path 18 | path to {android}/system/core/libcutils/include/privat 19 | e/android_filesystem_config.h 20 | vendor_group_path path to {rom}/vendor/etc/group 21 | fs_config_paths paths to 22 | {rom}/(system|vendor)/etc/fs_config_(dirs|files) 23 | 24 | optional arguments: 25 | -h, --help show this help message and exit 26 | ``` 27 | ``` 28 | Example usage: 29 | $ ./config-fs-gen.py ~/lineage-16.0/bionic/libc/kernel/uapi/linux/capability.h \ 30 | ~/lineage-16.0/system/core/libcutils/include/private/android_filesystem_config.h \ 31 | ~/lineage-16.0/out/target/product/guacamole/vendor/etc/group \ 32 | ~/lineage-16.0/out/target/product/guacamole/{system,vendor}/etc/{fs_config_dirs,fs_config_files} 33 | [AID_VENDOR_QTI_DIAG] 34 | value:2901 35 | 36 | [AID_VENDOR_QDSS] 37 | value:2902 38 | 39 | [AID_VENDOR_RFS] 40 | value:2903 41 | 42 | [AID_VENDOR_RFS_SHARED] 43 | value:2904 44 | 45 | [AID_VENDOR_ADPL_ODL] 46 | value:2905 47 | 48 | [AID_VENDOR_QRTR] 49 | value:2906 50 | 51 | [bt_firmware/] 52 | mode: 0771 53 | user: AID_SYSTEM 54 | group: AID_SYSTEM 55 | caps: 0 56 | 57 | [dsp/] 58 | mode: 0771 59 | user: AID_MEDIA 60 | group: AID_MEDIA 61 | caps: 0 62 | 63 | [firmware/] 64 | mode: 0771 65 | user: AID_SYSTEM 66 | group: AID_SYSTEM 67 | caps: 0 68 | 69 | [firmware/image/*] 70 | mode: 0771 71 | user: AID_SYSTEM 72 | group: AID_SYSTEM 73 | caps: 0 74 | 75 | [persist/] 76 | mode: 0771 77 | user: AID_SYSTEM 78 | group: AID_SYSTEM 79 | caps: 0 80 | 81 | [vendor/bin/cnd] 82 | mode: 0755 83 | user: AID_SYSTEM 84 | group: AID_SYSTEM 85 | caps: NET_BIND_SERVICE NET_ADMIN BLOCK_SUSPEND 86 | 87 | [vendor/bin/hw/android.hardware.bluetooth@1.0-service-qti] 88 | mode: 0755 89 | user: AID_BLUETOOTH 90 | group: AID_BLUETOOTH 91 | caps: NET_ADMIN BLOCK_SUSPEND 92 | 93 | [vendor/bin/ims_rtp_daemon] 94 | mode: 0755 95 | user: AID_SYSTEM 96 | group: AID_RADIO 97 | caps: NET_BIND_SERVICE 98 | 99 | [vendor/bin/imsdatadaemon] 100 | mode: 0755 101 | user: AID_SYSTEM 102 | group: AID_SYSTEM 103 | caps: NET_BIND_SERVICE 104 | 105 | [vendor/bin/imsrcsd] 106 | mode: 0755 107 | user: AID_SYSTEM 108 | group: AID_RADIO 109 | caps: NET_BIND_SERVICE WAKE_ALARM BLOCK_SUSPEND 110 | 111 | [vendor/bin/loc_launcher] 112 | mode: 0755 113 | user: AID_GPS 114 | group: AID_GPS 115 | caps: SETGID SETUID 116 | 117 | [vendor/bin/pd-mapper] 118 | mode: 0755 119 | user: AID_SYSTEM 120 | group: AID_SYSTEM 121 | caps: NET_BIND_SERVICE 122 | 123 | [vendor/bin/pm-service] 124 | mode: 0755 125 | user: AID_SYSTEM 126 | group: AID_SYSTEM 127 | caps: NET_BIND_SERVICE 128 | 129 | [vendor/bin/sensors.qti] 130 | mode: 0755 131 | user: AID_SYSTEM 132 | group: AID_SYSTEM 133 | caps: NET_BIND_SERVICE 134 | 135 | [vendor/bin/slim_daemon] 136 | mode: 0755 137 | user: AID_GPS 138 | group: AID_GPS 139 | caps: NET_BIND_SERVICE 140 | 141 | [vendor/bin/wcnss_filter] 142 | mode: 0755 143 | user: AID_BLUETOOTH 144 | group: AID_BLUETOOTH 145 | caps: BLOCK_SUSPEND 146 | 147 | [vendor/bin/xtwifi-client] 148 | mode: 0755 149 | user: AID_GPS 150 | group: AID_GPS 151 | caps: NET_BIND_SERVICE WAKE_ALARM BLOCK_SUSPEND 152 | 153 | [vendor/firmware_mnt/image/*] 154 | mode: 0771 155 | user: AID_ROOT 156 | group: AID_SYSTEM 157 | caps: 0 158 | 159 | [vendor/lib/modules-aging/*] 160 | mode: 0644 161 | user: 
AID_ROOT 162 | group: AID_ROOT 163 | caps: 0 164 | ``` 165 | -------------------------------------------------------------------------------- /config-fs-gen/config-fs-gen.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # SPDX-FileCopyrightText: 2019-2020 The LineageOS Project 5 | # SPDX-License-Identifier: Apache-2.0 6 | 7 | from __future__ import print_function 8 | 9 | import argparse 10 | import parse 11 | import struct 12 | 13 | 14 | def parse_cmdline(): 15 | parser = argparse.ArgumentParser( 16 | description='Convert /vendor/etc/group × /(system|vendor)/etc/(fs_config_dirs|fs_config_files) to config.fs') 17 | parser.add_argument('capability_header_path', 18 | help='path to {android}/bionic/libc/kernel/uapi/linux/capability.h') 19 | parser.add_argument('android_filesystem_config_header_path', 20 | help='path to {android}/system/core/libcutils/include/private/android_filesystem_config.h') 21 | parser.add_argument('vendor_group_path', 22 | help='path to {rom}/vendor/etc/group') 23 | parser.add_argument('fs_config_paths', nargs='+', 24 | help='paths to {rom}/(system|vendor)/etc/fs_config_(dirs|files)') 25 | return parser.parse_args() 26 | 27 | 28 | def get_capabilities(capability_header_path): 29 | capabilities = {} 30 | 31 | with open(capability_header_path, 'r') as file: 32 | for line in file: 33 | s = parse.search('#define CAP_{:w} {:d}', line) 34 | 35 | if s is not None: 36 | capabilities[s[1]] = s[0] 37 | 38 | return capabilities 39 | 40 | 41 | def get_groups(android_filesystem_config_header_path, vendor_group_path): 42 | system_groups = {} 43 | vendor_groups = {} 44 | 45 | with open(android_filesystem_config_header_path, 'r') as file: 46 | for line in file: 47 | s = parse.search('#define AID_{:w} {:d}', line) 48 | 49 | if s is not None: 50 | system_groups[s[1]] = 'AID_' + s[0] 51 | 52 | with open(vendor_group_path, 'r') as file: 53 | for line in file: 54 | name, _, uid, _ = line.split(':', 3) 55 | vendor_groups[uid] = 'AID_' + name.upper() 56 | 57 | return system_groups, vendor_groups 58 | 59 | 60 | def get_fs_path_configs(fs_config_paths, system_groups, vendor_groups): 61 | fs_path_config = {} 62 | 63 | for fs_config_path in args.fs_config_paths: 64 | with open(fs_config_path, 'rb') as file: 65 | while True: 66 | bytes = file.read(struct.calcsize(' 0: 101 | caps_list.append(str(caps)) 102 | 103 | return ' '.join(caps_list) 104 | 105 | 106 | def gid_to_str(gid, system_groups, vendor_groups): 107 | if gid in system_groups: 108 | return system_groups[gid] 109 | 110 | if gid in vendor_groups: 111 | return vendor_groups[gid] 112 | 113 | return gid 114 | 115 | 116 | if __name__ == '__main__': 117 | args = parse_cmdline() 118 | capabilities = get_capabilities(args.capability_header_path) 119 | system_groups, vendor_groups = get_groups( 120 | args.android_filesystem_config_header_path, 121 | args.vendor_group_path) 122 | fs_path_configs = get_fs_path_configs( 123 | args.fs_config_paths, 124 | system_groups, 125 | vendor_groups) 126 | 127 | # print vendor AIDs 128 | for gid in sorted(vendor_groups): 129 | print('[{}]'.format(vendor_groups[gid])) 130 | print('value:{}'.format(gid)) 131 | print() 132 | 133 | # print {system,vendor} fs path configs 134 | for name in sorted(fs_path_configs): 135 | print('[{}]'.format(name)) 136 | print('mode: {:04o}'.format(fs_path_configs[name]['mode'])) 137 | print('user: {}'.format(fs_path_configs[name]['user'])) 138 | print('group: 
{}'.format(fs_path_configs[name]['group'])) 139 | print('caps: {}'.format(fs_path_configs[name]['caps'])) 140 | print() 141 | -------------------------------------------------------------------------------- /config-fs-gen/requirements.txt: -------------------------------------------------------------------------------- 1 | parse 2 | -------------------------------------------------------------------------------- /default-wallpaper/update_default_wallpaper.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # SPDX-FileCopyrightText: 2022 The LineageOS Project 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | usage() { 7 | echo "Usage: ${0} " 8 | } 9 | 10 | # Verify argument count 11 | if [ "$#" -ne 1 ]; then 12 | usage 13 | exit 1 14 | fi 15 | 16 | source "../../../build/envsetup.sh" 17 | 18 | ## First ensure the image is a properly formatted png file 19 | convert "${1}" "${PWD}"/default_wallpaper.png 20 | 21 | ## Convert the image to all the resolutions we need and put them in the appropriate place 22 | convert -resize 1080x1080 "${PWD}"/default_wallpaper.png "${ANDROID_BUILD_TOP}"/vendor/lineage/overlay/common/frameworks/base/core/res/res/drawable-hdpi/default_wallpaper.png 23 | convert -resize 960x960 "${PWD}"/default_wallpaper.png "${ANDROID_BUILD_TOP}"/vendor/lineage/overlay/common/frameworks/base/core/res/res/drawable-nodpi/default_wallpaper.png 24 | convert -resize 1920x1920 "${PWD}"/default_wallpaper.png "${ANDROID_BUILD_TOP}"/vendor/lineage/overlay/common/frameworks/base/core/res/res/drawable-sw600dp-nodpi/default_wallpaper.png 25 | convert -resize 1920x1920 "${PWD}"/default_wallpaper.png "${ANDROID_BUILD_TOP}"/vendor/lineage/overlay/common/frameworks/base/core/res/res/drawable-sw720dp-nodpi/default_wallpaper.png 26 | convert -resize 1440x1440 "${PWD}"/default_wallpaper.png "${ANDROID_BUILD_TOP}"/vendor/lineage/overlay/common/frameworks/base/core/res/res/drawable-xhdpi/default_wallpaper.png 27 | convert -resize 1920x1920 "${PWD}"/default_wallpaper.png "${ANDROID_BUILD_TOP}"/vendor/lineage/overlay/common/frameworks/base/core/res/res/drawable-xxhdpi/default_wallpaper.png 28 | convert -resize 2560x2560 "${PWD}"/default_wallpaper.png "${ANDROID_BUILD_TOP}"/vendor/lineage/overlay/common/frameworks/base/core/res/res/drawable-xxxhdpi/default_wallpaper.png 29 | 30 | ## Cleanup 31 | rm "${PWD}"/default_wallpaper.png 32 | 33 | ## Commit changes 34 | cd "${ANDROID_BUILD_TOP}"/vendor/lineage && git add overlay/common/frameworks/base/core/res/res/drawable-*/default_wallpaper.png && git commit -m "Update default wallpaper" 35 | 36 | ## Go back to top 37 | croot 38 | 39 | exit 0 40 | -------------------------------------------------------------------------------- /device-deps-regenerator/.gitignore: -------------------------------------------------------------------------------- 1 | *.json 2 | -------------------------------------------------------------------------------- /device-deps-regenerator/README.md: -------------------------------------------------------------------------------- 1 | 1. Use Python 3.7 or higher 2 | 2. Run `pip3 install -r requirements.txt` 3 | 3. run `python3 app.py` to generate the full lineage.dependencies mapping 4 | 4. run `python3 device2kernel.py` to generate kernel -> devices mapping (like cve_tracker/kernels.json) 5 | 5. 
run `python3 devices.py` to generate device -> dependency mapping (like lineageos_updater/device_deps.json) 6 | -------------------------------------------------------------------------------- /device-deps-regenerator/app.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2017-2023 The LineageOS Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import argparse 5 | import concurrent.futures 6 | import json 7 | import requests 8 | import subprocess 9 | import traceback 10 | 11 | from xml.etree import ElementTree 12 | 13 | parser = argparse.ArgumentParser() 14 | parser.add_argument( 15 | "-j", "--jobs", type=int, help="Max number of workers to use. Default is none" 16 | ) 17 | args = parser.parse_args() 18 | 19 | # supported branches, newest to oldest 20 | CUR_BRANCHES = ["lineage-22.2", "lineage-22.1", "lineage-21", "lineage-21.0", "lineage-20", "lineage-20.0", "lineage-19.1", "lineage-18.1"] 21 | 22 | 23 | def get_cm_dependencies(name): 24 | try: 25 | stdout = subprocess.run( 26 | ["git", "ls-remote", "-h", f"https://:@github.com/LineageOS/{name}"], 27 | stdout=subprocess.PIPE, 28 | stderr=subprocess.PIPE, 29 | ).stdout.decode() 30 | branches = [x.split()[-1] for x in stdout.splitlines()] 31 | except: 32 | return None 33 | 34 | branch = next((x for x in CUR_BRANCHES if f"refs/heads/{x}" in branches), None) 35 | 36 | if branch is None: 37 | return None 38 | 39 | try: 40 | cmdeps = requests.get( 41 | f"https://raw.githubusercontent.com/LineageOS/{name}/{branch}/lineage.dependencies" 42 | ).json() 43 | except: 44 | cmdeps = [] 45 | 46 | mydeps = [] 47 | non_device_repos = set() 48 | for el in cmdeps: 49 | if el.get("remote", "github") != "github": 50 | continue 51 | if "_device_" not in el["repository"]: 52 | non_device_repos.add(el["repository"]) 53 | depbranch = el.get("branch", branch) 54 | mydeps.append({"repo": el["repository"], "branch": depbranch}) 55 | 56 | return [mydeps, non_device_repos] 57 | 58 | 59 | futures = {} 60 | n = 1 61 | 62 | dependencies = {} 63 | other_repos = set() 64 | 65 | with concurrent.futures.ThreadPoolExecutor(max_workers=args.jobs) as executor: 66 | elements = ElementTree.fromstring( 67 | requests.get( 68 | "https://raw.githubusercontent.com/LineageOS/mirror/main/default.xml" 69 | ).text 70 | ) 71 | 72 | for name in [ 73 | x.attrib["name"].split("/", maxsplit=1)[-1] 74 | for x in elements.findall(".//project") 75 | ]: 76 | if "_device_" not in name and "_hardware_" not in name: 77 | continue 78 | print(n, name) 79 | n += 1 80 | futures[executor.submit(get_cm_dependencies, name)] = name 81 | for future in concurrent.futures.as_completed(futures): 82 | name = futures[future] 83 | try: 84 | data = future.result() 85 | if data is None: 86 | continue 87 | dependencies[name] = data[0] 88 | other_repos.update(data[1]) 89 | print(name, "=>", data[0]) 90 | except Exception as e: 91 | print(f"{name!r} generated an exception: {e}") 92 | traceback.print_exc() 93 | continue 94 | futures = {} 95 | 96 | print(other_repos) 97 | for name in other_repos: 98 | print(name) 99 | try: 100 | futures[executor.submit(get_cm_dependencies, name)] = name 101 | except Exception: 102 | continue 103 | 104 | other_repos = set() 105 | for future in concurrent.futures.as_completed(futures): 106 | name = futures[future] 107 | try: 108 | data = future.result() 109 | if data is None: 110 | continue 111 | dependencies[name] = data[0] 112 | for el in data[1]: 113 | if el in dependencies: 114 | continue 115 | 
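# Descriptive note (added comment, not part of the upstream script): this is the
# second resolution pass, covering the common/non-device repos discovered above.
# Dependencies they reference that are still missing from `dependencies` are
# gathered into other_repos; after this pass those leftovers are only printed,
# not fetched again.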
other_repos.update(data[1]) 116 | print(name, "=>", data[0]) 117 | except Exception as e: 118 | print(f"{name!r} generated an exception: {e}") 119 | traceback.print_exc() 120 | continue 121 | futures = {} 122 | 123 | 124 | print(other_repos) 125 | 126 | with open("out.json", "w") as f: 127 | json.dump(dependencies, f, indent=4) 128 | -------------------------------------------------------------------------------- /device-deps-regenerator/device2kernel.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2017-2023 The LineageOS Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import json 5 | 6 | # Define device repos that have repos that depend on them, 7 | # otherwise the script will remove these on the assumption 8 | # they are common repos 9 | COMMON_DEVICE = [ 10 | "android_device_asus_flo", 11 | "android_device_asus_grouper", 12 | "android_device_google_marlin", 13 | "android_device_htc_flounder", 14 | "android_device_samsung_espressowifi", 15 | "android_device_samsung_n1awifi", 16 | "android_device_samsung_t0lte", 17 | ] 18 | 19 | with open("out.json") as f: 20 | mapping = json.load(f) 21 | 22 | kernels = {} 23 | 24 | reverse_deps = {} 25 | 26 | for device in mapping: 27 | deps = mapping[device] 28 | if device not in reverse_deps: 29 | reverse_deps[device] = [] 30 | for repo in deps: 31 | if repo["repo"] not in reverse_deps: 32 | reverse_deps[repo["repo"]] = [] 33 | reverse_deps[repo["repo"]].append(device) 34 | 35 | 36 | def simplify_reverse_deps(repo): 37 | if len(reverse_deps[repo]) == 0 and "-common" not in repo: 38 | return { 39 | repo, 40 | } 41 | res = set() 42 | for i in reverse_deps[repo]: 43 | res.update(simplify_reverse_deps(i)) 44 | if repo in COMMON_DEVICE: 45 | res.add(repo) 46 | return res 47 | 48 | 49 | for repo in reverse_deps: 50 | if "kernel" in repo: 51 | kernels[repo] = sorted(list(simplify_reverse_deps(repo))) 52 | 53 | with open("kernels.json", "w") as f: 54 | json.dump(kernels, f, indent=4, sort_keys=True) 55 | -------------------------------------------------------------------------------- /device-deps-regenerator/devices.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2017-2023 The LineageOS Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import json 5 | 6 | with open("out.json") as f: 7 | mapping = json.load(f) 8 | 9 | devices = {} 10 | suffixes = {} 11 | ignorelist = [ 12 | "atv", 13 | "caimito", 14 | "common", 15 | "contexthub", 16 | "devicesettings", 17 | "gs-common", 18 | "gs101", 19 | "gs201", 20 | "pantah", 21 | "raviole", 22 | "redbull", 23 | "sepolicy", 24 | "shusky", 25 | "sm8450-devicetrees", 26 | "sm8475-devicetrees", 27 | "sm8550-devicetrees", 28 | "sm8650-devicetrees", 29 | "sm8750-devicetrees", 30 | "zuma", 31 | ] 32 | 33 | 34 | def simplify_reverse_deps(repo, device): 35 | # repo['branch'] = cm-14.1 or cm-14.1-caf or cm-14.1-sony 36 | if "branch" in repo and repo["branch"].count("-") > 1: # get suffix 37 | if repo["repo"] not in suffixes: 38 | suffixes[repo["repo"]] = {} 39 | suffixes[repo["repo"]][device] = "-" + repo["branch"].split("-", 2)[2] 40 | 41 | if repo["repo"] not in mapping or len(mapping[repo["repo"]]) == 0: 42 | return [repo["repo"]] 43 | res = [] 44 | for i in mapping[repo["repo"]]: 45 | res += simplify_reverse_deps(i, device) 46 | res.append(repo["repo"]) 47 | return res 48 | 49 | 50 | for repo in mapping: 51 | if "device" not in repo or any(x in repo for x in ignorelist): 52 | 
continue 53 | codename = repo.split("_", maxsplit=3)[-1] 54 | if codename in devices: 55 | print(f"warning: dupe: {codename}") 56 | devices[codename] = sorted( 57 | list(set(simplify_reverse_deps({"repo": repo}, codename))) 58 | ) 59 | 60 | with open("device_deps.json", "w") as f: 61 | out = {"devices": devices, "suffixes": suffixes} 62 | out = devices 63 | json.dump(out, f, indent=4, sort_keys=True) 64 | f.write("\n") 65 | -------------------------------------------------------------------------------- /device-deps-regenerator/requirements.txt: -------------------------------------------------------------------------------- 1 | certifi==2022.12.7 2 | charset-normalizer==3.1.0 3 | idna==3.4 4 | requests==2.28.2 5 | urllib3==1.26.15 6 | -------------------------------------------------------------------------------- /emoji-updater/emoji-updater.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | # SPDX-FileCopyrightText: 2020-2023 The LineageOS Project 5 | # SPDX-License-Identifier: Apache-2.0 6 | 7 | import os 8 | import requests 9 | import sys 10 | 11 | from pathlib import Path 12 | 13 | if __name__ == "__main__": 14 | if len(sys.argv) < 2: 15 | sys.exit( 16 | f"usage: {sys.argv[0]} [url|https://unicode.org/Public/emoji/15.0/emoji-test.txt]" 17 | ) 18 | 19 | url = sys.argv[1] 20 | req = requests.get(url=url) 21 | 22 | group_name = "" 23 | items = {} 24 | 25 | for line in req.text.splitlines(): 26 | if line.startswith("# subgroup: "): 27 | group_name = line.split(maxsplit=2)[-1] 28 | elif '; fully-qualified' in line and not 'skin tone' in line: 29 | item = line.split(";")[0].strip().replace(" ", ",") 30 | items.setdefault(group_name, []).append(item) 31 | 32 | # We want to transfer the received data into the target file 33 | absolute_path = os.path.dirname(__file__) 34 | relative_path = "../../../packages/inputmethods/LatinIME/java/res/values-v19/emoji-categories.xml" 35 | target_path = Path(os.path.join(absolute_path, relative_path)).resolve() 36 | 37 | with open(target_path, "r+") as f: 38 | lines = f.read() 39 | f.seek(0) 40 | f.truncate() 41 | 42 | for key in [*items.keys()]: 43 | header = f"" 44 | start = lines.find(header) 45 | 46 | if start != -1: 47 | while start != -1: 48 | end1 = lines.find("", start) 49 | end2 = lines.find("" 74 | 75 | for c in items[key]: 76 | built += f"\n {c}" 77 | 78 | print(built) 79 | -------------------------------------------------------------------------------- /fbpacktool/.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/ 2 | *.py[cod] 3 | *$py.class 4 | -------------------------------------------------------------------------------- /fbpacktool/fbpack.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2021 The Android Open Source Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | import collections 4 | 5 | #from google3.third_party.devsite.androidsource.en.docs.core.architecture.bootloader.tools.pixel.fw_unpack import packedstruct 6 | import packedstruct 7 | 8 | FBPACK_MAGIC = 0x4b504246 # "FBPK" FastBook PacK 9 | FBPACK_VERSION = 2 10 | FBPACK_VERSION_V1 = 1 11 | FBPACK_DEFAULT_DATA_ALIGN = 16 12 | 13 | FBPACK_PARTITION_TABLE = 0 14 | FBPACK_PARTITION_DATA = 1 15 | FBPACK_SIDELOAD_DATA = 2 16 | 17 | BOOTLDR_IMG_MAGIC1 = 0x544f4f42 18 | BOOTLDR_IMG_MAGIC2 = 0x2152444c 19 | 20 | class CommonPackHeader(packedstruct.PackedStruct): 21 | 
magic: int 22 | version: int 23 | _FIELDS = collections.OrderedDict([ 24 | ('magic', 'I'), 25 | ('version', 'I'), 26 | ]) 27 | 28 | def __init__(self, 29 | magic=FBPACK_MAGIC, 30 | version=FBPACK_VERSION_V1): 31 | super().__init__(magic, version) 32 | 33 | 34 | class PackEntry(packedstruct.PackedStruct): 35 | """Pack entry info.""" 36 | 37 | type: int 38 | name: bytes 39 | product: bytes 40 | offset: int 41 | size: int 42 | slotted: int 43 | crc32: int 44 | _FIELDS = collections.OrderedDict([ 45 | ('type', 'I'), 46 | ('name', '36s'), 47 | ('product', '40s'), 48 | ('offset', 'Q'), 49 | ('size', 'Q'), 50 | ('slotted', 'I'), 51 | ('crc32', 'I'), 52 | ]) 53 | 54 | # Provide defaults. 55 | # pylint: disable=useless-super-delegation 56 | def __init__(self, 57 | type_=0, 58 | name=b'', 59 | prod=b'', 60 | offset=0, 61 | size=0, 62 | slotted=0, 63 | crc32=0): 64 | super(PackEntry, self).__init__(type_, name, prod, offset, size, slotted, 65 | crc32) 66 | 67 | 68 | class PackHeader(packedstruct.PackedStruct): 69 | """ A packed image representation""" 70 | 71 | magic: int 72 | version: int 73 | header_size: int 74 | entry_header_size: int 75 | platform: bytes 76 | pack_version: bytes 77 | slot_type: int 78 | data_align: int 79 | total_entries: int 80 | total_size: int 81 | _FIELDS = collections.OrderedDict([ 82 | ('magic', 'I'), 83 | ('version', 'I'), 84 | ('header_size', 'I'), 85 | ('entry_header_size', 'I'), 86 | ('platform', '16s'), 87 | ('pack_version', '64s'), 88 | ('slot_type', 'I'), 89 | ('data_align', 'I'), 90 | ('total_entries', 'I'), 91 | ('total_size', 'I'), 92 | ]) 93 | 94 | def __init__(self, 95 | magic=FBPACK_MAGIC, 96 | version=FBPACK_VERSION, 97 | header_size=0, 98 | entry_header_size=len(PackEntry()), 99 | platform=b'', 100 | pack_version=b'', 101 | slot_type=0, 102 | data_align=FBPACK_DEFAULT_DATA_ALIGN, 103 | total_entries=0, 104 | total_size=0): 105 | super(PackHeader, 106 | self).__init__(magic, version, header_size, entry_header_size, 107 | platform, pack_version, slot_type, data_align, 108 | total_entries, total_size) 109 | # update header size once we know all fields 110 | self.header_size = len(self) 111 | 112 | 113 | class PackHeaderV1(packedstruct.PackedStruct): 114 | magic: int 115 | version: int 116 | img_version: str 117 | total_entries: int 118 | total_size: int 119 | _FIELDS = collections.OrderedDict([ 120 | ('magic', 'I'), 121 | ('version', 'I'), 122 | ('img_version', '68s'), 123 | ('total_entries', 'I'), 124 | ('total_size', 'I'), 125 | ]) 126 | 127 | def __init__(self, 128 | magic=FBPACK_MAGIC, 129 | version=FBPACK_VERSION_V1, 130 | img_version=b'', 131 | total_entries=0, 132 | total_size=0): 133 | super().__init__(magic, version, img_version, total_entries, total_size) 134 | self.pack_version = b'' 135 | 136 | 137 | class PackEntryV1(packedstruct.PackedStruct): 138 | type: int 139 | name: str 140 | size_h: int 141 | size: int 142 | next_offset_h: int 143 | next_offset: int 144 | crc32: int 145 | _FIELDS = collections.OrderedDict([ 146 | ('type', 'I'), 147 | ('name', '32s'), 148 | ('size_h', 'I'), 149 | ('size', 'I'), 150 | ('next_offset_h', 'I'), 151 | ('next_offset', 'I'), 152 | ('crc32', 'I'), 153 | ]) 154 | 155 | # Provide defaults. 
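# Usage sketch (illustrative comment, not part of the upstream module): every
# header/entry class in this file is a fixed-size PackedStruct, so a serialized
# record can be round-tripped with the helpers defined in packedstruct.py, e.g.
#   entry = PackEntryV1.from_bytes(buf[:len(PackEntryV1())])
#   assert bytes(entry) == buf[:len(PackEntryV1())]
# where `buf` is assumed to start with one V1 entry header read from a pack file.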
156 | # pylint: disable=useless-super-delegation 157 | def __init__(self, 158 | type_=0, 159 | name=b'', 160 | size_h=0, 161 | size=0, 162 | next_offset_h=0, 163 | next_offset=0, 164 | crc32=0): 165 | super().__init__(type_, name, size_h, size, next_offset_h, next_offset, crc32) 166 | 167 | 168 | class PackHeaderBootLDR(packedstruct.PackedStruct): 169 | magic1: int 170 | magic2: int 171 | total_entries: int 172 | start_offset: int 173 | total_size: int 174 | _FIELDS = collections.OrderedDict([ 175 | ('magic1', 'I'), 176 | ('magic2', 'I'), 177 | ('total_entries', 'I'), 178 | ('start_offset', 'I'), 179 | ('total_size', 'I'), 180 | ]) 181 | 182 | def __init__(self, 183 | magic1=BOOTLDR_IMG_MAGIC1, 184 | magic2=BOOTLDR_IMG_MAGIC2, 185 | total_entries=0, 186 | start_offset=0, 187 | total_size=0): 188 | super().__init__(magic1, magic2, total_entries, start_offset, total_size) 189 | self.pack_version = b'' 190 | 191 | 192 | class PackEntryBootLDR(packedstruct.PackedStruct): 193 | name: str 194 | size: int 195 | _FIELDS = collections.OrderedDict([ 196 | ('name', '64s'), 197 | ('size', 'I'), 198 | ]) 199 | 200 | # Provide defaults. 201 | # pylint: disable=useless-super-delegation 202 | def __init__(self, 203 | name=b'', 204 | size=0): 205 | super().__init__(name, size) 206 | self.type = FBPACK_PARTITION_DATA 207 | self.product = None 208 | -------------------------------------------------------------------------------- /fbpacktool/fbpacktool.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2021 The Android Open Source Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | import argparse 4 | import logging 5 | import os 6 | import sys 7 | from lxml.etree import XMLParser 8 | import yaml 9 | 10 | #from google3.third_party.devsite.androidsource.en.docs.core.architecture.bootloader.tools.pixel.fw_unpack import fbpack 11 | import fbpack 12 | 13 | def bytes_to_str(bstr): 14 | return bstr.decode().rstrip('\x00') 15 | 16 | 17 | def print_pack_header(pack): 18 | print('magic: {:#x}'.format(pack.magic)) 19 | print('version: {}'.format(pack.version)) 20 | print('header size: {}'.format(pack.header_size)) 21 | print('entry header size: {}'.format(pack.entry_header_size)) 22 | platform = bytes_to_str(pack.platform) 23 | print('platform: {}'.format(platform)) 24 | pack_version = bytes_to_str(pack.pack_version) 25 | print('pack version: {}'.format(pack_version)) 26 | print('slock type: {}'.format(pack.slot_type)) 27 | print('data align: {}'.format(pack.data_align)) 28 | print('total entries: {}'.format(pack.total_entries)) 29 | print('total size: {}'.format(pack.total_size)) 30 | 31 | 32 | def print_pack_entry(entry, prefix): 33 | name = bytes_to_str(entry.name) 34 | print('{}name: {}'.format(prefix, name)) 35 | etype = 'unknown' 36 | if entry.type == fbpack.FBPACK_PARTITION_TABLE: 37 | etype = 'partiton table' 38 | elif entry.type == fbpack.FBPACK_PARTITION_DATA: 39 | etype = 'partition' 40 | elif entry.type == fbpack.FBPACK_SIDELOAD_DATA: 41 | etype = 'sideload' 42 | else: 43 | print('entry else') 44 | print('{}type: {}'.format(prefix, etype)) 45 | product = bytes_to_str(entry.product) 46 | print('{}product: {}'.format(prefix, product)) 47 | print('{}offset: {:#x} ({})'.format(prefix, entry.offset, entry.offset)) 48 | print('{}size: {:#x} ({})'.format(prefix, entry.size, entry.size)) 49 | print('{}slotted: {}'.format(entry.size, bool(entry.slotted))) 50 | print('{}crc32: {:#08x}'.format(prefix, entry.crc32)) 51 | 52 | 53 | def cmd_info(args): 54 
| with open(args.file, 'rb') as f: 55 | pack = fbpack.PackHeader.from_bytes(f.read(len(fbpack.PackHeader()))) 56 | 57 | if pack.version != fbpack.FBPACK_VERSION: 58 | raise NotImplementedError('unsupported version {}'.format(pack.version)) 59 | 60 | print('Header:') 61 | print_pack_header(pack) 62 | 63 | print('\nEntries:') 64 | for i in range(1, pack.total_entries + 1): 65 | entry = fbpack.PackEntry.from_bytes(f.read(len(fbpack.PackEntry()))) 66 | print('Entry {}: {{'.format(i)) 67 | print_pack_entry(entry, ' ') 68 | print('}') 69 | 70 | 71 | def align_up(val, align): 72 | return (val + align - 1) & ~(align - 1) 73 | 74 | 75 | def create_pack_file(file_name, in_dir_name, pack): 76 | pack.total_entries = len(pack.entries) 77 | offset = pack.header_size + pack.total_entries * pack.entry_header_size 78 | with open(file_name, 'wb') as f: 79 | # write entries data 80 | for entry in pack.entries: 81 | # align data 82 | offset = align_up(offset, pack.data_align) 83 | entry.offset = offset 84 | f.seek(offset) 85 | fin_name = os.path.join(in_dir_name, entry.filepath) 86 | with open(fin_name, 'rb') as fin: 87 | data = fin.read() 88 | entry.size = len(data) 89 | f.write(data) 90 | offset += len(data) 91 | 92 | pack.total_size = offset 93 | f.seek(0) 94 | # write pack header 95 | f.write(bytes(pack)) 96 | # iterate over entries again to write entry header 97 | for entry in pack.entries: 98 | f.write(bytes(entry)) 99 | 100 | 101 | def cmd_create(args): 102 | if not (args.file.lower().endswith('.xml') or 103 | args.file.lower().endswith('.yaml')): 104 | raise NotImplementedError('{} type not supported'.format(args.file)) 105 | 106 | pack = None 107 | if args.file.lower().endswith('.yaml'): 108 | pack = yaml.parse(args.file) 109 | else: 110 | pack = XMLParser.parse(args.file) 111 | pack.pack_version = bytes(str(args.pack_version).encode('ascii')) 112 | pack.header_size = len(pack) 113 | 114 | # create output directory if missing 115 | if not os.path.isdir(args.out_dir): 116 | os.makedirs(args.out_dir, 0o755) 117 | 118 | file_name = os.path.join(args.out_dir, pack.name + '.img') 119 | 120 | create_pack_file(file_name, args.in_dir, pack) 121 | 122 | 123 | def product_match(products, product): 124 | return product in products.split(b'|') 125 | 126 | 127 | def copyfileobj(src, dst, file_size): 128 | while file_size > 0: 129 | buf = src.read(min(128 * 1024, file_size)) 130 | dst.write(buf) 131 | file_size -= len(buf) 132 | 133 | 134 | def cmd_unpack(args): 135 | with open(args.file, 'rb') as f: 136 | common_pack = fbpack.CommonPackHeader.from_bytes(f.read(len(fbpack.CommonPackHeader()))) 137 | 138 | f.seek(0, os.SEEK_SET) 139 | 140 | if common_pack.version == fbpack.FBPACK_VERSION: 141 | pack = fbpack.PackHeader.from_bytes(f.read(len(fbpack.PackHeader()))) 142 | elif common_pack.version == fbpack.FBPACK_VERSION_V1: 143 | pack = fbpack.PackHeaderV1.from_bytes(f.read(len(fbpack.PackHeaderV1()))) 144 | elif common_pack.version == fbpack.BOOTLDR_IMG_MAGIC2: 145 | pack = fbpack.PackHeaderBootLDR.from_bytes(f.read(len(fbpack.PackHeaderBootLDR()))) 146 | offset = pack.start_offset 147 | else: 148 | raise NotImplementedError('unsupported version {}'.format(pack.version)) 149 | 150 | entries = [] 151 | entry = None 152 | next_offset = len(pack) 153 | # create list of entries we want to extact 154 | for _ in range(pack.total_entries): 155 | if common_pack.version == fbpack.FBPACK_VERSION: 156 | entry = fbpack.PackEntry.from_bytes(f.read(len(fbpack.PackEntry()))) 157 | offset = entry.offset 158 | elif 
common_pack.version == fbpack.FBPACK_VERSION_V1: 159 | f.seek(next_offset, os.SEEK_SET) 160 | entry = fbpack.PackEntryV1.from_bytes(f.read(len(fbpack.PackEntryV1()))) 161 | offset = f.tell() 162 | next_offset = (entry.next_offset_h << 32) | entry.next_offset 163 | elif common_pack.version == fbpack.BOOTLDR_IMG_MAGIC2: 164 | if entry is not None: 165 | offset += entry.size 166 | entry = fbpack.PackEntryBootLDR.from_bytes(f.read(len(fbpack.PackEntryBootLDR()))) 167 | else: 168 | raise NotImplementedError('unsupported version {}'.format(common_pack.version)) 169 | 170 | if entry.type == 0: 171 | # Ignore partition table entries, next_offset will tell us 172 | # where to go next 173 | continue 174 | 175 | name = bytes_to_str(entry.name) 176 | if not args.partitions or name in args.partitions: 177 | # if both product are valid then match product name too 178 | if not args.product or not entry.product or product_match( 179 | entry.product, args.product): 180 | entries.append((name, offset, entry.size)) 181 | 182 | if not entries and not args.unpack_ver: 183 | raise RuntimeError('no images to unpack') 184 | 185 | # create output directory if it does not exist 186 | if not os.path.isdir(args.out_dir): 187 | os.makedirs(args.out_dir, 0o755) 188 | 189 | out_files = {} 190 | # write file per entry 191 | for name, offset, size in entries: 192 | logging.info('Unpacking {} (size: {}, offset: {})'.format( 193 | name, size, offset)) 194 | f.seek(offset) 195 | entry_filename = os.path.join(args.out_dir, name + '.img') 196 | instance = out_files.get(entry_filename, 0) + 1 197 | out_files[entry_filename] = instance 198 | if instance > 1: 199 | entry_filename = os.path.join(args.out_dir, 200 | name + '({}).img'.format(instance - 1)) 201 | with open(entry_filename, 'wb') as entry_file: 202 | copyfileobj(f, entry_file, size) 203 | 204 | if args.unpack_ver: 205 | ver_file_path = os.path.join(args.out_dir, 'version.txt') 206 | with open(ver_file_path, 'w') as ver_file: 207 | ver_file.write(bytes_to_str(pack.pack_version)) 208 | 209 | logging.info('Done') 210 | 211 | 212 | def parse_args(): 213 | parser = argparse.ArgumentParser( 214 | description='Tool to create/modify/inspect fastboot packed images') 215 | parser.add_argument( 216 | '-v', 217 | '--verbosity', 218 | action='count', 219 | default=0, 220 | help='increase output verbosity') 221 | 222 | subparsers = parser.add_subparsers() 223 | 224 | # info command 225 | info = subparsers.add_parser('info') 226 | info.add_argument('file', help='packed image file') 227 | info.set_defaults(func=cmd_info) 228 | 229 | # create command 230 | create = subparsers.add_parser('create') 231 | create.add_argument( 232 | '-d', '--in_dir', help='directory to search for data files', default='.') 233 | create.add_argument( 234 | '-o', 235 | '--out_dir', 236 | help='output directory for the packed image', 237 | default='.') 238 | create.add_argument( 239 | '-v', '--pack_version', help='Packed image version ', default='') 240 | create.add_argument( 241 | 'file', help='config file describing packed image (yaml/xml)') 242 | create.set_defaults(func=cmd_create) 243 | 244 | # unpack command 245 | unpack = subparsers.add_parser('unpack') 246 | unpack.add_argument( 247 | '-o', '--out_dir', help='directory to store unpacked images', default='.') 248 | unpack.add_argument( 249 | '-p', '--product', help='filter images by product', default='') 250 | unpack.add_argument( 251 | '-v', 252 | '--unpack_ver', 253 | help='Unpack version to a file', 254 | action='store_true') 255 | 
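# Illustrative invocation (the image and partition names are hypothetical, not
# taken from this repo):
#   python3 fbpacktool.py unpack -o out bootloader.img abl xbl
# which extracts only the named partitions into ./out; add -v to also write the
# pack version to out/version.txt.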
unpack.add_argument('file', help='packed image file') 256 | unpack.add_argument( 257 | 'partitions', 258 | metavar='PART', 259 | type=str, 260 | nargs='*', 261 | help='Partition names to extract (default all).') 262 | unpack.set_defaults(func=cmd_unpack) 263 | 264 | args = parser.parse_args() 265 | # make sure a command was passed 266 | if not hasattr(args, 'func'): 267 | parser.print_usage() 268 | print('fbpacktool.py: error: no command was passed') 269 | sys.exit(2) 270 | 271 | return args 272 | 273 | 274 | def main(): 275 | args = parse_args() 276 | 277 | if args.verbosity >= 2: 278 | log_level = logging.DEBUG 279 | elif args.verbosity == 1: 280 | log_level = logging.INFO 281 | else: 282 | log_level = logging.WARNING 283 | 284 | logging.basicConfig(level=log_level) 285 | 286 | # execute command 287 | args.func(args) 288 | 289 | 290 | if __name__ == '__main__': 291 | main() 292 | -------------------------------------------------------------------------------- /fbpacktool/packedstruct.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2021 The Android Open Source Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | import collections 4 | import struct 5 | 6 | 7 | class PackedStruct(object): 8 | """Class representing a C style packed structure. 9 | 10 | Derived classes need to provide a dictionary where the keys are the attributes 11 | and the values are the format characters for each field. e.g. 12 | 13 | class Foo(PackedStruct): 14 | _FIELDS = { 15 | x: 'I', 16 | name: '64s', 17 | } 18 | 19 | In this case Foo.x will represent an "unsigned int" C value, while Foo.name 20 | will be a "char[64]" C value. 21 | """ 22 | _FIELDS: collections.OrderedDict 23 | 24 | def __init__(self, *args, **kwargs): 25 | self._fmt = '<' + ''.join(fmt for fmt in self._FIELDS.values()) 26 | for name in self._FIELDS: 27 | setattr(self, name, None) 28 | 29 | for name, val in zip(self._FIELDS.keys(), args): 30 | setattr(self, name, val) 31 | for name, val in kwargs.items(): 32 | setattr(self, name, val) 33 | 34 | def __repr__(self): 35 | return '{} {{\n'.format(self.__class__.__name__) + ',\n'.join( 36 | ' {!r}: {!r}'.format(k, getattr(self, k)) 37 | for k in self._FIELDS) + '\n}' 38 | 39 | def __str__(self): 40 | return struct.pack(self._fmt, *(getattr(self, x) for x in self._FIELDS)) 41 | 42 | def __bytes__(self): 43 | return struct.pack(self._fmt, *(getattr(self, x) for x in self._FIELDS)) 44 | 45 | def __len__(self): 46 | return struct.calcsize(self._fmt) 47 | 48 | @classmethod 49 | def from_bytes(cls, data): 50 | fmt_str = '<' + ''.join(fmt for fmt in cls._FIELDS.values()) 51 | return cls(*struct.unpack(fmt_str, data)) 52 | -------------------------------------------------------------------------------- /git-push-merge-review/README.md: -------------------------------------------------------------------------------- 1 | # git-push-merge-review 2 | 3 | ``` 4 | $ ./git-push-merge-review 5 | Usage ./git-push-merge-review [-t topic] 6 | ``` 7 | -------------------------------------------------------------------------------- /git-push-merge-review/git-push-merge-review: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # SPDX-FileCopyrightText: 2023-2024 The LineageOS Project 4 | # 5 | # SPDX-License-Identifier: Apache-2.0 6 | # 7 | 8 | usage() { 9 | echo "Usage ${0} [-t topic] " 10 | } 11 | 12 | # Parse CLI args 13 | while getopts "t:" opt; do 14 | case $opt in 15 | t) 16 | 
EXTRA+=",topic=${OPTARG}" 17 | ;; 18 | \?) 19 | exit 1 20 | ;; 21 | esac 22 | done 23 | 24 | shift $((OPTIND - 1)) 25 | 26 | # Verify argument count 27 | if [ "${#}" -ne 3 ]; then 28 | usage 29 | exit 1 30 | fi 31 | 32 | REMOTE="${1}" 33 | MERGE_SHA="${2}" 34 | BRANCH="${3}" 35 | FIRST_SHA="$(git show -s --pretty=%P ${MERGE_SHA} | cut -d ' ' -f 1)" 36 | SECOND_SHA="$(git show -s --pretty=%P ${MERGE_SHA} | cut -d ' ' -f 2)" 37 | 38 | git push "${REMOTE}" "${MERGE_SHA}":refs/for/"${BRANCH}"%base="${FIRST_SHA}",base="${SECOND_SHA}${EXTRA}" 39 | -------------------------------------------------------------------------------- /key-migration/export-keys.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # SPDX-FileCopyrightText: 2017 The LineageOS Project 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | if ! cd "$1"; then 7 | echo "USAGE: $0 PATH" 8 | exit 1 9 | fi 10 | 11 | for x in platform media shared; do 12 | echo ${x}_key_release=\"$(openssl x509 -pubkey -noout -in $x.x509.pem | grep -v '-' | tr -d '\n')\" 13 | echo ${x}_cert_release=\"$(openssl x509 -outform der -in $x.x509.pem | xxd -p | tr -d '\n')\" 14 | done 15 | 16 | echo release_key=\"$(openssl x509 -pubkey -noout -in releasekey.x509.pem | grep -v '-' | tr -d '\n')\" 17 | echo release_cert=\"$(openssl x509 -outform der -in releasekey.x509.pem | xxd -p | tr -d '\n')\" 18 | -------------------------------------------------------------------------------- /lineage-priv-template/BUILD.bazel: -------------------------------------------------------------------------------- 1 | ../../../build/make/target/product/security/BUILD.bazel -------------------------------------------------------------------------------- /lineage-priv-template/README.md: -------------------------------------------------------------------------------- 1 | # lineage-priv-template 2 | 3 | # Usage 4 | 5 | 1. Copy to $TOP/vendor/lineage-priv/keys 6 | 2. cd $TOP/vendor/lineage-priv/keys 7 | 3. ./keys.sh 8 | 4. Profit 9 | 10 | # Testing 11 | 12 | Included `check_keys.py` script checks whether all apk/apex/capex files in the build out are signed with keys within its directory. Be aware that some targets are **expected** to be signed with vendor key, for example `com.android.apex.cts.shim.v1_prebuilt`. 13 | 14 | ``` 15 | $ ./check_keys.py ~/lineage/out/target/product/lemonadep 16 | /home/luk/lineage/out/target/product/lemonadep/obj/ETC/com.android.apex.cts.shim.v1_prebuilt_intermediates/com.android.apex.cts.shim.apex is signed with an unknown key! 
17 | ``` 18 | -------------------------------------------------------------------------------- /lineage-priv-template/check_keys.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | # SPDX-FileCopyrightText: 2024 The LineageOS Project 5 | # SPDX-License-Identifier: Apache-2.0 6 | 7 | import glob 8 | import subprocess 9 | import sys 10 | from multiprocessing import Pool 11 | 12 | from cryptography import x509 13 | 14 | KNOWN_KEYS = [ 15 | x509.load_pem_x509_certificate(open(f, "rb").read()).public_key() 16 | for f in glob.glob("*.x509.pem") 17 | ] 18 | 19 | 20 | def check_public_key(path: str) -> None: 21 | certs = [] 22 | stdout = subprocess.run( 23 | [ 24 | "java", 25 | "-jar", 26 | "../../../prebuilts/sdk/tools/linux/lib/apksigner.jar", 27 | "verify", 28 | "--print-certs-pem", 29 | path, 30 | ], 31 | capture_output=True, 32 | ).stdout 33 | 34 | while begin := stdout.find(b"-----BEGIN CERTIFICATE-----"): 35 | end = stdout.find(b"-----END CERTIFICATE-----", begin) 36 | 37 | if end == -1: 38 | break 39 | 40 | certs.append(x509.load_pem_x509_certificate(stdout[begin : end + 25])) 41 | stdout = stdout[end + 25 :] 42 | 43 | if not any(x.public_key() in KNOWN_KEYS for x in certs): 44 | print(path, "is signed with an unknown key!") 45 | 46 | 47 | def main(): 48 | out = sys.argv[1] 49 | 50 | with Pool(8) as pool: 51 | pool.map( 52 | check_public_key, 53 | ( 54 | glob.glob(f"{out}/obj/**/*.apk", recursive=True) 55 | + glob.glob(f"{out}/obj/**/*.apex", recursive=True) 56 | + glob.glob(f"{out}/obj/**/*.capex", recursive=True) 57 | ), 58 | ) 59 | 60 | 61 | if __name__ == "__main__": 62 | main() 63 | -------------------------------------------------------------------------------- /lineage-priv-template/keys.mk: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2024 The LineageOS Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | PRODUCT_CERTIFICATE_OVERRIDES := \ 5 | com.android.adbd:com.android.adbd.certificate.override \ 6 | com.android.adservices:com.android.adservices.certificate.override \ 7 | com.android.adservices.api:com.android.adservices.api.certificate.override \ 8 | com.android.appsearch:com.android.appsearch.certificate.override \ 9 | com.android.appsearch.apk:com.android.appsearch.apk.certificate.override \ 10 | com.android.art:com.android.art.certificate.override \ 11 | com.android.bluetooth:com.android.bluetooth.certificate.override \ 12 | com.android.btservices:com.android.btservices.certificate.override \ 13 | com.android.cellbroadcast:com.android.cellbroadcast.certificate.override \ 14 | com.android.compos:com.android.compos.certificate.override \ 15 | com.android.configinfrastructure:com.android.configinfrastructure.certificate.override \ 16 | com.android.connectivity.resources:com.android.connectivity.resources.certificate.override \ 17 | com.android.conscrypt:com.android.conscrypt.certificate.override \ 18 | com.android.devicelock:com.android.devicelock.certificate.override \ 19 | com.android.extservices:com.android.extservices.certificate.override \ 20 | com.android.hardware.authsecret:com.android.hardware.authsecret.certificate.override \ 21 | com.android.hardware.biometrics.face.virtual:com.android.hardware.biometrics.face.virtual.override \ 22 | com.android.hardware.biometrics.fingerprint.virtual:com.android.hardware.biometrics.fingerprint.virtual.override \ 23 | 
com.android.hardware.boot:com.android.hardware.boot.certificate.override \ 24 | com.android.hardware.cas:com.android.hardware.cas.override \ 25 | com.android.hardware.neuralnetworks:com.android.hardware.neuralnetworks.certificate.override \ 26 | com.android.hardware.rebootescrow:com.android.hardware.rebootescrow.certificate.override \ 27 | com.android.hardware.wifi:com.android.hardware.wifi.certificate.override \ 28 | com.android.healthfitness:com.android.healthfitness.certificate.override \ 29 | com.android.hotspot2.osulogin:com.android.hotspot2.osulogin.certificate.override \ 30 | com.android.i18n:com.android.i18n.certificate.override \ 31 | com.android.ipsec:com.android.ipsec.certificate.override \ 32 | com.android.media:com.android.media.certificate.override \ 33 | com.android.media.swcodec:com.android.media.swcodec.certificate.override \ 34 | com.android.mediaprovider:com.android.mediaprovider.certificate.override \ 35 | com.android.nearby.halfsheet:com.android.nearby.halfsheet.certificate.override \ 36 | com.android.networkstack.tethering:com.android.networkstack.tethering.certificate.override \ 37 | com.android.neuralnetworks:com.android.neuralnetworks.certificate.override \ 38 | com.android.nfcservices:com.android.nfcservices.certificate.override \ 39 | com.android.ondevicepersonalization:com.android.ondevicepersonalization.certificate.override \ 40 | com.android.os.statsd:com.android.os.statsd.certificate.override \ 41 | com.android.permission:com.android.permission.certificate.override \ 42 | com.android.profiling:com.android.profiling.certificate.override \ 43 | com.android.resolv:com.android.resolv.certificate.override \ 44 | com.android.rkpd:com.android.rkpd.certificate.override \ 45 | com.android.runtime:com.android.runtime.certificate.override \ 46 | com.android.safetycenter.resources:com.android.safetycenter.resources.certificate.override \ 47 | com.android.scheduling:com.android.scheduling.certificate.override \ 48 | com.android.sdkext:com.android.sdkext.certificate.override \ 49 | com.android.support.apexer:com.android.support.apexer.certificate.override \ 50 | com.android.telephony:com.android.telephony.certificate.override \ 51 | com.android.telephonymodules:com.android.telephonymodules.certificate.override \ 52 | com.android.tethering:com.android.tethering.certificate.override \ 53 | com.android.tzdata:com.android.tzdata.certificate.override \ 54 | com.android.uwb:com.android.uwb.certificate.override \ 55 | com.android.uwb.resources:com.android.uwb.resources.certificate.override \ 56 | com.android.virt:com.android.virt.certificate.override \ 57 | com.android.vndk.current:com.android.vndk.current.certificate.override \ 58 | com.android.wifi:com.android.wifi.certificate.override \ 59 | com.android.wifi.dialog:com.android.wifi.dialog.certificate.override \ 60 | com.android.wifi.resources:com.android.wifi.resources.certificate.override \ 61 | com.google.pixel.vibrator.hal:com.google.pixel.vibrator.hal.certificate.override \ 62 | com.qorvo.uwb:com.qorvo.uwb.certificate.override 63 | 64 | PRODUCT_CERTIFICATE_OVERRIDES += \ 65 | AdServicesApk:com.android.adservices.api.certificate.override \ 66 | FederatedCompute:com.android.federatedcompute.certificate.override \ 67 | HealthConnectBackupRestore:com.android.health.connect.backuprestore.certificate.override \ 68 | HealthConnectController:com.android.healthconnect.controller.certificate.override \ 69 | OsuLogin:com.android.hotspot2.osulogin.certificate.override \ 70 | 
SafetyCenterResources:com.android.safetycenter.resources.certificate.override \ 71 | ServiceConnectivityResources:com.android.connectivity.resources.certificate.override \ 72 | ServiceUwbResources:com.android.uwb.resources.certificate.override \ 73 | ServiceWifiResources:com.android.wifi.resources.certificate.override \ 74 | WifiDialog:com.android.wifi.dialog.certificate.override 75 | 76 | PRODUCT_DEFAULT_DEV_CERTIFICATE := vendor/lineage-priv/keys/testkey 77 | PRODUCT_EXTRA_RECOVERY_KEYS := 78 | -------------------------------------------------------------------------------- /lineage-priv-template/keys.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # SPDX-FileCopyrightText: 2024 The LineageOS Project 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | # Generate Android.bp 7 | echo "// DO NOT EDIT THIS FILE MANUALLY" > Android.bp 8 | 9 | for key in $(grep -o :\.\*override keys.mk | sort -u); do 10 | echo "" >> Android.bp 11 | echo "android_app_certificate {" >> Android.bp 12 | echo " name: \"${key:1}\"," >> Android.bp 13 | echo " certificate: \"${key:1}\"," >> Android.bp 14 | echo "}" >> Android.bp 15 | done 16 | 17 | # Generate keys 18 | for key in ../../../build/make/target/product/security/*.pk8; do 19 | ./make_key.sh $(basename $key .pk8) 20 | done 21 | 22 | for key in $(grep -o :\.\*override keys.mk | sort -u); do 23 | ./make_key.sh ${key:1} 4096 24 | done 25 | -------------------------------------------------------------------------------- /lineage-priv-template/make_key.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # SPDX-FileCopyrightText: 2024 The LineageOS Project 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | set -u 7 | bash <(sed "s/2048/${2:-2048}/;/Enter password/,+1d" ../../../development/tools/make_key) \ 8 | $1 \ 9 | '/C=US/ST=California/L=Mountain View/O=Android/OU=Android/CN=Android/emailAddress=android@android.com' 10 | -------------------------------------------------------------------------------- /lineage-priv-template/releasekey.pk8: -------------------------------------------------------------------------------- 1 | testkey.pk8 -------------------------------------------------------------------------------- /lineage-priv-template/releasekey.x509.pem: -------------------------------------------------------------------------------- 1 | testkey.x509.pem -------------------------------------------------------------------------------- /lineage-priv-template/requirements.txt: -------------------------------------------------------------------------------- 1 | cryptography 2 | -------------------------------------------------------------------------------- /lineage-push/README.md: -------------------------------------------------------------------------------- 1 | # LineageOS Push Script 2 | 3 | ``` 4 | usage: lineage-push.py [-h] [-a] [-b] [-d] [-e] [-f] [-l LABEL] [-m [MESSAGE]] 5 | [-p [PRIVATE]] [-r REF] [-s] [-t TOPIC] [-w [WIP]] 6 | branch 7 | 8 | Pushes a local git repository's changes to Gerrit for code review 9 | 10 | positional arguments: 11 | branch upload change to branch 12 | 13 | optional arguments: 14 | -h, --help show this help message and exit 15 | -a, --hashtag add hashtag to change 16 | -b, --bypass bypass review and merge 17 | -d, --draft upload change as draft 18 | -e, --edit upload change as edit 19 | -f, --force force push 20 | -l LABEL, --label LABEL 21 | assign label 22 | -m [MESSAGE], --message [MESSAGE] 23 | add message 
to change 24 | -p [PRIVATE], --private [PRIVATE] 25 | upload change as private 26 | -r REF, --ref REF push to specified ref 27 | -s, --submit submit change 28 | -t TOPIC, --topic TOPIC 29 | append topic to change 30 | -w [WIP], --wip [WIP] 31 | upload change as WIP 32 | ``` 33 | ``` 34 | Examples: 35 | lineage-push -d -t test cm-14.1 36 | lineage-push -s -l "Code-Review+2,Verified+1" cm-14.1 37 | ``` 38 | -------------------------------------------------------------------------------- /lineage-push/lineage-push.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # SPDX-FileCopyrightText: 2017-2018 The LineageOS Project 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | from __future__ import print_function 7 | 8 | import re 9 | import subprocess 10 | import sys 11 | from argparse import ArgumentParser, ArgumentTypeError 12 | 13 | try: 14 | from urllib.parse import quote_plus 15 | except ImportError: 16 | from urllib import quote_plus 17 | 18 | 19 | def push(args): 20 | command = 'git push' 21 | parameters = [] 22 | 23 | if args.force: 24 | command += ' -f' 25 | 26 | username = subprocess.check_output( 27 | ["git", "config", "review.review.lineageos.org.username"]).decode("utf-8").strip() 28 | remotes = subprocess.check_output( 29 | ["git", "remote", "-v"]).decode("utf-8").strip() 30 | if "github.com/LineageOS" in remotes or "git@github.com:LineageOS" in remotes: 31 | repo = re.search(r'LineageOS\S+', remotes).group(0) 32 | elif "android.googlesource.com" in remotes: 33 | repo = re.search(r'platform\S+', remotes).group(0) 34 | repo = repo.replace("/", "_").replace("platform", "LineageOS/android") 35 | 36 | command += ' ssh://{}@review.lineageos.org:29418/{}'.format( 37 | username, repo) 38 | command += ' HEAD:' 39 | 40 | if args.ref != 'for': 41 | command += 'refs/{}/'.format(args.ref) 42 | elif args.bypass: 43 | command += '' 44 | elif args.draft: 45 | command += 'refs/drafts/' 46 | else: 47 | command += 'refs/{}/'.format(args.ref) 48 | 49 | command += args.branch 50 | 51 | if args.label: 52 | for label in args.label.split(','): 53 | parameters.append('l={}'.format(label)) 54 | 55 | if args.edit: 56 | parameters.append('edit') 57 | 58 | if args.topic: 59 | parameters.append('topic={}'.format(args.topic)) 60 | 61 | if args.hashtag: 62 | parameters.append('hashtag={}'.format(args.hashtag)) 63 | 64 | if args.submit: 65 | parameters.append('submit') 66 | 67 | if args.private == True: 68 | parameters.append('private') 69 | elif args.private == False: 70 | parameters.append('remove-private') 71 | 72 | if args.wip == True: 73 | parameters.append('wip') 74 | elif args.wip == False: 75 | parameters.append('ready') 76 | 77 | if args.message: 78 | parameters.append('m={}'.format(quote_plus(args.message))) 79 | 80 | if len(parameters) > 0: 81 | command += "%" + ','.join(parameters) 82 | 83 | sys.exit(subprocess.call(command.split(' '))) 84 | 85 | 86 | def str2bool(v): 87 | if v.lower() in ('yes', 'true', 't', 'y', '1'): 88 | return True 89 | elif v.lower() in ('no', 'false', 'f', 'n', '0'): 90 | return False 91 | else: 92 | raise ArgumentTypeError('Boolean value expected.') 93 | 94 | 95 | def parse_cmdline(): 96 | parser = ArgumentParser( 97 | description='Pushes a local git repository\'s changes to Gerrit for code review') 98 | parser.add_argument('branch', help='upload change to branch') 99 | parser.add_argument('-a', '--hashtag', action='store_true', 100 | help='add hashtag to change') 101 | parser.add_argument('-b', '--bypass', 
action='store_true', 102 | help='bypass review and merge') 103 | parser.add_argument('-d', '--draft', action='store_true', 104 | help='upload change as draft') 105 | parser.add_argument('-e', '--edit', action='store_true', 106 | help='upload change as edit') 107 | parser.add_argument( 108 | '-f', '--force', action='store_true', help='force push') 109 | parser.add_argument('-l', '--label', help='assign label') 110 | parser.add_argument('-m', '--message', nargs='?', 111 | help='add message to change') 112 | parser.add_argument('-p', '--private', type=str2bool, nargs='?', 113 | const=True, help='upload change as private') 114 | parser.add_argument( 115 | '-r', '--ref', help='push to specified ref', default="for") 116 | parser.add_argument( 117 | '-s', '--submit', action='store_true', help='submit change') 118 | parser.add_argument('-t', '--topic', help='append topic to change') 119 | parser.add_argument('-w', '--wip', type=str2bool, nargs='?', 120 | const=True, help='upload change as WIP') 121 | return parser.parse_args() 122 | 123 | 124 | def main(): 125 | args = parse_cmdline() 126 | push(args) 127 | 128 | 129 | if __name__ == '__main__': 130 | main() 131 | -------------------------------------------------------------------------------- /motorola/info.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # SPDX-FileCopyrightText: 2023 The Calyx Institute 4 | # 5 | # SPDX-License-Identifier: Apache-2.0 6 | 7 | # 8 | # info: 9 | # 10 | # Dump information for one device 11 | # 12 | # 13 | ############################################################################## 14 | 15 | 16 | ### SET ### 17 | 18 | # use bash strict mode 19 | set -euo pipefail 20 | 21 | ### TRAPS ### 22 | 23 | # trap signals for clean exit 24 | trap 'exit $?' EXIT 25 | trap 'error_m interrupted!' SIGINT 26 | 27 | ### CONSTANTS ### 28 | readonly script_path="$(cd "$(dirname "$0")";pwd -P)" 29 | readonly top="${script_path}/../../.." 30 | readonly avbtool="${top}/external/avb/avbtool.py" 31 | 32 | ## HELP MESSAGE (USAGE INFO) 33 | # TODO 34 | 35 | ### FUNCTIONS ### 36 | 37 | info() { 38 | local source="${1}" 39 | 40 | local bootloader_version=$(cat ${source}/*.info.txt | grep "MBM Version:" | cut -c 14-) 41 | local build_id=$(cat ${source}/*.info.txt | grep "Build Id:" | cut -c 11-) 42 | local build_fingerprint=$(cat ${source}/*.info.txt | grep "Build Fingerprint:" | cut -c 20-) 43 | local build_description=$(cat ${source}/*.info.txt | grep "Version when read from CPV:" | cut -c 29-) 44 | local security_patch=$(python3 ${avbtool} info_image --image ${source}/vbmeta.img | grep "com.android.build.vendor.security_patch" | cut -c 54- | sed s/\'//g) 45 | local rollback_index=$(python3 ${avbtool} info_image --image ${source}/vbmeta.img | grep "Rollback Index:" | cut -c 27-) 46 | 47 | echo "Bootloader version: $bootloader_version" 48 | echo "Build ID: $build_id" 49 | echo "Build fingerprint: $build_fingerprint" 50 | echo "Build description: $build_description" 51 | echo "Vendor security patch: $security_patch" 52 | echo "AVB rollback index: $rollback_index" 53 | } 54 | 55 | # error message 56 | # ARG1: error message for STDERR 57 | # ARG2: error status 58 | error_m() { 59 | echo "ERROR: ${1:-'failed.'}" 1>&2 60 | return "${2:-1}" 61 | } 62 | 63 | # print help message. 
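# Typical invocation (illustrative; the directory name is hypothetical): point
# the script at a directory containing the stock *.info.txt and vbmeta.img
# extracted from a factory image, e.g.
#   ./info.sh extracted_factory_image/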
64 | help_message() { 65 | echo "${help_message:-'No help available.'}" 66 | } 67 | 68 | main() { 69 | if [[ $# -eq 1 ]] ; then 70 | info "${1}" 71 | else 72 | error_m 73 | fi 74 | } 75 | 76 | ### RUN PROGRAM ### 77 | 78 | main "${@}" 79 | 80 | 81 | ## 82 | -------------------------------------------------------------------------------- /motorola/star.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # SPDX-FileCopyrightText: 2023 Hemanth Jabalpuri 4 | # SPDX-License-Identifier: CC0-1.0 5 | 6 | # This basic program is used for unpacking Motorola archives which are made using single image tar(star) utility 7 | # can run in dash. dd, od, tr are used mainly(busybox also compatible) 8 | # 9 | # Created : 1st Feb 2023 10 | # Author : HemanthJabalpuri 11 | 12 | if [ $# -lt 2 ]; then 13 | echo "Usage: star.sh file outdir" 14 | exit 15 | fi 16 | 17 | f="$1" 18 | outdir="$2" 19 | 20 | mkdir -p "$outdir" 2>/dev/null 21 | 22 | getData() { 23 | dd if="$f" bs=1 skip=$1 count=$2 2>/dev/null 24 | } 25 | 26 | getLong() { 27 | getData $1 8 | od -A n -t u8 | tr -d " " 28 | } 29 | 30 | magic=$(getData 0 15) 31 | if [ "$magic" != "SINGLE_N_LONELY" ]; then 32 | echo " Unsupported"; exit 1 33 | fi 34 | 35 | seekoff=256 36 | for i in $(seq 64); do 37 | name="$(getData $seekoff 248)" 38 | [ "$name" = "LONELY_N_SINGLE" ] && break 39 | length="$(getLong $((seekoff+248)))" 40 | offset="$((seekoff+256))" 41 | pad=$((length%4096)) 42 | [ "$pad" -ne 0 ] && pad=$((4096-pad)) 43 | echo "Name: $name, Offset: $offset, Size: $length, Padding: $pad" 44 | 45 | dd if="$f" of="$outdir/$name" iflag=skip_bytes,count_bytes status=none bs=4096 skip=$offset count=$length 46 | seekoff="$((offset+length+pad))" 47 | done 48 | -------------------------------------------------------------------------------- /pixel/README.md: -------------------------------------------------------------------------------- 1 | ## Pixel Scripts 2 | 3 | ### Variables 4 | 5 | `${TOP}/vendor/lineage/vars/` - This directory stores all variables for repositories that have upstreams that are regularly merged. 6 | 7 | Standard Variables: 8 | 9 | `$deviceName` - e.g. `sargo`, `redfin`, `coral` - Stores the following data: 10 | 11 | * `firmware_partitions` - A matrix of the partition names of proprietary firmware images relevant to this device 12 | * `device_repos` - A matrix of the file-paths of relevant AOSP repositories this device depends on. 
13 | * `build_id` - Previous/current device stock build ID tags 14 | * `build_number` - Previous/current device stock build number strings 15 | * `image_url` - Direct link to device's latest factory image 16 | * `image_sha256` - SHA256 sum of device's latest factory image 17 | * `flash_url` - Stores a formatted link to Google's web-based [Flash tool](https://flash.android.com/welcome) which brings up the device's latest available image, additionally is used to fetch the data used to create build fingerprint changes 18 | * `ota_url` - Direct link to device's latest OTA image 19 | * `ota_sha256` - SHA256 sum of device's latest OTA image 20 | * `security_patch` - The device's stock vendor security patch level from the device's latest factory image 21 | 22 | `$kernelName` - Stores the following data 23 | 24 | * `{prev_,}common_aosp_tag` - Previous/current tracked AOSP kernel tag for the relevant device/platform 25 | 26 | See `../aosp-merger/README.md` for more 27 | 28 | ### Scripts and usage 29 | 30 | `all.sh` - Parallelly downloads factory images for all supported pixels in vars/pixels, and extracts files/firmware 31 | 32 | `build-desc-fingerprint.sh` - Updates build description/fingerprint in all Pixel device tree forks, and commits them, pulled from relevant `$deviceName` variable files 33 | 34 | `device.sh` - Downloads single device factory images/OTA images and extracts files/firmware for it. e.g. `device.sh raven` 35 | 36 | `download.sh` - Downloads single device factory images - e.g. `download.sh raven` 37 | 38 | `update-any-var.sh` - Update any var in `vendor/lineage/vars` - e.g. `update-any-var.sh build_id TQ1A.230105.001.A2 bluejay panther` 39 | 40 | `update-device-vars.sh` - Automatically update all `$deviceName` variables of supported devices after running `download.sh`. 41 | 42 | `extract-factory-image.sh` - Extracts factory image contents for a single device from already downloaded images. e.g. `extract-factory-image.sh raven` 43 | 44 | `firmware.sh` - Extracts firmware for a single device from already downloaded images. e.g. `firmware.sh raven` 45 | 46 | `get-new-device-vars.py` - For internal use by many of the scripts referenced above 47 | 48 | Relevant cross-script variables: 49 | 50 | `WORK_DIR` - Tell the scripts where to save factory images, defaults to `/tmp/pixel`. e.g. `export WORK_DIR=/mnt/android/stock/` 51 | -------------------------------------------------------------------------------- /pixel/all.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # SPDX-FileCopyrightText: 2022 The Calyx Institute 4 | # 5 | # SPDX-License-Identifier: Apache-2.0 6 | 7 | # 8 | # all: 9 | # 10 | # Do it all! 11 | # 12 | # 13 | ############################################################################## 14 | 15 | 16 | ### SET ### 17 | 18 | # use bash strict mode 19 | set -euo pipefail 20 | 21 | ### TRAPS ### 22 | 23 | # trap signals for clean exit 24 | trap 'exit $?' EXIT 25 | trap 'error_m interrupted!' SIGINT 26 | 27 | ### CONSTANTS ### 28 | readonly script_path="$(cd "$(dirname "$0")";pwd -P)" 29 | readonly vars_path="${script_path}/../../../vendor/lineage/vars" 30 | readonly top="${script_path}/../../.." 
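# The working directory below honors the WORK_DIR environment variable and falls
# back to /tmp/pixel. Illustrative invocations (device codenames are examples):
#   ./all.sh                                  # every device listed in vars/pixels
#   WORK_DIR=/mnt/android/stock ./all.sh raven oriole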
31 | 32 | readonly work_dir="${WORK_DIR:-/tmp/pixel}" 33 | 34 | source "${vars_path}/pixels" 35 | 36 | ## HELP MESSAGE (USAGE INFO) 37 | # TODO 38 | 39 | ### FUNCTIONS ### 40 | 41 | device() { 42 | local device="${1}" 43 | local script_path="${2}" 44 | 45 | "${script_path}/device.sh" "${device}" 46 | } 47 | 48 | export -f device 49 | 50 | # error message 51 | # ARG1: error message for STDERR 52 | # ARG2: error status 53 | error_m() { 54 | echo "ERROR: ${1:-'failed.'}" 1>&2 55 | return "${2:-1}" 56 | } 57 | 58 | # print help message. 59 | help_message() { 60 | echo "${help_message:-'No help available.'}" 61 | } 62 | 63 | main() { 64 | if [[ $# -ne 0 ]] ; then 65 | parallel --line-buffer --tag device ::: "${@}" ::: "${script_path}" 66 | else 67 | parallel --line-buffer --tag device ::: ${devices[@]} ::: "${script_path}" 68 | fi 69 | } 70 | 71 | ### RUN PROGRAM ### 72 | 73 | main "${@}" 74 | 75 | 76 | ## 77 | -------------------------------------------------------------------------------- /pixel/build-desc-fingerprint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # SPDX-FileCopyrightText: 2022 The Calyx Institute 4 | # 5 | # SPDX-License-Identifier: Apache-2.0 6 | 7 | # 8 | # build-desc-fingerprint: 9 | # 10 | # Update build.prop build description and fingerprint overrides to match stock 11 | # 12 | # 13 | ############################################################################## 14 | 15 | 16 | ### SET ### 17 | 18 | # use bash strict mode 19 | set -euo pipefail 20 | 21 | 22 | ### TRAPS ### 23 | 24 | # trap signals for clean exit 25 | trap 'error_m interrupted!' SIGINT 26 | 27 | ### CONSTANTS ### 28 | readonly script_path="$(cd "$(dirname "$0")";pwd -P)" 29 | readonly vars_path="${script_path}/../../../vendor/lineage/vars" 30 | readonly top="${script_path}/../../.." 31 | 32 | source "${vars_path}/pixels" 33 | source "${vars_path}/common" 34 | 35 | ## HELP MESSAGE (USAGE INFO) 36 | # TODO 37 | 38 | ### FUNCTIONS ### 39 | 40 | 41 | 42 | # error message 43 | # ARG1: error message for STDERR 44 | # ARG2: error status 45 | error_m() { 46 | echo "ERROR: ${1:-'failed.'}" 1>&2 47 | return "${2:-1}" 48 | } 49 | 50 | # print help message. 
51 | help_message() { 52 | echo "${help_message:-'No help available.'}" 53 | } 54 | 55 | main() { 56 | if [[ $# -ne 0 ]]; then 57 | local ds="${@}" 58 | else 59 | local ds="${devices[@]}" 60 | fi 61 | 62 | # Update the makefiles 63 | for d in ${ds}; do 64 | ( 65 | local dv="${vars_path}/${d}" 66 | source "${dv}" 67 | local mk="$(ls ${top}/device/google/*/lineage_${d}.mk)" 68 | desc="${d}-user ${android_version} ${build_id} ${build_number} release-keys" 69 | fingerprint="google/${d}/${d}:${android_version}/${build_id}/${build_number}:user/release-keys" 70 | sed -i "/BuildDesc/c\ BuildDesc=\"${desc}\" \\\\" "${mk}" 71 | sed -i "/BuildFingerprint/c\ BuildFingerprint=${fingerprint} \\\\" "${mk}" 72 | ) 73 | done 74 | 75 | # Commit the changes 76 | for d in ${ds}; do 77 | ( 78 | local dv="${vars_path}/${d}" 79 | source "${dv}" 80 | local dir="$(ls ${top}/device/google/*/lineage_${d}.mk | sed s#/lineage_${d}.mk##)" 81 | cd "${dir}" 82 | if [[ -n "$(git status --porcelain)" ]]; then 83 | git commit -a -m "Update fingerprint/build description from ${build_id}" 84 | fi 85 | ) 86 | done 87 | } 88 | 89 | ### RUN PROGRAM ### 90 | 91 | main "${@}" 92 | 93 | 94 | ## 95 | -------------------------------------------------------------------------------- /pixel/device.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # SPDX-FileCopyrightText: 2022-2023 The Calyx Institute 4 | # 5 | # SPDX-License-Identifier: Apache-2.0 6 | 7 | # 8 | # device: 9 | # 10 | # Do it all for one device 11 | # 12 | # 13 | ############################################################################## 14 | 15 | 16 | ### SET ### 17 | 18 | # use bash strict mode 19 | set -euo pipefail 20 | 21 | ### TRAPS ### 22 | 23 | # trap signals for clean exit 24 | trap 'exit $?' EXIT 25 | trap 'error_m interrupted!' SIGINT 26 | 27 | ### CONSTANTS ### 28 | readonly script_path="$(cd "$(dirname "$0")";pwd -P)" 29 | readonly vars_path="${script_path}/../../../vendor/lineage/vars" 30 | readonly top="${script_path}/../../.." 
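# NOTE: downloaded factory/OTA images are cached per device and build under
# work_dir (WORK_DIR, defaulting to /tmp/pixel); export WORK_DIR to keep them
# somewhere more permanent, as described in pixel/README.md.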
31 | 32 | readonly work_dir="${WORK_DIR:-/tmp/pixel}" 33 | 34 | source "${vars_path}/pixels" 35 | source "${vars_path}/common" 36 | 37 | KEEP_DUMP=${KEEP_DUMP:-false} 38 | 39 | ## HELP MESSAGE (USAGE INFO) 40 | # TODO 41 | 42 | ### FUNCTIONS ### 43 | 44 | device() { 45 | local device="${1}" 46 | source "${vars_path}/${device}" 47 | 48 | if [[ "$os_branch" == "lineage-19.1" || "$os_branch" == "lineage-20.0" || "$os_branch" == "lineage-21.0" ]]; then 49 | local factory_dir="${work_dir}/${device}/${build_id}/factory/${device}-${build_id,,}" 50 | 51 | "${script_path}/download.sh" "${device}" 52 | "${script_path}/extract-factory-image.sh" "${device}" 53 | 54 | pushd "${top}" 55 | device/google/${device}/extract-files.sh "${factory_dir}" 56 | popd 57 | 58 | if [[ "$os_branch" == "lineage-19.1" || "$os_branch" == "lineage-20.0" ]]; then 59 | "${script_path}/firmware.sh" "${device}" 60 | fi 61 | else 62 | local factory_zip="${work_dir}/${device}/${build_id}/$(basename ${image_url})" 63 | local extract_args="${factory_zip}" 64 | 65 | "${script_path}/download.sh" "${device}" 66 | 67 | if [ "$KEEP_DUMP" == "true" ] || [ "$KEEP_DUMP" == "1" ]; then 68 | extract_args+=" --keep-dump" 69 | fi 70 | 71 | extract_args+=" --regenerate" 72 | 73 | pushd "${top}/device/google/${device}" 74 | ./extract-files.py ${extract_args} 75 | popd 76 | fi 77 | } 78 | 79 | # error message 80 | # ARG1: error message for STDERR 81 | # ARG2: error status 82 | error_m() { 83 | echo "ERROR: ${1:-'failed.'}" 1>&2 84 | return "${2:-1}" 85 | } 86 | 87 | # print help message. 88 | help_message() { 89 | echo "${help_message:-'No help available.'}" 90 | } 91 | 92 | main() { 93 | if [[ $# -eq 1 ]] ; then 94 | device "${1}" 95 | else 96 | error_m 97 | fi 98 | } 99 | 100 | ### RUN PROGRAM ### 101 | 102 | main "${@}" 103 | 104 | 105 | ## 106 | -------------------------------------------------------------------------------- /pixel/download.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # SPDX-FileCopyrightText: 2022 The Calyx Institute 4 | # 5 | # SPDX-License-Identifier: Apache-2.0 6 | 7 | # 8 | # download: 9 | # 10 | # Download Pixel factory images and OTA updates from Google 11 | # 12 | # 13 | ############################################################################## 14 | 15 | 16 | ### SET ### 17 | 18 | # use bash strict mode 19 | set -euo pipefail 20 | 21 | 22 | ### TRAPS ### 23 | 24 | # trap signals for clean exit 25 | trap 'exit $?' EXIT 26 | trap 'error_m interrupted!' 
SIGINT 27 | 28 | ### CONSTANTS ### 29 | readonly script_path="$(cd "$(dirname "$0")";pwd -P)" 30 | readonly vars_path="${script_path}/../../../vendor/lineage/vars" 31 | 32 | readonly work_dir="${WORK_DIR:-/tmp/pixel}" 33 | 34 | source "${vars_path}/pixels" 35 | 36 | readonly device="${1}" 37 | source "${vars_path}/${device}" 38 | 39 | ## HELP MESSAGE (USAGE INFO) 40 | # TODO 41 | 42 | ### FUNCTIONS ### 43 | 44 | download_factory_image() { 45 | local factory_dir="${work_dir}/${device}/${build_id}" 46 | mkdir -p "${factory_dir}" 47 | local output="${factory_dir}/$(basename ${image_url})" 48 | curl --http1.1 -C - -L -o "${output}" "${image_url}" 49 | echo "${image_sha256} ${output}" | sha256sum --check --status 50 | } 51 | 52 | download_ota_zip() { 53 | local ota_dir="${work_dir}/${device}/${build_id}" 54 | mkdir -p "${ota_dir}" 55 | local output="${ota_dir}/$(basename ${ota_url})" 56 | curl --http1.1 -C - -L -o "${output}" "${ota_url}" 57 | echo "${ota_sha256} ${output}" | sha256sum --check --status 58 | } 59 | 60 | # error message 61 | # ARG1: error message for STDERR 62 | # ARG2: error status 63 | error_m() { 64 | echo "ERROR: ${1:-'failed.'}" 1>&2 65 | return "${2:-1}" 66 | } 67 | 68 | # print help message. 69 | help_message() { 70 | echo "${help_message:-'No help available.'}" 71 | } 72 | 73 | main() { 74 | download_factory_image 75 | # Not all devices need OTA, most are supported in image_unpacker 76 | if [[ -n ${needs_ota-} ]]; then 77 | download_ota_zip 78 | fi 79 | } 80 | 81 | ### RUN PROGRAM ### 82 | 83 | main "${@}" 84 | 85 | 86 | ## 87 | -------------------------------------------------------------------------------- /pixel/extract-factory-image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # SPDX-FileCopyrightText: 2022-2023 The Calyx Institute 4 | # 5 | # SPDX-License-Identifier: Apache-2.0 6 | 7 | # 8 | # extract-factory-image: 9 | # 10 | # Extract Pixel factory images 11 | # 12 | # 13 | ############################################################################## 14 | 15 | 16 | ### SET ### 17 | 18 | # use bash strict mode 19 | set -euo pipefail 20 | 21 | 22 | ### TRAPS ### 23 | 24 | # trap signals for clean exit 25 | trap 'exit $?' EXIT 26 | trap 'error_m interrupted!' SIGINT 27 | 28 | ### CONSTANTS ### 29 | readonly script_path="$(cd "$(dirname "$0")";pwd -P)" 30 | readonly vars_path="${script_path}/../../../vendor/lineage/vars" 31 | 32 | readonly work_dir="${WORK_DIR:-/tmp/pixel}" 33 | 34 | source "${vars_path}/pixels" 35 | 36 | readonly device="${1}" 37 | source "${vars_path}/${device}" 38 | 39 | ## HELP MESSAGE (USAGE INFO) 40 | # TODO 41 | 42 | ### FUNCTIONS ### 43 | 44 | extract_factory_image() { 45 | local factory_dir="${work_dir}/${device}/${build_id}/factory" 46 | if [[ -d "${factory_dir}" ]]; then 47 | echo "Skipping factory image extraction, ${factory_dir} already exists" 48 | exit 49 | fi 50 | mkdir -p "${factory_dir}" 51 | local factory_zip="${work_dir}/${device}/${build_id}/$(basename ${image_url})" 52 | echo "${image_sha256} ${factory_zip}" | sha256sum --check --status 53 | pushd "${factory_dir}" 54 | unzip -o "${factory_zip}" 55 | pushd ${device}-${build_id,,} 56 | unzip -o "image-${device}-${build_id,,}.zip" 57 | popd 58 | popd 59 | } 60 | 61 | # error message 62 | # ARG1: error message for STDERR 63 | # ARG2: error status 64 | error_m() { 65 | echo "ERROR: ${1:-'failed.'}" 1>&2 66 | return "${2:-1}" 67 | } 68 | 69 | # print help message. 
70 | help_message() { 71 | echo "${help_message:-'No help available.'}" 72 | } 73 | 74 | main() { 75 | extract_factory_image 76 | } 77 | 78 | ### RUN PROGRAM ### 79 | 80 | main "${@}" 81 | 82 | 83 | ## 84 | -------------------------------------------------------------------------------- /pixel/firmware.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # SPDX-FileCopyrightText: 2022-2023 The Calyx Institute 4 | # 5 | # SPDX-License-Identifier: Apache-2.0 6 | 7 | # 8 | # firmware: 9 | # 10 | # Set up Pixel firmware 11 | # 12 | # 13 | ############################################################################## 14 | 15 | 16 | ### SET ### 17 | 18 | # use bash strict mode 19 | set -euo pipefail 20 | 21 | 22 | ### TRAPS ### 23 | 24 | # trap signals for clean exit 25 | trap 'exit $?' EXIT 26 | trap 'error_m interrupted!' SIGINT 27 | 28 | ### CONSTANTS ### 29 | readonly script_path="$(cd "$(dirname "$0")";pwd -P)" 30 | readonly vars_path="${script_path}/../../../vendor/lineage/vars" 31 | readonly top="${script_path}/../../.." 32 | 33 | readonly fbpacktool="${top}/lineage/scripts/fbpacktool/fbpacktool.py" 34 | readonly extract_ota_py="${top}/tools/extract-utils/extract_ota.py" 35 | 36 | readonly work_dir="${WORK_DIR:-/tmp/pixel}" 37 | 38 | source "${vars_path}/pixels" 39 | 40 | readonly device="${1}" 41 | source "${vars_path}/${device}" 42 | 43 | readonly factory_dir="${work_dir}/${device}/${build_id}/factory/${device}-${build_id,,}" 44 | readonly ota_zip="${work_dir}/${device}/${build_id}/$(basename ${ota_url})" 45 | readonly ota_firmware_dir="${work_dir}/${device}/${build_id}/firmware" 46 | 47 | readonly vendor_path="${top}/vendor/firmware/${device}" 48 | 49 | ## HELP MESSAGE (USAGE INFO) 50 | # TODO 51 | 52 | ### FUNCTIONS ### 53 | 54 | # Unpack the separate partitions needed for OTA 55 | # from the factory image's bootloader.img & radio.img 56 | unpack_firmware() { 57 | radio_img=$(compgen -G "${factory_dir}/radio-*.img" || true) 58 | if [ -n "${radio_img}" ]; then 59 | python3 "${fbpacktool}" unpack -o "${ota_firmware_dir}" "${radio_img}" 60 | fi 61 | 62 | python3 "${fbpacktool}" unpack -o "${ota_firmware_dir}" "${factory_dir}"/bootloader-*.img 63 | } 64 | 65 | extract_firmware() { 66 | echo "${ota_sha256} ${ota_zip}" | sha256sum --check --status 67 | python3 ${extract_ota_py} ${ota_zip} -o "${ota_firmware_dir}" -p ${firmware_partitions[@]} 68 | } 69 | 70 | # Firmware included in OTAs, separate partitions 71 | # Can be extracted from bootloader.img inside the factory image, 72 | # or directly from the OTA zip 73 | copy_ota_firmware() { 74 | for fp in ${firmware_partitions[@]}; do 75 | cp "${ota_firmware_dir}/${fp}.img" "${vendor_path}/radio/${fp}.img" 76 | done 77 | } 78 | 79 | setup_makefiles() { 80 | echo "AB_OTA_PARTITIONS += \\" > "${vendor_path}/config.mk" 81 | for fp in ${firmware_partitions[@]}; do 82 | echo " ${fp} \\" >> "${vendor_path}/config.mk" 83 | done 84 | 85 | echo "LOCAL_PATH := \$(call my-dir)" > "${vendor_path}/firmware.mk" 86 | echo >> "${vendor_path}/firmware.mk" 87 | echo "ifeq (\$(TARGET_DEVICE),${device})" >> "${vendor_path}/firmware.mk" 88 | for fp in ${firmware_partitions[@]}; do 89 | echo "\$(call add-radio-file,radio/${fp}.img)" >> "${vendor_path}/firmware.mk" 90 | done 91 | echo "endif" >> "${vendor_path}/firmware.mk" 92 | } 93 | 94 | # error message 95 | # ARG1: error message for STDERR 96 | # ARG2: error status 97 | error_m() { 98 | echo "ERROR: ${1:-'failed.'}" 1>&2 99 | return "${2:-1}" 100 | } 101 | 102 | # print help 
message. 103 | help_message() { 104 | echo "${help_message:-'No help available.'}" 105 | } 106 | 107 | main() { 108 | rm -rf "${ota_firmware_dir}" 109 | mkdir -p "${ota_firmware_dir}" 110 | rm -rf "${vendor_path}/radio" 111 | mkdir -p "${vendor_path}/radio" 112 | 113 | # Not all devices need OTA, most are supported in image_unpacker 114 | if [[ -n ${needs_ota-} ]]; then 115 | extract_firmware 116 | else 117 | unpack_firmware 118 | fi 119 | copy_ota_firmware 120 | setup_makefiles 121 | } 122 | 123 | ### RUN PROGRAM ### 124 | 125 | main "${@}" 126 | 127 | 128 | ## 129 | -------------------------------------------------------------------------------- /pixel/get-new-device-vars.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Tries to get device-specific info from various Google sites 4 | # Best effort 5 | # We require manual input of build id here to keep things easier 6 | 7 | import argparse 8 | import base64 9 | from functools import partial 10 | import re 11 | from bs4 import BeautifulSoup 12 | from git import cmd 13 | import os 14 | import urllib.request 15 | 16 | SCRIPT_PATH = os.path.realpath(os.path.dirname(__file__)) 17 | VARS_PATH = SCRIPT_PATH + os.path.sep + os.path.pardir + os.path.sep + "vars" 18 | 19 | IMAGE_URL = "https://developers.google.com/android/images" 20 | OTA_URL = "https://developers.google.com/android/ota" 21 | COOKIE = {'Cookie': 'devsite_wall_acks=nexus-image-tos,nexus-ota-tos'} 22 | 23 | PLATFORM_BUILD_URL = "https://android.googlesource.com/platform/build" 24 | BUILD_ID_URL = "https://android.googlesource.com/platform/build/+/refs/{}/core/build_id.mk?format=TEXT" 25 | BUILD_ID_FILTER = "BUILD_ID=" 26 | SECURITY_PATCH_URL = "https://android.googlesource.com/platform/build/+/refs/{}/core/version_defaults.mk?format=TEXT" 27 | SECURITY_PATCH_FILTER = "PLATFORM_SECURITY_PATCH :=" 28 | NEW_SECURITY_PATCH_URL = "https://android.googlesource.com/platform/build/release/+/refs/tags/{}/flag_values/{}/RELEASE_PLATFORM_SECURITY_PATCH.textproto?format=TEXT" 29 | NEW_SECURITY_PATCH_FILTER = "string_value: " 30 | 31 | def handle_image(soup, html_id, output_fn): 32 | td = soup.find(id=html_id).find_all('td') 33 | flash_url = td[1].a['href'] 34 | image_url = td[2].a['href'] 35 | image_sha256 = td[3].contents[0] 36 | build_number = flash_url.split("/")[4].split("?")[0] 37 | output_fn('new_build_number="{0}"\nnew_flash_url="{1}"\nnew_image_url="{2}"\nnew_image_sha256="{3}"'.format(build_number, flash_url, image_url, image_sha256)) 38 | 39 | def handle_ota(soup, html_id, output_fn): 40 | td = soup.find(id=html_id).find_all('td') 41 | ota_url = td[1].a['href'] 42 | ota_sha256 = td[2].contents[0] 43 | output_fn('new_ota_url="{0}"\nnew_ota_sha256="{1}"'.format(ota_url, ota_sha256)) 44 | 45 | def get_all_aosp_tags(tag_filter): 46 | all_tags = [] 47 | try: 48 | for line in cmd.Git().ls_remote("--sort=v:refname", PLATFORM_BUILD_URL, tag_filter, tags=True, refs=True).split('\n'): 49 | try: 50 | (ref, tag) = line.split('\t') 51 | except ValueError: 52 | pass 53 | all_tags.append(tag.replace("refs/tags/", "")) 54 | return all_tags 55 | except Exception as e: 56 | return all_tags 57 | 58 | def get_aosp_tags_for_build_ids(aosp_tags, m): 59 | try: 60 | for aosp_tag in aosp_tags: 61 | output = base64.decodebytes(urllib.request.urlopen(BUILD_ID_URL.format("tags/" + aosp_tag)).read()).decode() 62 | for line in output.split('\n'): 63 | if BUILD_ID_FILTER in line: 64 | found_build_id = line.split("=")[1] 65 | m[found_build_id] 
= aosp_tag 66 | except Exception as e: 67 | pass 68 | 69 | def get_security_patch_for_aosp_tag(aosp_tag): 70 | try: 71 | output = base64.decodebytes(urllib.request.urlopen(SECURITY_PATCH_URL.format("tags/" + aosp_tag)).read()).decode() 72 | except: 73 | return None 74 | 75 | for line in output.split('\n'): 76 | if SECURITY_PATCH_FILTER in line: 77 | security_patch = line.split(":=")[1].strip() 78 | return security_patch 79 | 80 | return None 81 | 82 | def get_security_patches_for_aosp_tags(aosp_tags, m): 83 | for aosp_tag in aosp_tags: 84 | security_patch = get_security_patch_for_aosp_tag(aosp_tag) 85 | if security_patch is None: 86 | continue 87 | 88 | m[aosp_tag] = security_patch 89 | 90 | 91 | def get_security_patch(aosp_tag, release): 92 | try: 93 | url = NEW_SECURITY_PATCH_URL.format(aosp_tag, release) 94 | output = base64.decodebytes(urllib.request.urlopen(url).read()).decode() 95 | except: 96 | return None 97 | 98 | for line in output.split('\n'): 99 | if NEW_SECURITY_PATCH_FILTER in line: 100 | match = re.search(r'\d{4}-\d{2}-\d{2}', line) 101 | security_patch = match.group(0) if match else None 102 | return security_patch 103 | 104 | return None 105 | 106 | 107 | def get_security_patches(aosp_tags, build_ids, m): 108 | for build_id in build_ids: 109 | release, _ = build_id.split('.', 1) 110 | release = release.lower() 111 | 112 | for aosp_tag in aosp_tags: 113 | security_patch = get_security_patch(aosp_tag, release) 114 | if security_patch is None: 115 | continue 116 | 117 | m[aosp_tag] = security_patch 118 | 119 | 120 | def main(): 121 | parser = argparse.ArgumentParser() 122 | parser.add_argument('--devices', help="Device codenames", type=str, nargs='+', required=True) 123 | parser.add_argument('--build-ids', help="Build IDs", type=str, nargs='+', required=True) 124 | parser.add_argument('--tmps', help="Temporary files to store device vars into", type=str, nargs='+', required=True) 125 | parser.add_argument('-t', '--tags_match', default="android-13.0", help='Android version tag to match', type=str) 126 | args = parser.parse_args() 127 | 128 | assert len(args.devices) == len(args.build_ids) == len(args.tmps) 129 | 130 | image_html = urllib.request.urlopen(urllib.request.Request(IMAGE_URL, headers=COOKIE)).read() 131 | image_soup = BeautifulSoup(image_html, 'html.parser') 132 | 133 | ota_html = urllib.request.urlopen(urllib.request.Request(OTA_URL, headers=COOKIE)).read() 134 | ota_soup = BeautifulSoup(ota_html, 'html.parser') 135 | 136 | all_aosp_tags = get_all_aosp_tags("{0}*".format(args.tags_match)) 137 | build_ids = set(args.build_ids) 138 | 139 | build_id_aosp_tag_map = {} 140 | get_aosp_tags_for_build_ids(all_aosp_tags, build_id_aosp_tag_map) 141 | 142 | aosp_tag_security_patch_map = {} 143 | get_security_patches_for_aosp_tags(all_aosp_tags, aosp_tag_security_patch_map) 144 | get_security_patches(all_aosp_tags, build_ids, aosp_tag_security_patch_map) 145 | 146 | def handle_device(device, build_id, output_fn): 147 | html_id = "{0}{1}".format(device, build_id.lower()) 148 | handle_image(image_soup, html_id, output_fn) 149 | handle_ota(ota_soup, html_id, output_fn) 150 | aosp_tag = build_id_aosp_tag_map.get(build_id, 'unknown') 151 | output_fn('new_aosp_tag="{0}"'.format(aosp_tag)) 152 | security_patch = aosp_tag_security_patch_map.get(aosp_tag, 'unknown') 153 | output_fn('new_security_patch="{0}"'.format(security_patch)) 154 | 155 | for device, build_id, tmp in zip(args.devices, args.build_ids, args.tmps): 156 | with open(tmp, 'w', encoding='utf-8') as f: 157 | def 
output_fns(fs, s): 158 | fs.write(s) 159 | fs.write('\n') 160 | 161 | output_fn = partial(output_fns, f) 162 | handle_device(device, build_id, output_fn) 163 | 164 | 165 | if __name__ == "__main__": 166 | main() 167 | -------------------------------------------------------------------------------- /pixel/prepare-firmware.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # SPDX-FileCopyrightText: 2022-2023 The Calyx Institute 4 | # 5 | # SPDX-License-Identifier: Apache-2.0 6 | 7 | # 8 | # prepare-firmware: 9 | # 10 | # Pixel firmware preparation hook for extract-utils 11 | # 12 | # 13 | ############################################################################## 14 | 15 | 16 | ### SET ### 17 | 18 | # use bash strict mode 19 | set -euo pipefail 20 | 21 | 22 | ### TRAPS ### 23 | 24 | # trap signals for clean exit 25 | trap 'exit $?' EXIT 26 | trap 'error_m interrupted!' SIGINT 27 | 28 | ### CONSTANTS ### 29 | readonly script_path="$(cd "$(dirname "$0")";pwd -P)" 30 | readonly top="${script_path}/../../.." 31 | 32 | readonly fbpacktool="${top}/lineage/scripts/fbpacktool/fbpacktool.py" 33 | 34 | readonly src_dir="${2}" 35 | 36 | ## HELP MESSAGE (USAGE INFO) 37 | # TODO 38 | 39 | ### FUNCTIONS ### 40 | 41 | # Unpack the separate partitions needed for OTA 42 | # from the factory image's bootloader.img & radio.img 43 | unpack_firmware() { 44 | radio_img=$(compgen -G "${src_dir}/radio-*.img" || true) 45 | if [ -n "${radio_img}" ]; then 46 | python3 "${fbpacktool}" unpack -o "${src_dir}" "${radio_img}" 47 | fi 48 | 49 | python3 "${fbpacktool}" unpack -o "${src_dir}" "${src_dir}"/bootloader-*.img 50 | } 51 | 52 | # error message 53 | # ARG1: error message for STDERR 54 | # ARG2: error status 55 | error_m() { 56 | echo "ERROR: ${1:-'failed.'}" 1>&2 57 | return "${2:-1}" 58 | } 59 | 60 | # print help message. 61 | help_message() { 62 | echo "${help_message:-'No help available.'}" 63 | } 64 | 65 | main() { 66 | unpack_firmware 67 | } 68 | 69 | ### RUN PROGRAM ### 70 | 71 | main "${@}" 72 | 73 | 74 | ## 75 | -------------------------------------------------------------------------------- /pixel/update-any-var.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # SPDX-FileCopyrightText: 2022 The Calyx Institute 4 | # 5 | # SPDX-License-Identifier: Apache-2.0 6 | 7 | # 8 | # update-any-var: 9 | # 10 | # Update a given variable in the Pixel device-specific variable files 11 | # 12 | # 13 | ############################################################################## 14 | 15 | 16 | ### SET ### 17 | 18 | # use bash strict mode 19 | set -euo pipefail 20 | 21 | 22 | ### TRAPS ### 23 | 24 | # trap signals for clean exit 25 | trap 'error_m interrupted!' SIGINT 26 | 27 | ### CONSTANTS ### 28 | readonly script_path="$(cd "$(dirname "$0")";pwd -P)" 29 | readonly vars_path="${script_path}/../../../vendor/lineage/vars" 30 | 31 | source "${vars_path}/pixels" 32 | 33 | ## HELP MESSAGE (USAGE INFO) 34 | # TODO 35 | 36 | ### FUNCTIONS ### 37 | 38 | 39 | 40 | # error message 41 | # ARG1: error message for STDERR 42 | # ARG2: error status 43 | error_m() { 44 | echo "ERROR: ${1:-'failed.'}" 1>&2 45 | return "${2:-1}" 46 | } 47 | 48 | # print help message.
49 | help_message() { 50 | echo "${help_message:-'No help available.'}" 51 | } 52 | 53 | main() { 54 | local key="${1}" 55 | local value="${2}" 56 | shift; shift 57 | if [[ $# -ne 0 ]]; then 58 | local files="${@}" 59 | else 60 | local files="${devices[@]}" 61 | fi 62 | 63 | for f in ${files}; do 64 | ( 65 | local fv="${vars_path}/${f}" 66 | source "${fv}" 67 | sed -i "/ prev_${key}=/c\readonly prev_${key}=\"${!key}\"" "${fv}" 68 | sed -i "/ ${key}=/c\readonly ${key}=\"$value\"" "${fv}" 69 | ) 70 | done 71 | } 72 | 73 | ### RUN PROGRAM ### 74 | 75 | main "${@}" 76 | 77 | 78 | ## 79 | -------------------------------------------------------------------------------- /pixel/update-device-vars.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # SPDX-FileCopyrightText: 2022 The Calyx Institute 4 | # 5 | # SPDX-License-Identifier: Apache-2.0 6 | 7 | # 8 | # update-vars: 9 | # 10 | # Update Pixel device-specific variables by parsing Google's pages 11 | # 12 | # 13 | ############################################################################## 14 | 15 | 16 | ### SET ### 17 | 18 | # use bash strict mode 19 | set -euo pipefail 20 | 21 | 22 | ### TRAPS ### 23 | 24 | # trap signals for clean exit 25 | trap 'rm -rf ${tmp_dir} && exit $?' EXIT 26 | trap 'error_m interrupted!' SIGINT 27 | 28 | ### CONSTANTS ### 29 | readonly script_path="$(cd "$(dirname "$0")";pwd -P)" 30 | readonly vars_path="${script_path}/../../../vendor/lineage/vars" 31 | 32 | readonly tmp_dir="${TMPDIR:-/tmp}/pixel" 33 | 34 | source "${vars_path}/pixels" 35 | source "${vars_path}/common" 36 | 37 | ## HELP MESSAGE (USAGE INFO) 38 | # TODO 39 | 40 | ### FUNCTIONS ### 41 | 42 | 43 | 44 | # error message 45 | # ARG1: error message for STDERR 46 | # ARG2: error status 47 | error_m() { 48 | echo "ERROR: ${1:-'failed.'}" 1>&2 49 | return "${2:-1}" 50 | } 51 | 52 | # print help message. 
53 | help_message() { 54 | echo "${help_message:-'No help available.'}" 55 | } 56 | 57 | main() { 58 | mkdir -p "${tmp_dir}" 59 | if [[ $# -ne 0 ]]; then 60 | ds=("${@}") 61 | else 62 | ds=("${devices[@]}") 63 | fi 64 | 65 | declare -a tmps 66 | declare -a build_ids 67 | for d in "${ds[@]}"; do 68 | local tmp=$(mktemp "${tmp_dir}/${d}.XXXXXXXXXX") 69 | tmps+=("$tmp") 70 | 71 | # Variables are marked readonly, do this to avoid it 72 | build_id=$( 73 | local dv="${vars_path}/${d}" 74 | source "${dv}" 75 | echo "${build_id}" 76 | ) 77 | build_ids+=("${build_id}") 78 | done 79 | 80 | ${script_path}/get-new-device-vars.py --devices "${ds[@]}" --build-ids "${build_ids[@]}" --tmps "${tmps[@]}" -t ${aosp_tag_match} 81 | 82 | for i in "${!ds[@]}"; do 83 | d="${ds[$i]}" 84 | tmp="${tmps[$i]}" 85 | ( 86 | local dv="${vars_path}/${d}" 87 | source "${dv}" 88 | source "${tmp}" 89 | if [[ "${new_aosp_tag}" != "${aosp_tag}" ]]; then 90 | sed -i "/ prev_aosp_tag=/c\readonly prev_aosp_tag=\"$aosp_tag\"" "${dv}" 91 | sed -i "/ aosp_tag=/c\readonly aosp_tag=\"$new_aosp_tag\"" "${dv}" 92 | fi 93 | sed -i "/ build_number=/c\readonly build_number=\"$new_build_number\"" "${dv}" 94 | sed -i "/ image_url=/c\readonly image_url=\"$new_image_url\"" "${dv}" 95 | sed -i "/ image_sha256=/c\readonly image_sha256=\"$new_image_sha256\"" "${dv}" 96 | sed -i "/ flash_url=/c\readonly flash_url=\"$new_flash_url\"" "${dv}" 97 | sed -i "/ ota_url=/c\readonly ota_url=\"$new_ota_url\"" "${dv}" 98 | sed -i "/ ota_sha256=/c\readonly ota_sha256=\"$new_ota_sha256\"" "${dv}" 99 | sed -i "/ security_patch=/c\readonly security_patch=\"$new_security_patch\"" "${dv}" 100 | ) 101 | done 102 | } 103 | 104 | ### RUN PROGRAM ### 105 | 106 | main "${@}" 107 | 108 | 109 | ## 110 | -------------------------------------------------------------------------------- /reuse_helper/.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | __pycache__ 3 | -------------------------------------------------------------------------------- /reuse_helper/README.md: -------------------------------------------------------------------------------- 1 | # REUSE compliance converter 2 | 3 | This script will try to parse and replace existing comments to the proper SPDX ones. 
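For example, a Python source file that begins with the usual Apache 2.0 boilerplate header (the header below is purely illustrative) should end up with the equivalent SPDX tags, roughly:

```
# Before:
# Copyright (C) 2021 The LineageOS Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0

# After:
# SPDX-FileCopyrightText: 2021 The LineageOS Project
# SPDX-License-Identifier: Apache-2.0
```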
4 | 5 | ## Requirements 6 | 7 | This requires the `pipx` package to be installed. 8 | 9 | ## Usage 10 | 11 | ``` 12 | reuse_helper.py -p <project> 13 | ``` 14 | 15 | Required arguments:\ 16 | -p PROJECT, --project PROJECT Specify the path of the project you want to convert (relative to lineage sources) 17 | 18 | Optional arguments:\ 19 | -h, --help show this help message and exit 20 | -r ROOT, --root ROOT Specify the root path of your sources 21 | -------------------------------------------------------------------------------- /reuse_helper/reuse_helper.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # SPDX-FileCopyrightText: 2023 The LineageOS Project 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | # REUSE-IgnoreStart 7 | import argparse 8 | import os 9 | import re 10 | import sys 11 | 12 | from pathlib import Path 13 | from utils import check_dependencies, run_subprocess 14 | 15 | 16 | def fix_files(project_path, extension, args): 17 | extension_map = { 18 | "*.aidl": ["java"], 19 | "*.flags": ["py"], 20 | "*.java": ["java"], 21 | "*.kt": ["java"], 22 | "*.bp": ["go"], 23 | "*.proto": ["java", "c"], 24 | "*.xml": ["xml"], 25 | "*.py": ["py"], 26 | } 27 | path_list = Path(project_path).rglob(extension) 28 | for item in path_list: 29 | path_in_str = str(item) 30 | if extension in extension_map: 31 | for comment_style in extension_map[extension]: 32 | clean_file(path_in_str, comment_style, args) 33 | return 34 | 35 | 36 | def clean_file(file, comment_style, args): 37 | if should_ignore_file(file): 38 | return 39 | 40 | try: 41 | fh = open(file, "r+") 42 | except OSError: 43 | print(f"Something went wrong while opening file {file}") 44 | return 45 | 46 | content = fh.read() 47 | 48 | pattern_map = { 49 | "c": r"((//[^\n]*\n)*(//)?)", 50 | "go": r"((//[^\n]*\n)*(//)?)", 51 | "java": r"(/\*.*?\*/)", 52 | "py": r"((#[^\n]*\n)*#?)", 53 | "xml": r"(<!--.*?-->)", 54 | } 55 | if comment_style not in pattern_map: 56 | print(f"Comment style '{comment_style}' unsupported!") 57 | return 58 | pattern = pattern_map[comment_style] 59 | match = re.search(pattern, content, re.DOTALL) 60 | if match is None: 61 | fh.close() 62 | return 63 | 64 | comment = match.group(1) 65 | license_type = get_license_type(comment) 66 | parts = comment.split("\n") 67 | if len(parts) == 1: 68 | fh.close() 69 | return 70 | 71 | i = 0 72 | match = None 73 | while match is None: 74 | if len(parts) <= i: 75 | break 76 | match = re.search(r".*Copyright (?:\([cC]\))?\s*(.*)", parts[i]) 77 | if not match: 78 | i += 1 79 | 80 | if match is None: 81 | fh.close() 82 | return 83 | 84 | copyright_lines = [match.group(1)] 85 | pattern = re.compile(r"\s*\*?\s+(?:(?:Copyright )?\([Cc]\))?\s*((\d+)(.*))") 86 | match = pattern.match(parts[i + 1]) 87 | while match is not None: 88 | copyright_lines.append(match.group(1)) 89 | i += 1 90 | match = pattern.match(parts[i + 1]) 91 | 92 | if license_type is not None: 93 | new_comment = build_spdx_comment(comment_style, copyright_lines, license_type) 94 | new_content = content.replace(comment, new_comment) 95 | if args.fix_newlines: 96 | if new_content[-1] != "\n": 97 | new_content += "\n" 98 | fh.seek(0) 99 | fh.write(new_content) 100 | fh.truncate() 101 | fh.close() 102 | 103 | 104 | def should_ignore_file(file): 105 | if not "/res/values-" in file: 106 | return False 107 | else: 108 | # We want to ignore translations 109 | can_modify_values = ["land", "large", "night", "television", "v2", "v3"] 110 | for m in can_modify_values: 111 | if
re.search(rf"/values-{m}", file): 112 | return False 113 | return True 114 | 115 | 116 | def build_spdx_comment(comment_style, copyright_lines, license_type): 117 | if comment_style == "go": 118 | return build_comment(copyright_lines, license_type, "//\n", "// ", "//\n") 119 | elif comment_style == "java" or comment_style == "c": 120 | return build_comment(copyright_lines, license_type, "/*\n", " * ", " */") 121 | elif comment_style == "xml": 122 | return build_comment(copyright_lines, license_type, "") 123 | elif comment_style == "py": 124 | return build_comment(copyright_lines, license_type, "", "# ", "") 125 | else: 126 | return "" 127 | 128 | 129 | def build_comment(copyright_lines, license_type, comment_start, comment_middle, comment_end): 130 | comment = comment_start 131 | for line in copyright_lines: 132 | comment += f"{comment_middle}SPDX-FileCopyrightText: {line}\n" 133 | comment += f"{comment_middle}SPDX-License-Identifier: {license_type}\n" 134 | comment += comment_end 135 | return comment 136 | 137 | 138 | def get_license_type(comment): 139 | lic = None 140 | if "http://www.apache.org/licenses/LICENSE-2.0" in comment: 141 | lic = "Apache-2.0" 142 | elif "GNU General Public" in comment and "version 2" in comment: 143 | lic = "GPL-2.0-or-later" 144 | 145 | return lic 146 | 147 | 148 | def parse_args(): 149 | parser = argparse.ArgumentParser(description="Make project REUSE compliant") 150 | parser.add_argument( 151 | "-r", "--root", default=None, help="Specify the root path of your sources" 152 | ) 153 | parser.add_argument( 154 | "-p", 155 | "--project", 156 | required=True, 157 | help="Specify the relative path of the project you want to convert", 158 | ) 159 | parser.add_argument( 160 | "-f", 161 | "--fix_newlines", 162 | action="store_true", 163 | help="Add newlines to files that miss them", 164 | ) 165 | return parser.parse_args() 166 | 167 | 168 | def main(): 169 | args = parse_args() 170 | root = args.root 171 | if args.root is None: 172 | root = str(Path.cwd()) 173 | root = root.replace("/lineage/scripts/reuse_helper", "") 174 | 175 | path = os.path.join(root, args.project) 176 | 177 | # We need "pipx" 178 | if not check_dependencies(): 179 | sys.exit(-1) 180 | 181 | # Parse and change known file-/comment-types 182 | extensions = ["aidl", "flags", "java", "kt", "xml", "bp", "proto", "py"] 183 | for ext in extensions: 184 | fix_files(path, f"*.{ext}", args) 185 | 186 | # Download all licenses automatically 187 | os.chdir(path) 188 | _, code = run_subprocess(["pipx", "run", "reuse", "download", "--all"], True) 189 | 190 | 191 | if __name__ == "__main__": 192 | main() 193 | # REUSE-IgnoreEnd 194 | -------------------------------------------------------------------------------- /reuse_helper/utils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # SPDX-FileCopyrightText: 2023 The LineageOS Project 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | import sys 7 | from subprocess import Popen, PIPE 8 | 9 | 10 | def run_subprocess(cmd, silent=False): 11 | p = Popen(cmd, stdout=PIPE, stderr=PIPE, universal_newlines=True) 12 | comm = p.communicate() 13 | exit_code = p.returncode 14 | if exit_code != 0 and not silent: 15 | print( 16 | "There was an error running the subprocess.\n" 17 | "cmd: %s\n" 18 | "exit code: %d\n" 19 | "stdout: %s\n" 20 | "stderr: %s" % (cmd, exit_code, comm[0], comm[1]), 21 | file=sys.stderr, 22 | ) 23 | return comm, exit_code 24 | 25 | 26 | def check_run(cmd): 27 | p = Popen(cmd, 
stdout=sys.stdout, stderr=sys.stderr) 28 | ret = p.wait() 29 | if ret != 0: 30 | joined = " ".join(cmd) 31 | print(f"Failed to run cmd: {joined}", file=sys.stderr) 32 | sys.exit(ret) 33 | 34 | 35 | def check_dependencies(): 36 | # Check that pipx is installed 37 | cmd = ["which", "pipx"] 38 | msg, code = run_subprocess(cmd, silent=True) 39 | if code != 0: 40 | print("You have not installed pipx.", file=sys.stderr) 41 | return False 42 | return True 43 | -------------------------------------------------------------------------------- /set-default-branch/set-default-branch: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # SPDX-FileCopyrightText: 2025 The LineageOS Project 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | set -u 7 | 8 | ORG="$1" 9 | BRANCHES="${@:2}" 10 | 11 | for repo in $(gh repo list "${ORG}" -L 99999 --json name | jq ".[].name" --raw-output); do 12 | for branch in $BRANCHES; do 13 | gh repo edit "${ORG}/${repo}" --default-branch "${branch}" 14 | done 15 | done 16 | -------------------------------------------------------------------------------- /shipper/requirements.txt: -------------------------------------------------------------------------------- 1 | PyYAML==6.0.1 2 | -------------------------------------------------------------------------------- /shipper/shipper.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # SPDX-FileCopyrightText: 2024 The LineageOS Project 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | import argparse 7 | import collections 8 | import dataclasses 9 | import glob 10 | import pathlib 11 | import subprocess 12 | import textwrap 13 | 14 | import yaml 15 | 16 | CROOT = str(pathlib.Path(__file__).parents[3]) 17 | 18 | 19 | @dataclasses.dataclass 20 | class WikiData: 21 | codename: str 22 | device_names: list 23 | maintainers: set 24 | versions: list 25 | 26 | def to_jekyll_table(self) -> str: 27 | return '| {} |'.format(' | '.join([ 28 | ' / '.join(self.device_names), 29 | f'[{self.codename}](https://wiki.lineageos.org/devices/{self.codename})', 30 | ', '.join(self.maintainers), 31 | str(self.versions[-2]) if len(self.versions) > 1 else '' 32 | ])) 33 | 34 | 35 | def get_build_targets(git_head: str) -> dict: 36 | build_targets = {} 37 | 38 | for line in subprocess.run(['git', 'show', f'{git_head}:lineage-build-targets'], 39 | cwd=f'{CROOT}/lineage/hudson', 40 | stdout=subprocess.PIPE).stdout.decode().splitlines(): 41 | if line and not line.startswith('#'): 42 | device, build_type, version, cadence = line.split() 43 | build_targets[device] = version 44 | 45 | return build_targets 46 | 47 | 48 | def get_wiki_data(codename: str) -> WikiData: 49 | device_names = [] 50 | maintainers = [] 51 | versions = [] 52 | 53 | for path in sorted(glob.glob(f'{CROOT}/lineage/wiki/_data/devices/{codename}.yml') + glob.glob( 54 | f'{CROOT}/lineage/wiki/_data/devices/{codename}_variant*.yml')): 55 | doc = yaml.load(open(path, 'r').read(), Loader=yaml.SafeLoader) 56 | if doc['maintainers']: 57 | device_names.append(f'{doc["vendor"]} {doc["name"]}') 58 | maintainers = doc['maintainers'] 59 | versions = doc['versions'] 60 | 61 | return WikiData(codename, device_names, maintainers, versions) 62 | 63 | 64 | def parse_cmdline() -> argparse.Namespace: 65 | parser = argparse.ArgumentParser(description='Generate device table for the blog post') 66 | parser.add_argument('hudson_git_head', 67 | help='Hudson Git HEAD as of the previous changelog 
post') 68 | return parser.parse_args() 69 | 70 | 71 | def main() -> None: 72 | args = parse_cmdline() 73 | 74 | build_targets_before = get_build_targets(args.hudson_git_head) 75 | build_targets_after = get_build_targets('HEAD') 76 | 77 | new_devices = collections.defaultdict(list) 78 | 79 | for codename, branch in build_targets_after.items(): 80 | if build_targets_before.get(codename, None) != branch: 81 | new_devices[branch].append(codename) 82 | 83 | for branch, codenames in new_devices.items(): 84 | _, version = branch.split('-') 85 | 86 | if version.endswith('.0'): 87 | version = version[:-2] 88 | 89 | print(textwrap.dedent(f'''\ 90 | #### Added {version} devices 91 | 92 | {{: .table }} 93 | | Device name | Wiki | Maintainers | Moved from | 94 | |-------------|------|-------------|------------|''')) 95 | print('\n'.join( 96 | sorted([get_wiki_data(x).to_jekyll_table() for x in codenames]) 97 | )) 98 | print() 99 | 100 | 101 | if __name__ == '__main__': 102 | main() 103 | --------------------------------------------------------------------------------